Show More
@@ -1,988 +1,986 b'' | |||
|
1 | 1 | # Copyright (C) 2016-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import time |
|
20 | 20 | import logging |
|
21 | 21 | import operator |
|
22 | 22 | |
|
23 | 23 | from pyramid.httpexceptions import HTTPFound, HTTPForbidden, HTTPBadRequest |
|
24 | 24 | |
|
25 | 25 | from rhodecode.lib import helpers as h, diffs, rc_cache |
|
26 | 26 | from rhodecode.lib.str_utils import safe_str |
|
27 | 27 | from rhodecode.lib.utils import repo_name_slug |
|
28 | 28 | from rhodecode.lib.utils2 import ( |
|
29 | 29 | StrictAttributeDict, |
|
30 | 30 | str2bool, |
|
31 | 31 | safe_int, |
|
32 | 32 | datetime_to_time, |
|
33 | 33 | ) |
|
34 | 34 | from rhodecode.lib.markup_renderer import MarkupRenderer, relative_links |
|
35 | 35 | from rhodecode.lib.vcs.backends.base import EmptyCommit |
|
36 | 36 | from rhodecode.lib.vcs.exceptions import RepositoryRequirementError |
|
37 | 37 | from rhodecode.model import repo |
|
38 | 38 | from rhodecode.model import repo_group |
|
39 | 39 | from rhodecode.model import user_group |
|
40 | 40 | from rhodecode.model import user |
|
41 | 41 | from rhodecode.model.db import User |
|
42 | 42 | from rhodecode.model.scm import ScmModel |
|
43 | 43 | from rhodecode.model.settings import VcsSettingsModel, IssueTrackerSettingsModel |
|
44 | 44 | from rhodecode.model.repo import ReadmeFinder |
|
45 | 45 | |
|
46 | 46 | log = logging.getLogger(__name__) |
|
47 | 47 | |
|
48 | 48 | |
|
ADMIN_PREFIX: str = "/_admin"
STATIC_FILE_PREFIX: str = "/_static"

# Shared regex requirements applied when expanding pyramid route patterns
# (see add_route_requirements below).
URL_NAME_REQUIREMENTS = {
    # group names may contain slashes but must not end with one
    "group_name": r".*?[^/]",
    "repo_group_name": r".*?[^/]",
    # repo names may contain slashes but must not end with one
    "repo_name": r".*?[^/]",
    # file path consumes everything up to the end of the URL
    "f_path": r".*",
    # allowed reference types, plus the literal %()s placeholder forms
    "source_ref_type": r"(branch|book|tag|rev|\%\(source_ref_type\)s)",
    "target_ref_type": r"(branch|book|tag|rev|\%\(target_ref_type\)s)",
}
|
64 | 64 | |
|
65 | 65 | |
|
def add_route_with_slash(config, name, pattern, **kw):
    """Register ``pattern`` and, when it lacks one, a twin route with a trailing slash."""
    config.add_route(name, pattern, **kw)
    if not pattern.endswith("/"):
        config.add_route(f"{name}_slash", f"{pattern}/", **kw)
|
70 | 70 | |
|
71 | 71 | |
|
def add_route_requirements(route_path, requirements=None):
    """
    Expand ``{name}`` placeholders in a pyramid route pattern into the
    ``{name:regex}`` form using a mapping of regex requirements, e.g.::

        add_route_requirements('{repo_name}/settings')

    Falls back to the module-level URL_NAME_REQUIREMENTS mapping when no
    explicit ``requirements`` dict is supplied.
    """
    mapping = requirements or URL_NAME_REQUIREMENTS
    for key, regex in mapping.items():
        route_path = route_path.replace("{%s}" % key, "{%s:%s}" % (key, regex))
    return route_path
|
82 | 82 | |
|
83 | 83 | |
|
def get_format_ref_id(repo):
    """Returns a `repo` specific reference formatter function"""
    # Subversion references carry the revision in the formatted id.
    return _format_ref_id_svn if h.is_svn(repo) else _format_ref_id
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | def _format_ref_id(name, raw_id): |
|
93 | 93 | """Default formatting of a given reference `name`""" |
|
94 | 94 | return name |
|
95 | 95 | |
|
96 | 96 | |
|
97 | 97 | def _format_ref_id_svn(name, raw_id): |
|
98 | 98 | """Special way of formatting a reference for Subversion including path""" |
|
99 | 99 | return f"{name}@{raw_id}" |
|
100 | 100 | |
|
101 | 101 | |
|
class TemplateArgs(StrictAttributeDict):
    """Container for template context variables with strict attribute access."""
|
104 | 104 | |
|
105 | 105 | |
|
class BaseAppView(object):
    """
    Base class for all pyramid views; wires up the authenticated user and
    enforces forced password-change / 2FA redirects on every request.
    """

    # views that must never trigger the password-change / 2FA redirects
    DONT_CHECKOUT_VIEWS = ["channelstream_connect", "ops_ping"]
    # auth-related views that must always stay reachable
    EXTRA_VIEWS_TO_IGNORE = ['login', 'register', 'logout']
    SETUP_2FA_VIEW = 'setup_2fa'
    VERIFY_2FA_VIEW = 'check_2fa'

    def __init__(self, context, request):
        self.request = request
        self.context = context
        self.session = request.session
        if not hasattr(request, "user"):
            # NOTE(marcink): edge case, we ended up in matched route
            # but probably of web-app context, e.g API CALL/VCS CALL
            if hasattr(request, "vcs_call") or hasattr(request, "rpc_method"):
                log.warning("Unable to process request `%s` in this scope", request)
                raise HTTPBadRequest()

        self._rhodecode_user = request.user  # auth user
        self._rhodecode_db_user = self._rhodecode_user.get_instance()
        self.user_data = self._rhodecode_db_user.user_data if self._rhodecode_db_user else {}
        self._maybe_needs_password_change(
            request.matched_route.name, self._rhodecode_db_user
        )
        self._maybe_needs_2fa_configuration(
            request.matched_route.name, self._rhodecode_db_user
        )
        self._maybe_needs_2fa_check(
            request.matched_route.name, self._rhodecode_db_user
        )

    def _maybe_needs_password_change(self, view_name, user_obj):
        """Redirect to the password-change page when a forced change is overdue."""
        if view_name in self.DONT_CHECKOUT_VIEWS:
            return

        log.debug(
            "Checking if user %s needs password change on view %s", user_obj, view_name
        )

        # views that must stay reachable even while a change is pending
        skip_user_views = [
            "logout",
            "login",
            "my_account_password",
            "my_account_password_update",
        ]

        if not user_obj:
            return

        if user_obj.username == User.DEFAULT_USER:
            return

        now = time.time()
        # force_password_change holds a timestamp after which the change is due
        should_change = self.user_data.get("force_password_change")
        change_after = safe_int(should_change) or 0
        if should_change and now > change_after:
            log.debug("User %s requires password change", user_obj)
            h.flash(
                "You are required to change your password",
                "warning",
                ignore_duplicate=True,
            )

            if view_name not in skip_user_views:
                raise HTTPFound(self.request.route_path("my_account_password"))

    def _maybe_needs_2fa_configuration(self, view_name, user_obj):
        """Redirect to the 2FA setup page when configuration is still required."""
        if view_name in self.DONT_CHECKOUT_VIEWS + self.EXTRA_VIEWS_TO_IGNORE:
            return

        if not user_obj:
            return

        # externally-managed accounts with forced 2FA configure it elsewhere
        if user_obj.has_forced_2fa and user_obj.extern_type != 'rhodecode':
            return

        if user_obj.needs_2fa_configure and view_name != self.SETUP_2FA_VIEW:
            h.flash(
                "You are required to configure 2FA",
                "warning",
                ignore_duplicate=False,
            )
            raise HTTPFound(self.request.route_path(self.SETUP_2FA_VIEW))

    def _maybe_needs_2fa_check(self, view_name, user_obj):
        """Redirect to the 2FA verification page when a check is pending."""
        if view_name in self.DONT_CHECKOUT_VIEWS + self.EXTRA_VIEWS_TO_IGNORE:
            return

        if not user_obj:
            return

        if user_obj.has_check_2fa_flag and view_name != self.VERIFY_2FA_VIEW:
            raise HTTPFound(self.request.route_path(self.VERIFY_2FA_VIEW))

    def _log_creation_exception(self, e, repo_name):
        """Log a repository-creation exception and return a translated message."""
        _ = self.request.translate
        reason = None
        if len(e.args) == 2:
            reason = e.args[1]

        if reason == "INVALID_CERTIFICATE":
            log.exception("Exception creating a repository: invalid certificate")
            msg = _("Error creating repository %s: invalid certificate") % repo_name
        else:
            log.exception("Exception creating a repository")
            msg = _("Error creating repository %s") % repo_name
        return msg

    def _get_local_tmpl_context(self, include_app_defaults=True):
        """Build the template context `c` with auth/permission flags set."""
        c = TemplateArgs()
        c.auth_user = self.request.user
        # TODO(marcink): migrate the usage of c.rhodecode_user to c.auth_user
        c.rhodecode_user = self.request.user

        if include_app_defaults:
            from rhodecode.lib.base import attach_context_attributes

            attach_context_attributes(c, self.request, self.request.user.user_id)

        c.is_super_admin = c.auth_user.is_admin

        # super-admins can create everything; others are checked below
        c.can_create_repo = c.is_super_admin
        c.can_create_repo_group = c.is_super_admin
        c.can_create_user_group = c.is_super_admin

        c.is_delegated_admin = False

        if not c.auth_user.is_default and not c.is_super_admin:
            c.can_create_repo = h.HasPermissionAny("hg.create.repository")(
                user=self.request.user
            )
            repositories = c.auth_user.repositories_admin or c.can_create_repo

            c.can_create_repo_group = h.HasPermissionAny("hg.repogroup.create.true")(
                user=self.request.user
            )
            repository_groups = (
                c.auth_user.repository_groups_admin or c.can_create_repo_group
            )

            c.can_create_user_group = h.HasPermissionAny("hg.usergroup.create.true")(
                user=self.request.user
            )
            user_groups = c.auth_user.user_groups_admin or c.can_create_user_group
            # delegated admin can create, or manage some objects
            c.is_delegated_admin = repositories or repository_groups or user_groups
        return c

    def _get_template_context(self, tmpl_args, **kwargs):
        """Wrap the context into the dict shape pyramid renderers expect."""
        local_tmpl_args = {"defaults": {}, "errors": {}, "c": tmpl_args}
        local_tmpl_args.update(kwargs)
        return local_tmpl_args

    def load_default_context(self):
        """
        example:

        def load_default_context(self):
            c = self._get_local_tmpl_context()
            c.custom_var = 'foobar'

            return c
        """
        raise NotImplementedError("Needs implementation in view class")
|
271 | 269 | |
|
272 | 270 | |
|
class RepoAppView(BaseAppView):
    """Base view for pages operating on a single repository."""

    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_repo = request.db_repo
        self.db_repo_name = self.db_repo.repo_name
        self.db_repo_pull_requests = ScmModel().get_pull_requests(self.db_repo)
        self.db_repo_artifacts = ScmModel().get_artifacts(self.db_repo)
        self.db_repo_patterns = IssueTrackerSettingsModel(repo=self.db_repo)

    def _handle_missing_requirements(self, error):
        # report only; the caller decides how to redirect
        log.error(
            "Requirements are missing for repository %s: %s",
            self.db_repo_name,
            safe_str(error),
        )

    def _prepare_and_set_clone_url(self, c):
        """Populate ``c`` with http / by-id / ssh clone URLs for the current user."""
        username = ""
        if self._rhodecode_user.username != User.DEFAULT_USER:
            username = self._rhodecode_user.username

        c.clone_repo_url = self.db_repo.clone_url(
            user=username, uri_tmpl=c.clone_uri_tmpl
        )
        c.clone_repo_url_id = self.db_repo.clone_url(
            user=username, uri_tmpl=c.clone_uri_id_tmpl
        )
        c.clone_repo_url_ssh = self.db_repo.clone_url(
            uri_tmpl=c.clone_uri_ssh_tmpl, ssh=True
        )

    def _get_local_tmpl_context(self, include_app_defaults=True):
        """Extend the base context with repo-specific vars; redirects away
        when the repository cannot be loaded from the filesystem."""
        _ = self.request.translate
        c = super()._get_local_tmpl_context(include_app_defaults=include_app_defaults)

        # register common vars for this type of view
        c.rhodecode_db_repo = self.db_repo
        c.repo_name = self.db_repo_name
        c.repository_pull_requests = self.db_repo_pull_requests
        c.repository_artifacts = self.db_repo_artifacts
        c.repository_is_user_following = ScmModel().is_following_repo(
            self.db_repo_name, self._rhodecode_user.user_id
        )
        self.path_filter = PathFilter(None)

        c.repository_requirements_missing = {}
        try:
            self.rhodecode_vcs_repo = self.db_repo.scm_instance()
            # NOTE(marcink): comparison to None, since computing __bool__
            # on the repo object is expensive
            if self.rhodecode_vcs_repo is not None:
                path_perms = self.rhodecode_vcs_repo.get_path_permissions(
                    c.auth_user.username
                )
                self.path_filter = PathFilter(path_perms)
        except RepositoryRequirementError as e:
            c.repository_requirements_missing = {"error": str(e)}
            self._handle_missing_requirements(e)
            self.rhodecode_vcs_repo = None

        c.path_filter = self.path_filter  # used by atom_feed_entry.mako

        if self.rhodecode_vcs_repo is None:
            # unable to fetch this repo as vcs instance, report back to user
            log.debug(
                "Repository was not found on filesystem, check if it exists or is not damaged"
            )
            h.flash(
                _(
                    "The repository `%(repo_name)s` cannot be loaded in filesystem. "
                    "Please check if it exist, or is not damaged."
                )
                % {"repo_name": c.repo_name},
                category="error",
                ignore_duplicate=True,
            )
            if c.repository_requirements_missing:
                route = self.request.matched_route.name
                if route.startswith(("edit_repo", "repo_summary")):
                    # allow summary and edit repo on missing requirements
                    return c

                raise HTTPFound(
                    h.route_path("repo_summary", repo_name=self.db_repo_name)
                )

            else:  # redirect if we don't show missing requirements
                raise HTTPFound(h.route_path("home"))

        c.has_origin_repo_read_perm = False
        if self.db_repo.fork:
            c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
                "repository.write", "repository.read", "repository.admin"
            )(self.db_repo.fork.repo_name, "summary fork link")

        return c

    def _get_f_path_unchecked(self, matchdict, default=None):
        """
        Should only be used by redirects, everything else should call _get_f_path
        """
        f_path = matchdict.get("f_path")
        if f_path:
            # fix for multiple initial slashes that causes errors for GIT
            return f_path.lstrip("/")

        return default

    def _get_f_path(self, matchdict, default=None):
        """Extract ``f_path`` and enforce path ACL permissions on it."""
        f_path_match = self._get_f_path_unchecked(matchdict, default)
        return self.path_filter.assert_path_permissions(f_path_match)

    def _get_general_setting(self, target_repo, settings_key, default=False):
        """Read one key from the repo's general VCS settings."""
        settings_model = VcsSettingsModel(repo=target_repo)
        settings = settings_model.get_general_settings()
        return settings.get(settings_key, default)

    def _get_repo_setting(self, target_repo, settings_key, default=False):
        """Read one key from the repo's inherited settings."""
        settings_model = VcsSettingsModel(repo=target_repo)
        settings = settings_model.get_repo_settings_inherited()
        return settings.get(settings_key, default)

    def _get_readme_data(self, db_repo, renderer_type, commit_id=None, path="/"):
        """
        Locate and render the README for ``db_repo`` at ``path``; the rendered
        result is cached per repository. Returns a ``(readme_html,
        readme_filename)`` pair, both ``None`` when nothing was found.
        """
        log.debug("Looking for README file at path %s", path)
        if commit_id:
            landing_commit_id = commit_id
        else:
            landing_commit = db_repo.get_landing_commit()
            if isinstance(landing_commit, EmptyCommit):
                # empty repository -- nothing to render
                return None, None
            landing_commit_id = landing_commit.raw_id

        cache_namespace_uid = f"repo.{db_repo.repo_id}"
        region = rc_cache.get_or_create_region(
            "cache_repo", cache_namespace_uid, use_async_runner=False
        )
        start = time.time()

        # NOTE: the function name participates in the cache key, keep it stable
        @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
        def generate_repo_readme(
            repo_id, _commit_id, _repo_name, _readme_search_path, _renderer_type
        ):
            readme_data = None
            readme_filename = None

            commit = db_repo.get_commit(_commit_id)
            log.debug("Searching for a README file at commit %s.", _commit_id)
            readme_node = ReadmeFinder(_renderer_type).search(
                commit, path=_readme_search_path
            )

            if readme_node:
                log.debug("Found README node: %s", readme_node)

                relative_urls = {
                    "raw": h.route_path(
                        "repo_file_raw",
                        repo_name=_repo_name,
                        commit_id=commit.raw_id,
                        f_path=readme_node.path,
                    ),
                    "standard": h.route_path(
                        "repo_files",
                        repo_name=_repo_name,
                        commit_id=commit.raw_id,
                        f_path=readme_node.path,
                    ),
                }

                readme_data = self._render_readme_or_none(
                    commit, readme_node, relative_urls
                )
                readme_filename = readme_node.str_path

            return readme_data, readme_filename

        readme_data, readme_filename = generate_repo_readme(
            db_repo.repo_id,
            landing_commit_id,
            db_repo.repo_name,
            path,
            renderer_type,
        )

        compute_time = time.time() - start
        log.debug(
            "Repo README for path %s generated and computed in %.4fs",
            path,
            compute_time,
        )
        return readme_data, readme_filename

    def _render_readme_or_none(self, commit, readme_node, relative_urls):
        """Render the README node to HTML; returns None when rendering fails."""
        log.debug("Found README file `%s` rendering...", readme_node.path)
        renderer = MarkupRenderer()
        try:
            html_source = renderer.render(
                readme_node.str_content, filename=readme_node.path
            )
            if relative_urls:
                return relative_links(html_source, relative_urls)
            return html_source
        except Exception:
            log.exception("Exception while trying to render the README")

    def get_recache_flag(self):
        """True when the request carries any cache-busting GET flag."""
        return any(
            str2bool(self.request.GET.get(flag_name))
            for flag_name in ("force_recache", "force-recache", "no-cache")
        )

    def get_commit_preload_attrs(cls):
        # NOTE(review): first parameter is named `cls` but there is no
        # @classmethod decorator -- it works because the parameter is unused;
        # confirm intent before changing.
        return [
            "author",
            "branch",
            "date",
            "message",
            "parents",
            "obsolete",
            "phase",
            "hidden",
        ]
|
502 | 500 | |
|
503 | 501 | |
|
class PathFilter(object):
    """Filters file paths and diff patchsets through a per-user ACL checker."""

    # Expects an instance of BasePathPermissionChecker or None
    def __init__(self, permission_checker):
        self.permission_checker = permission_checker

    def assert_path_permissions(self, path):
        """Return ``path`` when access is allowed, otherwise raise HTTPForbidden."""
        if not self.path_access_allowed(path):
            raise HTTPForbidden()
        return path

    def path_access_allowed(self, path):
        """Check the ACL for ``path``; always allowed when no checker is set."""
        log.debug("Checking ACL permissions for PathFilter for `%s`", path)
        if not self.permission_checker:
            log.debug("ACL permissions checker not enabled, skipping...")
            return True

        has_access = path and self.permission_checker.has_access(path)
        log.debug(
            "ACL Permissions checker enabled, ACL Check has_access: %s", has_access
        )
        return has_access

    def filter_patchset(self, patchset):
        """Drop patches the checker forbids; returns (patchset, had_filtered)."""
        if not self.permission_checker or not patchset:
            return patchset, False

        kept = []
        dropped_any = False
        for patch in patchset:
            filename = patch.get("filename", None)
            if filename and not self.permission_checker.has_access(filename):
                dropped_any = True
            else:
                kept.append(patch)

        if not dropped_any:
            return patchset, False

        # preserve the LimitedDiffContainer wrapper when the input had one
        if isinstance(patchset, diffs.LimitedDiffContainer):
            kept = diffs.LimitedDiffContainer(
                patchset.diff_limit, patchset.cur_diff_size, kept
            )
        return kept, True

    def render_patchset_filtered(
        self, diffset, patchset, source_ref=None, target_ref=None
    ):
        """Render a patchset after ACL filtering, flagging hidden changes."""
        visible_patchset, has_hidden_changes = self.filter_patchset(patchset)
        result = diffset.render_patchset(
            visible_patchset, source_ref=source_ref, target_ref=target_ref
        )
        result.has_hidden_changes = has_hidden_changes
        return result

    def get_raw_patch(self, diff_processor):
        """Raw patch text, unless user-specific filters would leak hidden files."""
        checker = self.permission_checker
        if checker is None or checker.has_full_access:
            return diff_processor.as_raw()
        return "# Repository has user-specific filters, raw patch generation is disabled."

    @property
    def is_enabled(self):
        return self.permission_checker is not None
|
567 | 565 | |
|
568 | 566 | |
|
class RepoGroupAppView(BaseAppView):
    """Base view for pages operating on a single repository group."""

    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_repo_group = request.db_repo_group
        self.db_repo_group_name = self.db_repo_group.group_name

    def _get_local_tmpl_context(self, include_app_defaults=True):
        _ = self.request.translate
        c = super()._get_local_tmpl_context(include_app_defaults=include_app_defaults)
        c.repo_group = self.db_repo_group
        return c

    def _revoke_perms_on_yourself(self, form_result):
        """
        True when the submitted permission changes would set the current
        user's own permission on this group to something below group.admin.
        """
        my_user_id = self._rhodecode_user.user_id

        def own_entries(key):
            # entries are (user_id, permission, ...) sequences
            return [u for u in form_result[key] if my_user_id == int(u[0])]

        admin_perm = "group.admin"
        for entries in (
            own_entries("perm_updates"),
            own_entries("perm_additions"),
            own_entries("perm_deletions"),
        ):
            if entries and entries[0][1] != admin_perm:
                return True
        return False
|
608 | 606 | |
|
609 | 607 | |
|
class UserGroupAppView(BaseAppView):
    """Base view for pages operating on a single user group."""

    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_user_group = request.db_user_group
        self.db_user_group_name = self.db_user_group.users_group_name
|
615 | 613 | |
|
616 | 614 | |
|
class UserAppView(BaseAppView):
    """Base view for pages operating on a single user account."""

    def __init__(self, context, request):
        super().__init__(context, request)
        self.db_user = request.db_user
        self.db_user_id = self.db_user.user_id

        _ = self.request.translate
        # the built-in default user may only be edited where explicitly supported
        if (
            not request.db_user_supports_default
            and self.db_user.username == User.DEFAULT_USER
        ):
            h.flash(
                _("Editing user `{}` is disabled.".format(User.DEFAULT_USER)),
                category="warning",
            )
            raise HTTPFound(h.route_path("users"))
|
631 | 629 | |
|
632 | 630 | |
|
class DataGridAppView(object):
    """
    Common class to have re-usable grid rendering components
    """

    def _extract_ordering(self, request, column_map=None):
        """Read datatables ordering params; returns (search_q, order_by, order_dir)."""
        column_map = column_map or {}
        column_index = safe_int(request.GET.get("order[0][column]"))
        order_dir = request.GET.get("order[0][dir]", "desc")
        order_by = request.GET.get(f"columns[{column_index}][data][sort]", "name_raw")

        # translate datatable to DB columns
        order_by = column_map.get(order_by) or order_by

        search_q = request.GET.get("search[value]")
        return search_q, order_by, order_dir

    def _extract_chunk(self, request):
        """Read datatables paging params; returns (draw, start, length)."""
        draw = safe_int(request.GET.get("draw"))
        start = safe_int(request.GET.get("start"), 0)
        length = safe_int(request.GET.get("length"), 25)
        return draw, start, length

    def _get_order_col(self, order_by, model):
        """Resolve an order-by name to a model column; None when unknown."""
        if not isinstance(order_by, str):
            return order_by
        try:
            return operator.attrgetter(order_by)(model)
        except AttributeError:
            return None
|
664 | 662 | |
|
665 | 663 | |
|
class BaseReferencesView(RepoAppView):
    """
    Base for reference view for branches, tags and bookmarks.
    """

    def load_default_context(self):
        c = self._get_local_tmpl_context()
        return c

    def load_refs_context(self, ref_items, partials_template):
        """Build datatable rows for ``ref_items`` rendered via ``partials_template``."""
        _render = self.request.get_partial_renderer(partials_template)
        pre_load = ["author", "date", "message", "parents"]

        is_svn = h.is_svn(self.rhodecode_vcs_repo)
        is_hg = h.is_hg(self.rhodecode_vcs_repo)

        format_ref_id = get_format_ref_id(self.rhodecode_vcs_repo)

        closed_refs = {}
        if is_hg:
            closed_refs = self.rhodecode_vcs_repo.branches_closed

        data = []
        for ref_name, commit_id in ref_items:
            commit = self.rhodecode_vcs_repo.get_commit(
                commit_id=commit_id, pre_load=pre_load
            )
            closed = ref_name in closed_refs

            # TODO: johbo: Unify generation of reference links
            # refs containing a slash (and all svn refs) must be addressed
            # by commit id, otherwise the ref name itself is usable
            use_commit_id = "/" in ref_name or is_svn
            files_url = h.route_path(
                "repo_files",
                repo_name=self.db_repo_name,
                f_path=ref_name if is_svn else "",
                commit_id=commit_id if use_commit_id else ref_name,
                _query=dict(at=ref_name),
            )

            data.append(
                {
                    "name": _render("name", ref_name, files_url, closed),
                    "name_raw": ref_name,
                    "date": _render("date", commit.date),
                    "date_raw": datetime_to_time(commit.date),
                    "author": _render("author", commit.author),
                    "commit": _render(
                        "commit", commit.message, commit.raw_id, commit.idx
                    ),
                    "commit_raw": commit.idx,
                    "compare": _render(
                        "compare", format_ref_id(ref_name, commit.raw_id)
                    ),
                }
            )

        return data
|
734 | 732 | |
|
735 | 733 | |
|
class RepoRoutePredicate(object):
    """
    Route predicate that resolves ``repo_name`` from the route match into a
    database repository. On success the repository is cached on the request
    as ``db_repo``/``db_repo_name``; repositories still in the PENDING
    ("creating") state are redirected to the repo-creating page.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f"repo_route = {self.val}"

    phash = text

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        repo_name = info["match"]["repo_name"]

        name_parts = repo_name.split("/")
        slug_parts = [repo_name_slug(part) for part in name_parts]

        if name_parts != slug_parts:
            # short-skip if the repo-name doesn't follow slug rule
            log.warning(
                "repo_name: %s is different than slug %s", name_parts, slug_parts
            )
            return False

        def redirect_if_creating(route_info, db_repo):
            # the delete view must stay reachable so repositories stuck in
            # the creating state can still be removed
            skip_views = ["edit_repo_advanced_delete"]
            if route_info["route"].name in skip_views:
                return

            if db_repo.repo_state in [repo.Repository.STATE_PENDING]:
                raise HTTPFound(
                    request.route_path(
                        "repo_creating", repo_name=db_repo.repo_name
                    )
                )

        repo_model = repo.RepoModel()
        # prefer the name lookup; fall back to a lookup by numeric id
        db_match = (
            repo_model.get_by_repo_name(repo_name, cache=False)
            or repo_model.get_repo_by_id(repo_name)
        )

        if not db_match:
            return False

        # register this as request object we can re-use later
        request.db_repo = db_match
        request.db_repo_name = db_match.repo_name
        redirect_if_creating(info, db_match)
        return True
|
796 | 794 | |
|
797 | 795 | |
|
class RepoForbidArchivedRoutePredicate(object):
    """
    Route predicate that blocks a view for archived repositories: flashes a
    warning and redirects to the repository summary page instead.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f"repo_forbid_archived = {self.val}"

    phash = text

    def __call__(self, info, request):
        _ = request.translate
        db_repo = request.db_repo

        log.debug(
            "%s checking if archived flag for repo for %s",
            self.__class__.__name__,
            db_repo.repo_name,
        )

        # non-archived repos pass straight through
        if not db_repo.archived:
            return True

        log.warning(
            "Current view is not supported for archived repo:%s",
            db_repo.repo_name,
        )
        h.flash(
            h.literal(_("Action not supported for archived repository.")),
            category="warning",
        )
        raise HTTPFound(
            request.route_path("repo_summary", repo_name=db_repo.repo_name)
        )
|
832 | 830 | |
|
833 | 831 | |
|
class RepoTypeRoutePredicate(object):
    """
    Route predicate that only matches repositories whose VCS backend type
    is in the accepted list (defaults to all supported backends).
    """

    def __init__(self, val, config):
        # no explicit value means: accept every supported backend
        self.val = val or ["hg", "git", "svn"]

    def text(self):
        return f"repo_accepted_type = {self.val}"

    phash = text

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        db_repo = request.db_repo
        repo_type = db_repo.repo_type

        log.debug(
            "%s checking repo type for %s in %s",
            self.__class__.__name__,
            repo_type,
            self.val,
        )

        if repo_type not in self.val:
            log.warning(
                "Current view is not supported for repo type:%s",
                repo_type,
            )
            return False
        return True
|
865 | 863 | |
|
866 | 864 | |
|
class RepoGroupRoutePredicate(object):
    """
    Route predicate that resolves ``repo_group_name`` from the route match
    into a database repository group and caches it on the request.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f"repo_group_route = {self.val}"

    phash = text

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        group_name = info["match"]["repo_group_name"]

        name_parts = group_name.split("/")
        slug_parts = [repo_name_slug(part) for part in name_parts]

        if name_parts != slug_parts:
            # short-skip if the repo-name doesn't follow slug rule
            log.warning(
                "repo_group_name: %s is different than slug %s",
                name_parts,
                slug_parts,
            )
            return False

        db_match = repo_group.RepoGroupModel().get_by_group_name(
            group_name, cache=False)

        if not db_match:
            return False

        # register this as request object we can re-use later
        request.db_repo_group = db_match
        request.db_repo_group_name = db_match.group_name
        return True
|
906 | 904 | |
|
907 | 905 | |
|
class UserGroupRoutePredicate(object):
    """
    Route predicate that resolves ``user_group_id`` from the route match
    into a database user group and caches it on the request.
    """

    def __init__(self, val, config):
        self.val = val

    def text(self):
        return f"user_group_route = {self.val}"

    phash = text

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        db_match = user_group.UserGroup().get(
            info["match"]["user_group_id"], cache=False)

        if not db_match:
            return False

        # register this as request object we can re-use later
        request.db_user_group = db_match
        return True
|
932 | 930 | |
|
933 | 931 | |
|
class UserRoutePredicateBase(object):
    """
    Shared machinery for user-id based route predicates: resolves
    ``user_id`` from the route match into a database user and caches it on
    the request, together with the subclass's ``supports_default`` flag.
    """

    # subclasses declare whether the matched view may operate on the
    # "default" (anonymous) user
    supports_default = None

    def __init__(self, val, config):
        self.val = val

    def text(self):
        raise NotImplementedError()

    def __call__(self, info, request):
        if hasattr(request, "vcs_call"):
            # skip vcs calls
            return

        db_match = user.User().get(info["match"]["user_id"], cache=False)

        if not db_match:
            return False

        # register this as request object we can re-use later
        request.db_user = db_match
        request.db_user_supports_default = self.supports_default
        return True
|
959 | 957 | |
|
960 | 958 | |
|
class UserRoutePredicate(UserRoutePredicateBase):
    """User route predicate for views that exclude the "default" user."""

    supports_default = False

    def text(self):
        return f"user_route = {self.val}"

    phash = text
|
968 | 966 | |
|
969 | 967 | |
|
class UserRouteWithDefaultPredicate(UserRoutePredicateBase):
    """User route predicate for views that also accept the "default" user."""

    supports_default = True

    def text(self):
        return f"user_with_default_route = {self.val}"

    phash = text
|
977 | 975 | |
|
978 | 976 | |
|
def includeme(config):
    """Register all custom route predicates on the Pyramid configurator."""
    # (predicate name, factory) pairs; registration order is preserved
    predicates = [
        ("repo_route", RepoRoutePredicate),
        ("repo_accepted_types", RepoTypeRoutePredicate),
        ("repo_forbid_when_archived", RepoForbidArchivedRoutePredicate),
        ("repo_group_route", RepoGroupRoutePredicate),
        ("user_group_route", UserGroupRoutePredicate),
        ("user_route_with_default", UserRouteWithDefaultPredicate),
        ("user_route", UserRoutePredicate),
    ]
    for name, factory in predicates:
        config.add_route_predicate(name, factory)
@@ -1,67 +1,67 b'' | |||
|
1 | 1 | import pytest |
|
2 | 2 | |
|
3 | 3 | from rhodecode.model.meta import Session |
|
4 | 4 | from rhodecode.tests.fixture import Fixture |
|
5 | 5 | from rhodecode.tests.routes import route_path |
|
6 | 6 | from rhodecode.model.settings import SettingsModel |
|
7 | 7 | |
|
8 | 8 | fixture = Fixture() |
|
9 | 9 | |
|
10 | 10 | |
|
@pytest.mark.usefixtures('app')
class Test2FA(object):
    """Functional tests for the two-factor-authentication login flow."""

    @classmethod
    def setup_class(cls):
        cls.password = 'valid-one'

    @classmethod
    def teardown_class(cls):
        # undo the global 2FA enforcement toggled by the tests below
        SettingsModel().create_or_update_setting('auth_rhodecode_global_2fa', False)

    def _do_login(self, username):
        # submit the login form for the given user
        self.app.post(
            route_path('login'),
            {'username': username,
             'password': self.password})

    def test_redirect_to_2fa_setup_if_enabled_for_user(self, user_util):
        user = user_util.create_user(password=self.password)
        user.has_enabled_2fa = True
        self._do_login(user.username)

        # 2FA enabled but not configured yet -> pushed to the setup page
        response = self.app.get('/')
        assert response.status_code == 302
        assert response.location.endswith(route_path('setup_2fa'))

    def test_redirect_to_2fa_check_if_2fa_configured(self, user_util):
        user = user_util.create_user(password=self.password)
        user.has_enabled_2fa = True
        user.init_secret_2fa()
        Session().add(user)
        Session().commit()
        self._do_login(user.username)

        # 2FA configured -> pushed to the TOTP check page
        response = self.app.get('/')
        assert response.status_code == 302
        assert response.location.endswith(route_path('check_2fa'))

    def test_2fa_recovery_codes_works_only_once(self, user_util):
        user = user_util.create_user(password=self.password)
        user.has_enabled_2fa = True
        user.init_secret_2fa()
        recovery_code = user.init_2fa_recovery_codes()[0]
        Session().add(user)
        Session().commit()
        self._do_login(user.username)

        # first use of a recovery code passes the check
        response = self.app.post(route_path('check_2fa'), {'totp': recovery_code})
        assert response.status_code == 302
        # re-using the very same code must be rejected
        response = self.app.post(route_path('check_2fa'), {'totp': recovery_code})
        response.mustcontain('Code is invalid. Try again!')

    def test_2fa_state_when_forced_by_admin(self, user_util):
        user = user_util.create_user(password=self.password)
        user.has_enabled_2fa = False
        # global admin enforcement overrides the per-user flag
        SettingsModel().create_or_update_setting('auth_rhodecode_global_2fa', True)
        assert user.has_enabled_2fa
@@ -1,541 +1,554 b'' | |||
|
1 | 1 | # Copyright (C) 2016-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import time |
|
20 | 20 | import json |
|
21 | 21 | import pyotp |
|
22 | 22 | import qrcode |
|
23 | 23 | import collections |
|
24 | 24 | import datetime |
|
25 | 25 | import formencode |
|
26 | 26 | import formencode.htmlfill |
|
27 | 27 | import logging |
|
28 | 28 | import urllib.parse |
|
29 | 29 | import requests |
|
30 | 30 | from io import BytesIO |
|
31 | 31 | from base64 import b64encode |
|
32 | 32 | |
|
33 | 33 | from pyramid.renderers import render |
|
34 | 34 | from pyramid.response import Response |
|
35 | 35 | from pyramid.httpexceptions import HTTPFound |
|
36 | 36 | |
|
37 | 37 | |
|
38 | 38 | from rhodecode.apps._base import BaseAppView |
|
39 | 39 | from rhodecode.authentication.base import authenticate, HTTP_TYPE |
|
40 | 40 | from rhodecode.authentication.plugins import auth_rhodecode |
|
41 | 41 | from rhodecode.events import UserRegistered, trigger |
|
42 | 42 | from rhodecode.lib import helpers as h |
|
43 | 43 | from rhodecode.lib import audit_logger |
|
44 | 44 | from rhodecode.lib.auth import ( |
|
45 | 45 | AuthUser, HasPermissionAnyDecorator, CSRFRequired, LoginRequired, NotAnonymous) |
|
46 | 46 | from rhodecode.lib.base import get_ip_addr |
|
47 | 47 | from rhodecode.lib.exceptions import UserCreationError |
|
48 | 48 | from rhodecode.lib.utils2 import safe_str |
|
49 | 49 | from rhodecode.model.db import User, UserApiKeys |
|
50 | 50 | from rhodecode.model.forms import LoginForm, RegisterForm, PasswordResetForm, TOTPForm |
|
51 | 51 | from rhodecode.model.meta import Session |
|
52 | 52 | from rhodecode.model.auth_token import AuthTokenModel |
|
53 | 53 | from rhodecode.model.settings import SettingsModel |
|
54 | 54 | from rhodecode.model.user import UserModel |
|
55 | 55 | from rhodecode.translation import _ |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | log = logging.getLogger(__name__) |
|
59 | 59 | |
|
60 | 60 | CaptchaData = collections.namedtuple( |
|
61 | 61 | 'CaptchaData', 'active, private_key, public_key') |
|
62 | 62 | |
|
63 | 63 | |
|
def store_user_in_session(session, user_identifier, remember=False):
    """
    Mark the given user as authenticated in the session and persist the
    login.

    :param session: session object of the current request
    :param user_identifier: username or primary email used to look the user up
    :param remember: when True, extend the session cookie lifetime to one year
    :return: list of ``('Set-Cookie', value)`` header tuples to attach to the
        response, or None when no cookie needs to be (re)sent
    """
    user = User.get_by_username_or_primary_email(user_identifier)
    auth_user = AuthUser(user.user_id)
    auth_user.set_authenticated()
    # serialized auth data that lives inside the session cookie store
    cs = auth_user.get_cookie_store()
    session['rhodecode_user'] = cs
    user.update_lastlogin()
    Session().commit()

    # If they want to be remembered, update the cookie
    if remember:
        _year = (datetime.datetime.now() +
                 datetime.timedelta(seconds=60 * 60 * 24 * 365))
        session._set_cookie_expires(_year)

    session.save()

    # mask the password entry before it reaches the log
    safe_cs = cs.copy()
    safe_cs['password'] = '****'
    log.info('user %s is now authenticated and stored in '
             'session, session attrs %s', user_identifier, safe_cs)

    # dumps session attrs back to cookie
    session._update_cookie_out()
    # we set new cookie
    headers = None
    if session.request['set_cookie']:
        # send set-cookie headers back to response to update cookie
        headers = [('Set-Cookie', session.request['cookie_out'])]
    return headers
|
94 | 94 | |
|
95 | 95 | |
|
def get_came_from(request):
    """
    Extract and sanitize the ``came_from`` redirect target from GET params.

    Rejects URLs with a non-http(s) scheme, a foreign host (open-redirect
    protection) and CR/LF characters (header-injection protection), falling
    back to the home page route in every suspicious case.

    :param request: current pyramid request
    :return: a safe redirect target, never empty
    """
    came_from = safe_str(request.GET.get('came_from', ''))
    parsed = urllib.parse.urlparse(came_from)

    allowed_schemes = ['http', 'https']
    default_came_from = h.route_path('home')
    if parsed.scheme and parsed.scheme not in allowed_schemes:
        log.error('Suspicious URL scheme detected %s for url %s',
                  parsed.scheme, parsed)
        came_from = default_came_from
    elif parsed.netloc and request.host != parsed.netloc:
        log.error('Suspicious NETLOC detected %s for url %s server url '
                  'is: %s', parsed.netloc, parsed, request.host)
        came_from = default_came_from
    elif any(bad_char in came_from for bad_char in ('\r', '\n')):
        # CR/LF in a redirect target would allow response-header injection.
        # NOTE: the original message ended with a dangling 'server url '
        # fragment copied from the netloc branch with no matching argument.
        log.error('Header injection detected `%s` for url %s',
                  parsed.path, parsed)
        came_from = default_came_from

    return came_from or default_came_from
|
116 | 116 | |
|
117 | 117 | |
|
118 | 118 | class LoginView(BaseAppView): |
|
119 | 119 | |
|
120 | 120 | def load_default_context(self): |
|
121 | 121 | c = self._get_local_tmpl_context() |
|
122 | 122 | c.came_from = get_came_from(self.request) |
|
123 | 123 | return c |
|
124 | 124 | |
|
125 | 125 | def _get_captcha_data(self): |
|
126 | 126 | settings = SettingsModel().get_all_settings() |
|
127 | 127 | private_key = settings.get('rhodecode_captcha_private_key') |
|
128 | 128 | public_key = settings.get('rhodecode_captcha_public_key') |
|
129 | 129 | active = bool(private_key) |
|
130 | 130 | return CaptchaData( |
|
131 | 131 | active=active, private_key=private_key, public_key=public_key) |
|
132 | 132 | |
|
133 | 133 | def validate_captcha(self, private_key): |
|
134 | 134 | |
|
135 | 135 | captcha_rs = self.request.POST.get('g-recaptcha-response') |
|
136 | 136 | url = "https://www.google.com/recaptcha/api/siteverify" |
|
137 | 137 | params = { |
|
138 | 138 | 'secret': private_key, |
|
139 | 139 | 'response': captcha_rs, |
|
140 | 140 | 'remoteip': get_ip_addr(self.request.environ) |
|
141 | 141 | } |
|
142 | 142 | verify_rs = requests.get(url, params=params, verify=True, timeout=60) |
|
143 | 143 | verify_rs = verify_rs.json() |
|
144 | 144 | captcha_status = verify_rs.get('success', False) |
|
145 | 145 | captcha_errors = verify_rs.get('error-codes', []) |
|
146 | 146 | if not isinstance(captcha_errors, list): |
|
147 | 147 | captcha_errors = [captcha_errors] |
|
148 | 148 | captcha_errors = ', '.join(captcha_errors) |
|
149 | 149 | captcha_message = '' |
|
150 | 150 | if captcha_status is False: |
|
151 | 151 | captcha_message = "Bad captcha. Errors: {}".format( |
|
152 | 152 | captcha_errors) |
|
153 | 153 | |
|
154 | 154 | return captcha_status, captcha_message |
|
155 | 155 | |
|
    def login(self):
        """
        Render the login page (GET). Redirects already-authenticated users
        straight to ``came_from`` and also attempts header-based (pre)
        authentication before showing the form.
        """
        c = self.load_default_context()
        auth_user = self._rhodecode_user

        # redirect if already logged in
        if (auth_user.is_authenticated and
                not auth_user.is_default and auth_user.ip_allowed):
            raise HTTPFound(c.came_from)

        # check if we use headers plugin, and try to login using it.
        try:
            log.debug('Running PRE-AUTH for headers based authentication')
            auth_info = authenticate(
                '', '', self.request.environ, HTTP_TYPE, skip_missing=True)
            if auth_info:
                # header auth succeeded: persist the session and redirect
                headers = store_user_in_session(
                    self.session, auth_info.get('username'))
                raise HTTPFound(c.came_from, headers=headers)
        except UserCreationError as e:
            # auth plugins that auto-create users may fail; surface the
            # reason to the user instead of erroring out
            log.error(e)
            h.flash(e, category='error')

        return self._get_template_context(c)
|
179 | 179 | |
|
    def login_post(self):
        """
        Handle the login form POST: validate credentials, flag users that
        still need to pass the 2FA check, store the authenticated session
        and audit-log the attempt (success or failure).
        """
        c = self.load_default_context()

        login_form = LoginForm(self.request.translate)()

        try:
            # drop any pre-existing session before authenticating
            self.session.invalidate()
            form_result = login_form.to_python(self.request.POST)
            # form checks for username/password, now we're authenticated
            username = form_result['username']
            if (user := User.get_by_username_or_primary_email(username)).has_enabled_2fa:
                # user still has to pass the TOTP/recovery-code check
                # before being fully let in
                user.has_check_2fa_flag = True

            headers = store_user_in_session(
                self.session,
                user_identifier=username,
                remember=form_result['remember'])
            log.debug('Redirecting to "%s" after login.', c.came_from)

            audit_user = audit_logger.UserWrap(
                username=self.request.POST.get('username'),
                ip_addr=self.request.remote_addr)
            action_data = {'user_agent': self.request.user_agent}
            audit_logger.store_web(
                'user.login.success', action_data=action_data,
                user=audit_user, commit=True)

            raise HTTPFound(c.came_from, headers=headers)
        except formencode.Invalid as errors:
            defaults = errors.value
            # remove password from filling in form again
            defaults.pop('password', None)
            render_ctx = {
                'errors': errors.error_dict,
                'defaults': defaults,
            }

            # audit-log the failed attempt as well
            audit_user = audit_logger.UserWrap(
                username=self.request.POST.get('username'),
                ip_addr=self.request.remote_addr)
            action_data = {'user_agent': self.request.user_agent}
            audit_logger.store_web(
                'user.login.failure', action_data=action_data,
                user=audit_user, commit=True)
            return self._get_template_context(c, **render_ctx)

        except UserCreationError as e:
            # headers auth or other auth functions that create users on
            # the fly can throw this exception signaling that there's issue
            # with user creation, explanation should be provided in
            # Exception itself
            h.flash(e, category='error')
            return self._get_template_context(c)
|
233 | 234 | @CSRFRequired() |
|
234 | 235 | def logout(self): |
|
235 | 236 | auth_user = self._rhodecode_user |
|
236 | 237 | log.info('Deleting session for user: `%s`', auth_user) |
|
237 | 238 | |
|
238 | 239 | action_data = {'user_agent': self.request.user_agent} |
|
239 | 240 | audit_logger.store_web( |
|
240 | 241 | 'user.logout', action_data=action_data, |
|
241 | 242 | user=auth_user, commit=True) |
|
242 | 243 | self.session.delete() |
|
243 | 244 | return HTTPFound(h.route_path('home')) |
|
244 | 245 | |
|
245 | 246 | @HasPermissionAnyDecorator( |
|
246 | 247 | 'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate') |
|
247 | 248 | def register(self, defaults=None, errors=None): |
|
248 | 249 | c = self.load_default_context() |
|
249 | 250 | defaults = defaults or {} |
|
250 | 251 | errors = errors or {} |
|
251 | 252 | |
|
252 | 253 | settings = SettingsModel().get_all_settings() |
|
253 | 254 | register_message = settings.get('rhodecode_register_message') or '' |
|
254 | 255 | captcha = self._get_captcha_data() |
|
255 | 256 | auto_active = 'hg.register.auto_activate' in User.get_default_user()\ |
|
256 | 257 | .AuthUser().permissions['global'] |
|
257 | 258 | |
|
258 | 259 | render_ctx = self._get_template_context(c) |
|
259 | 260 | render_ctx.update({ |
|
260 | 261 | 'defaults': defaults, |
|
261 | 262 | 'errors': errors, |
|
262 | 263 | 'auto_active': auto_active, |
|
263 | 264 | 'captcha_active': captcha.active, |
|
264 | 265 | 'captcha_public_key': captcha.public_key, |
|
265 | 266 | 'register_message': register_message, |
|
266 | 267 | }) |
|
267 | 268 | return render_ctx |
|
268 | 269 | |
|
    @HasPermissionAnyDecorator(
        'hg.admin', 'hg.register.auto_activate', 'hg.register.manual_activate')
    def register_post(self):
        """
        Handle the registration form POST: validate the form (including the
        optional captcha), create the account, audit-log it, fire the
        UserRegistered event and redirect to the login page.
        """
        from rhodecode.authentication.plugins import auth_rhodecode

        self.load_default_context()
        captcha = self._get_captcha_data()
        # whether new accounts are activated without admin intervention
        auto_active = 'hg.register.auto_activate' in User.get_default_user()\
            .AuthUser().permissions['global']

        # default: account backed by the built-in rhodecode auth plugin
        extern_name = auth_rhodecode.RhodeCodeAuthPlugin.uid
        extern_type = auth_rhodecode.RhodeCodeAuthPlugin.uid

        register_form = RegisterForm(self.request.translate)()
        try:

            form_result = register_form.to_python(self.request.POST)
            form_result['active'] = auto_active
            external_identity = self.request.POST.get('external_identity')

            if external_identity:
                # account is backed by an external identity provider
                extern_name = external_identity
                extern_type = external_identity

            if captcha.active:
                captcha_status, captcha_message = self.validate_captcha(
                    captcha.private_key)

                if not captcha_status:
                    # surface the captcha failure as a form validation error
                    _value = form_result
                    _msg = _('Bad captcha')
                    error_dict = {'recaptcha_field': captcha_message}
                    raise formencode.Invalid(
                        _msg, _value, None, error_dict=error_dict)

            new_user = UserModel().create_registration(
                form_result, extern_name=extern_name, extern_type=extern_type)

            action_data = {'data': new_user.get_api_data(),
                           'user_agent': self.request.user_agent}

            if external_identity:
                action_data['external_identity'] = external_identity

            audit_user = audit_logger.UserWrap(
                username=new_user.username,
                user_id=new_user.user_id,
                ip_addr=self.request.remote_addr)

            audit_logger.store_web(
                'user.register', action_data=action_data,
                user=audit_user)

            # notify listeners (emails, integrations, ...) about the new user
            event = UserRegistered(user=new_user, session=self.session)
            trigger(event)
            h.flash(
                _('You have successfully registered with RhodeCode. You can log-in now.'),
                category='success')
            if external_identity:
                h.flash(
                    _('Please use the {identity} button to log-in').format(
                        identity=external_identity),
                    category='success')
            Session().commit()

            redirect_ro = self.request.route_path('login')
            raise HTTPFound(redirect_ro)

        except formencode.Invalid as errors:
            # re-render the form; never echo passwords back
            errors.value.pop('password', None)
            errors.value.pop('password_confirmation', None)
            return self.register(
                defaults=errors.value, errors=errors.error_dict)

        except UserCreationError as e:
            # container auth or other auth functions that create users on
            # the fly can throw this exception signaling that there's issue
            # with user creation, explanation should be provided in
            # Exception itself
            h.flash(e, category='error')
            return self.register()
|
350 | 351 | |
|
351 | 352 | def password_reset(self): |
|
352 | 353 | c = self.load_default_context() |
|
353 | 354 | captcha = self._get_captcha_data() |
|
354 | 355 | |
|
355 | 356 | template_context = { |
|
356 | 357 | 'captcha_active': captcha.active, |
|
357 | 358 | 'captcha_public_key': captcha.public_key, |
|
358 | 359 | 'defaults': {}, |
|
359 | 360 | 'errors': {}, |
|
360 | 361 | } |
|
361 | 362 | |
|
362 | 363 | # always send implicit message to prevent from discovery of |
|
363 | 364 | # matching emails |
|
364 | 365 | msg = _('If such email exists, a password reset link was sent to it.') |
|
365 | 366 | |
|
366 | 367 | def default_response(): |
|
367 | 368 | log.debug('faking response on invalid password reset') |
|
368 | 369 | # make this take 2s, to prevent brute forcing. |
|
369 | 370 | time.sleep(2) |
|
370 | 371 | h.flash(msg, category='success') |
|
371 | 372 | return HTTPFound(self.request.route_path('reset_password')) |
|
372 | 373 | |
|
373 | 374 | if self.request.POST: |
|
374 | 375 | if h.HasPermissionAny('hg.password_reset.disabled')(): |
|
375 | 376 | _email = self.request.POST.get('email', '') |
|
376 | 377 | log.error('Failed attempt to reset password for `%s`.', _email) |
|
377 | 378 | h.flash(_('Password reset has been disabled.'), category='error') |
|
378 | 379 | return HTTPFound(self.request.route_path('reset_password')) |
|
379 | 380 | |
|
380 | 381 | password_reset_form = PasswordResetForm(self.request.translate)() |
|
381 | 382 | description = 'Generated token for password reset from {}'.format( |
|
382 | 383 | datetime.datetime.now().isoformat()) |
|
383 | 384 | |
|
384 | 385 | try: |
|
385 | 386 | form_result = password_reset_form.to_python( |
|
386 | 387 | self.request.POST) |
|
387 | 388 | user_email = form_result['email'] |
|
388 | 389 | |
|
389 | 390 | if captcha.active: |
|
390 | 391 | captcha_status, captcha_message = self.validate_captcha( |
|
391 | 392 | captcha.private_key) |
|
392 | 393 | |
|
393 | 394 | if not captcha_status: |
|
394 | 395 | _value = form_result |
|
395 | 396 | _msg = _('Bad captcha') |
|
396 | 397 | error_dict = {'recaptcha_field': captcha_message} |
|
397 | 398 | raise formencode.Invalid( |
|
398 | 399 | _msg, _value, None, error_dict=error_dict) |
|
399 | 400 | |
|
400 | 401 | # Generate reset URL and send mail. |
|
401 | 402 | user = User.get_by_email(user_email) |
|
402 | 403 | |
|
403 | 404 | # only allow rhodecode based users to reset their password |
|
404 | 405 | # external auth shouldn't allow password reset |
|
405 | 406 | if user and user.extern_type != auth_rhodecode.RhodeCodeAuthPlugin.uid: |
|
406 | 407 | log.warning('User %s with external type `%s` tried a password reset. ' |
|
407 | 408 | 'This try was rejected', user, user.extern_type) |
|
408 | 409 | return default_response() |
|
409 | 410 | |
|
410 | 411 | # generate password reset token that expires in 10 minutes |
|
411 | 412 | reset_token = UserModel().add_auth_token( |
|
412 | 413 | user=user, lifetime_minutes=10, |
|
413 | 414 | role=UserModel.auth_token_role.ROLE_PASSWORD_RESET, |
|
414 | 415 | description=description) |
|
415 | 416 | Session().commit() |
|
416 | 417 | |
|
417 | 418 | log.debug('Successfully created password recovery token') |
|
418 | 419 | password_reset_url = self.request.route_url( |
|
419 | 420 | 'reset_password_confirmation', |
|
420 | 421 | _query={'key': reset_token.api_key}) |
|
421 | 422 | UserModel().reset_password_link( |
|
422 | 423 | form_result, password_reset_url) |
|
423 | 424 | |
|
424 | 425 | action_data = {'email': user_email, |
|
425 | 426 | 'user_agent': self.request.user_agent} |
|
426 | 427 | audit_logger.store_web( |
|
427 | 428 | 'user.password.reset_request', action_data=action_data, |
|
428 | 429 | user=self._rhodecode_user, commit=True) |
|
429 | 430 | |
|
430 | 431 | return default_response() |
|
431 | 432 | |
|
432 | 433 | except formencode.Invalid as errors: |
|
433 | 434 | template_context.update({ |
|
434 | 435 | 'defaults': errors.value, |
|
435 | 436 | 'errors': errors.error_dict, |
|
436 | 437 | }) |
|
437 | 438 | if not self.request.POST.get('email'): |
|
438 | 439 | # case of empty email, we want to report that |
|
439 | 440 | return self._get_template_context(c, **template_context) |
|
440 | 441 | |
|
441 | 442 | if 'recaptcha_field' in errors.error_dict: |
|
442 | 443 | # case of failed captcha |
|
443 | 444 | return self._get_template_context(c, **template_context) |
|
444 | 445 | |
|
445 | 446 | return default_response() |
|
446 | 447 | |
|
447 | 448 | return self._get_template_context(c, **template_context) |
|
448 | 449 | |
|
449 | 450 | @LoginRequired() |
|
450 | 451 | @NotAnonymous() |
|
451 | 452 | def password_reset_confirmation(self): |
|
452 | 453 | self.load_default_context() |
|
453 | 454 | if self.request.GET and self.request.GET.get('key'): |
|
454 | 455 | # make this take 2s, to prevent brute forcing. |
|
455 | 456 | time.sleep(2) |
|
456 | 457 | |
|
457 | 458 | token = AuthTokenModel().get_auth_token( |
|
458 | 459 | self.request.GET.get('key')) |
|
459 | 460 | |
|
460 | 461 | # verify token is the correct role |
|
461 | 462 | if token is None or token.role != UserApiKeys.ROLE_PASSWORD_RESET: |
|
462 | 463 | log.debug('Got token with role:%s expected is %s', |
|
463 | 464 | getattr(token, 'role', 'EMPTY_TOKEN'), |
|
464 | 465 | UserApiKeys.ROLE_PASSWORD_RESET) |
|
465 | 466 | h.flash( |
|
466 | 467 | _('Given reset token is invalid'), category='error') |
|
467 | 468 | return HTTPFound(self.request.route_path('reset_password')) |
|
468 | 469 | |
|
469 | 470 | try: |
|
470 | 471 | owner = token.user |
|
471 | 472 | data = {'email': owner.email, 'token': token.api_key} |
|
472 | 473 | UserModel().reset_password(data) |
|
473 | 474 | h.flash( |
|
474 | 475 | _('Your password reset was successful, ' |
|
475 | 476 | 'a new password has been sent to your email'), |
|
476 | 477 | category='success') |
|
477 | 478 | except Exception as e: |
|
478 | 479 | log.error(e) |
|
479 | 480 | return HTTPFound(self.request.route_path('reset_password')) |
|
480 | 481 | |
|
481 | 482 | return HTTPFound(self.request.route_path('login')) |
|
482 | 483 | |
|
483 | 484 | @LoginRequired() |
|
484 | 485 | @NotAnonymous() |
|
485 | 486 | def setup_2fa(self): |
|
486 | 487 | _ = self.request.translate |
|
487 | 488 | c = self.load_default_context() |
|
488 | 489 | user_instance = self._rhodecode_db_user |
|
489 | 490 | form = TOTPForm(_, user_instance)() |
|
490 | 491 | render_ctx = {} |
|
491 | 492 | if self.request.method == 'POST': |
|
493 | post_items = dict(self.request.POST) | |
|
494 | ||
|
492 | 495 | try: |
|
493 |
form.to_python( |
|
|
496 | form_details = form.to_python(post_items) | |
|
497 | secret = form_details['secret_totp'] | |
|
498 | ||
|
499 | user_instance.init_2fa_recovery_codes(persist=True, force=True) | |
|
500 | user_instance.set_2fa_secret(secret) | |
|
501 | ||
|
494 | 502 | Session().commit() |
|
495 | raise HTTPFound(c.came_from) | |
|
503 | raise HTTPFound(self.request.route_path('my_account_enable_2fa', _query={'show-recovery-codes': 1})) | |
|
496 | 504 | except formencode.Invalid as errors: |
|
497 | 505 | defaults = errors.value |
|
498 | 506 | render_ctx = { |
|
499 | 507 | 'errors': errors.error_dict, |
|
500 | 508 | 'defaults': defaults, |
|
501 | 509 | } |
|
510 | ||
|
511 | # NOTE: here we DO NOT persist the secret 2FA, since this is only for setup, once a setup is completed | |
|
512 | # only then we should persist it | |
|
513 | secret = user_instance.init_secret_2fa(persist=False) | |
|
514 | ||
|
515 | totp_name = f'RhodeCode token ({self.request.user.username})' | |
|
516 | ||
|
502 | 517 | qr = qrcode.QRCode(version=1, box_size=10, border=5) |
|
503 | secret = user_instance.secret_2fa | |
|
504 | Session().flush() | |
|
505 | recovery_codes = user_instance.get_2fa_recovery_codes() | |
|
506 | Session().commit() | |
|
507 | qr.add_data(pyotp.totp.TOTP(secret).provisioning_uri( | |
|
508 | name=self.request.user.name)) | |
|
518 | qr.add_data(pyotp.totp.TOTP(secret).provisioning_uri(name=totp_name)) | |
|
509 | 519 | qr.make(fit=True) |
|
510 | 520 | img = qr.make_image(fill_color='black', back_color='white') |
|
511 | 521 | buffered = BytesIO() |
|
512 | 522 | img.save(buffered) |
|
513 | 523 | return self._get_template_context( |
|
514 | 524 | c, |
|
515 | 525 | qr=b64encode(buffered.getvalue()).decode("utf-8"), |
|
516 | key=secret, recovery_codes=json.dumps(recovery_codes), | |
|
517 | codes_viewed=not bool(recovery_codes), | |
|
526 | key=secret, | |
|
527 | totp_name=totp_name, | |
|
518 | 528 | ** render_ctx |
|
519 | 529 | ) |
|
520 | 530 | |
|
521 | 531 | @LoginRequired() |
|
522 | 532 | @NotAnonymous() |
|
523 | 533 | def verify_2fa(self): |
|
524 | 534 | _ = self.request.translate |
|
525 | 535 | c = self.load_default_context() |
|
526 | 536 | render_ctx = {} |
|
527 | 537 | user_instance = self._rhodecode_db_user |
|
528 | 538 | totp_form = TOTPForm(_, user_instance, allow_recovery_code_use=True)() |
|
529 | 539 | if self.request.method == 'POST': |
|
540 | post_items = dict(self.request.POST) | |
|
541 | # NOTE: inject secret, as it's a post configured saved item. | |
|
542 | post_items['secret_totp'] = user_instance.get_secret_2fa() | |
|
530 | 543 | try: |
|
531 |
totp_form.to_python( |
|
|
532 |
user_instance. |
|
|
544 | totp_form.to_python(post_items) | |
|
545 | user_instance.has_check_2fa_flag = False | |
|
533 | 546 | Session().commit() |
|
534 | 547 | raise HTTPFound(c.came_from) |
|
535 | 548 | except formencode.Invalid as errors: |
|
536 | 549 | defaults = errors.value |
|
537 | 550 | render_ctx = { |
|
538 | 551 | 'errors': errors.error_dict, |
|
539 | 552 | 'defaults': defaults, |
|
540 | 553 | } |
|
541 | 554 | return self._get_template_context(c, **render_ctx) |
@@ -1,359 +1,370 b'' | |||
|
1 | 1 | # Copyright (C) 2016-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | |
|
20 | 20 | from rhodecode.apps._base import ADMIN_PREFIX |
|
21 | 21 | |
|
22 | 22 | |
|
23 | 23 | def includeme(config): |
|
24 | 24 | from rhodecode.apps.my_account.views.my_account import MyAccountView |
|
25 | 25 | from rhodecode.apps.my_account.views.my_account_notifications import MyAccountNotificationsView |
|
26 | 26 | from rhodecode.apps.my_account.views.my_account_ssh_keys import MyAccountSshKeysView |
|
27 | 27 | |
|
28 | 28 | config.add_route( |
|
29 | 29 | name='my_account_profile', |
|
30 | 30 | pattern=ADMIN_PREFIX + '/my_account/profile') |
|
31 | 31 | config.add_view( |
|
32 | 32 | MyAccountView, |
|
33 | 33 | attr='my_account_profile', |
|
34 | 34 | route_name='my_account_profile', request_method='GET', |
|
35 | 35 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
36 | 36 | |
|
37 | 37 | # my account edit details |
|
38 | 38 | config.add_route( |
|
39 | 39 | name='my_account_edit', |
|
40 | 40 | pattern=ADMIN_PREFIX + '/my_account/edit') |
|
41 | 41 | config.add_view( |
|
42 | 42 | MyAccountView, |
|
43 | 43 | attr='my_account_edit', |
|
44 | 44 | route_name='my_account_edit', |
|
45 | 45 | request_method='GET', |
|
46 | 46 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
47 | 47 | |
|
48 | 48 | config.add_route( |
|
49 | 49 | name='my_account_update', |
|
50 | 50 | pattern=ADMIN_PREFIX + '/my_account/update') |
|
51 | 51 | config.add_view( |
|
52 | 52 | MyAccountView, |
|
53 | 53 | attr='my_account_update', |
|
54 | 54 | route_name='my_account_update', |
|
55 | 55 | request_method='POST', |
|
56 | 56 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
57 | 57 | |
|
58 | 58 | # my account password |
|
59 | 59 | config.add_route( |
|
60 | 60 | name='my_account_password', |
|
61 | 61 | pattern=ADMIN_PREFIX + '/my_account/password') |
|
62 | 62 | config.add_view( |
|
63 | 63 | MyAccountView, |
|
64 | 64 | attr='my_account_password', |
|
65 | 65 | route_name='my_account_password', request_method='GET', |
|
66 | 66 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
67 | 67 | |
|
68 | 68 | config.add_route( |
|
69 | 69 | name='my_account_password_update', |
|
70 | 70 | pattern=ADMIN_PREFIX + '/my_account/password/update') |
|
71 | 71 | config.add_view( |
|
72 | 72 | MyAccountView, |
|
73 | 73 | attr='my_account_password_update', |
|
74 | 74 | route_name='my_account_password_update', request_method='POST', |
|
75 | 75 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
76 | 76 | |
|
77 | 77 | # my account 2fa |
|
78 | 78 | config.add_route( |
|
79 | 79 | name='my_account_enable_2fa', |
|
80 | 80 | pattern=ADMIN_PREFIX + '/my_account/enable_2fa') |
|
81 | 81 | config.add_view( |
|
82 | 82 | MyAccountView, |
|
83 | 83 | attr='my_account_2fa', |
|
84 | 84 | route_name='my_account_enable_2fa', request_method='GET', |
|
85 | 85 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
86 | ||
|
86 | # my account 2fa save | |
|
87 | 87 | config.add_route( |
|
88 |
name='my_account_ |
|
|
89 |
pattern=ADMIN_PREFIX + '/my_account/ |
|
|
88 | name='my_account_enable_2fa_save', | |
|
89 | pattern=ADMIN_PREFIX + '/my_account/enable_2fa_save') | |
|
90 | 90 | config.add_view( |
|
91 | 91 | MyAccountView, |
|
92 |
attr='my_account_2fa_ |
|
|
93 |
route_name='my_account_ |
|
|
92 | attr='my_account_2fa_update', | |
|
93 | route_name='my_account_enable_2fa_save', request_method='POST', | |
|
94 | renderer='rhodecode:templates/admin/my_account/my_account.mako') | |
|
95 | ||
|
96 | # my account 2fa recovery code-reset | |
|
97 | config.add_route( | |
|
98 | name='my_account_show_2fa_recovery_codes', | |
|
99 | pattern=ADMIN_PREFIX + '/my_account/recovery_codes') | |
|
100 | config.add_view( | |
|
101 | MyAccountView, | |
|
102 | attr='my_account_2fa_show_recovery_codes', | |
|
103 | route_name='my_account_show_2fa_recovery_codes', request_method='POST', xhr=True, | |
|
94 | 104 | renderer='json_ext') |
|
95 | 105 | |
|
106 | # my account 2fa recovery code-reset | |
|
96 | 107 | config.add_route( |
|
97 | 108 | name='my_account_regenerate_2fa_recovery_codes', |
|
98 | 109 | pattern=ADMIN_PREFIX + '/my_account/regenerate_recovery_codes') |
|
99 | 110 | config.add_view( |
|
100 | 111 | MyAccountView, |
|
101 | 112 | attr='my_account_2fa_regenerate_recovery_codes', |
|
102 |
route_name='my_account_regenerate_2fa_recovery_codes', request_method='POST', |
|
|
103 | renderer='json_ext') | |
|
113 | route_name='my_account_regenerate_2fa_recovery_codes', request_method='POST', | |
|
114 | renderer='rhodecode:templates/admin/my_account/my_account.mako') | |
|
104 | 115 | |
|
105 | 116 | # my account tokens |
|
106 | 117 | config.add_route( |
|
107 | 118 | name='my_account_auth_tokens', |
|
108 | 119 | pattern=ADMIN_PREFIX + '/my_account/auth_tokens') |
|
109 | 120 | config.add_view( |
|
110 | 121 | MyAccountView, |
|
111 | 122 | attr='my_account_auth_tokens', |
|
112 | 123 | route_name='my_account_auth_tokens', request_method='GET', |
|
113 | 124 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
114 | 125 | |
|
115 | 126 | config.add_route( |
|
116 | 127 | name='my_account_auth_tokens_view', |
|
117 | 128 | pattern=ADMIN_PREFIX + '/my_account/auth_tokens/view') |
|
118 | 129 | config.add_view( |
|
119 | 130 | MyAccountView, |
|
120 | 131 | attr='my_account_auth_tokens_view', |
|
121 | 132 | route_name='my_account_auth_tokens_view', request_method='POST', xhr=True, |
|
122 | 133 | renderer='json_ext') |
|
123 | 134 | |
|
124 | 135 | config.add_route( |
|
125 | 136 | name='my_account_auth_tokens_add', |
|
126 | 137 | pattern=ADMIN_PREFIX + '/my_account/auth_tokens/new') |
|
127 | 138 | config.add_view( |
|
128 | 139 | MyAccountView, |
|
129 | 140 | attr='my_account_auth_tokens_add', |
|
130 | 141 | route_name='my_account_auth_tokens_add', request_method='POST') |
|
131 | 142 | |
|
132 | 143 | config.add_route( |
|
133 | 144 | name='my_account_auth_tokens_delete', |
|
134 | 145 | pattern=ADMIN_PREFIX + '/my_account/auth_tokens/delete') |
|
135 | 146 | config.add_view( |
|
136 | 147 | MyAccountView, |
|
137 | 148 | attr='my_account_auth_tokens_delete', |
|
138 | 149 | route_name='my_account_auth_tokens_delete', request_method='POST') |
|
139 | 150 | |
|
140 | 151 | # my account ssh keys |
|
141 | 152 | config.add_route( |
|
142 | 153 | name='my_account_ssh_keys', |
|
143 | 154 | pattern=ADMIN_PREFIX + '/my_account/ssh_keys') |
|
144 | 155 | config.add_view( |
|
145 | 156 | MyAccountSshKeysView, |
|
146 | 157 | attr='my_account_ssh_keys', |
|
147 | 158 | route_name='my_account_ssh_keys', request_method='GET', |
|
148 | 159 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
149 | 160 | |
|
150 | 161 | config.add_route( |
|
151 | 162 | name='my_account_ssh_keys_generate', |
|
152 | 163 | pattern=ADMIN_PREFIX + '/my_account/ssh_keys/generate') |
|
153 | 164 | config.add_view( |
|
154 | 165 | MyAccountSshKeysView, |
|
155 | 166 | attr='ssh_keys_generate_keypair', |
|
156 | 167 | route_name='my_account_ssh_keys_generate', request_method='GET', |
|
157 | 168 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
158 | 169 | |
|
159 | 170 | config.add_route( |
|
160 | 171 | name='my_account_ssh_keys_add', |
|
161 | 172 | pattern=ADMIN_PREFIX + '/my_account/ssh_keys/new') |
|
162 | 173 | config.add_view( |
|
163 | 174 | MyAccountSshKeysView, |
|
164 | 175 | attr='my_account_ssh_keys_add', |
|
165 | 176 | route_name='my_account_ssh_keys_add', request_method='POST',) |
|
166 | 177 | |
|
167 | 178 | config.add_route( |
|
168 | 179 | name='my_account_ssh_keys_delete', |
|
169 | 180 | pattern=ADMIN_PREFIX + '/my_account/ssh_keys/delete') |
|
170 | 181 | config.add_view( |
|
171 | 182 | MyAccountSshKeysView, |
|
172 | 183 | attr='my_account_ssh_keys_delete', |
|
173 | 184 | route_name='my_account_ssh_keys_delete', request_method='POST') |
|
174 | 185 | |
|
175 | 186 | # my account user group membership |
|
176 | 187 | config.add_route( |
|
177 | 188 | name='my_account_user_group_membership', |
|
178 | 189 | pattern=ADMIN_PREFIX + '/my_account/user_group_membership') |
|
179 | 190 | config.add_view( |
|
180 | 191 | MyAccountView, |
|
181 | 192 | attr='my_account_user_group_membership', |
|
182 | 193 | route_name='my_account_user_group_membership', |
|
183 | 194 | request_method='GET', |
|
184 | 195 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
185 | 196 | |
|
186 | 197 | # my account emails |
|
187 | 198 | config.add_route( |
|
188 | 199 | name='my_account_emails', |
|
189 | 200 | pattern=ADMIN_PREFIX + '/my_account/emails') |
|
190 | 201 | config.add_view( |
|
191 | 202 | MyAccountView, |
|
192 | 203 | attr='my_account_emails', |
|
193 | 204 | route_name='my_account_emails', request_method='GET', |
|
194 | 205 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
195 | 206 | |
|
196 | 207 | config.add_route( |
|
197 | 208 | name='my_account_emails_add', |
|
198 | 209 | pattern=ADMIN_PREFIX + '/my_account/emails/new') |
|
199 | 210 | config.add_view( |
|
200 | 211 | MyAccountView, |
|
201 | 212 | attr='my_account_emails_add', |
|
202 | 213 | route_name='my_account_emails_add', request_method='POST', |
|
203 | 214 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
204 | 215 | |
|
205 | 216 | config.add_route( |
|
206 | 217 | name='my_account_emails_delete', |
|
207 | 218 | pattern=ADMIN_PREFIX + '/my_account/emails/delete') |
|
208 | 219 | config.add_view( |
|
209 | 220 | MyAccountView, |
|
210 | 221 | attr='my_account_emails_delete', |
|
211 | 222 | route_name='my_account_emails_delete', request_method='POST') |
|
212 | 223 | |
|
213 | 224 | config.add_route( |
|
214 | 225 | name='my_account_repos', |
|
215 | 226 | pattern=ADMIN_PREFIX + '/my_account/repos') |
|
216 | 227 | config.add_view( |
|
217 | 228 | MyAccountView, |
|
218 | 229 | attr='my_account_repos', |
|
219 | 230 | route_name='my_account_repos', request_method='GET', |
|
220 | 231 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
221 | 232 | |
|
222 | 233 | config.add_route( |
|
223 | 234 | name='my_account_watched', |
|
224 | 235 | pattern=ADMIN_PREFIX + '/my_account/watched') |
|
225 | 236 | config.add_view( |
|
226 | 237 | MyAccountView, |
|
227 | 238 | attr='my_account_watched', |
|
228 | 239 | route_name='my_account_watched', request_method='GET', |
|
229 | 240 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
230 | 241 | |
|
231 | 242 | config.add_route( |
|
232 | 243 | name='my_account_bookmarks', |
|
233 | 244 | pattern=ADMIN_PREFIX + '/my_account/bookmarks') |
|
234 | 245 | config.add_view( |
|
235 | 246 | MyAccountView, |
|
236 | 247 | attr='my_account_bookmarks', |
|
237 | 248 | route_name='my_account_bookmarks', request_method='GET', |
|
238 | 249 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
239 | 250 | |
|
240 | 251 | config.add_route( |
|
241 | 252 | name='my_account_bookmarks_update', |
|
242 | 253 | pattern=ADMIN_PREFIX + '/my_account/bookmarks/update') |
|
243 | 254 | config.add_view( |
|
244 | 255 | MyAccountView, |
|
245 | 256 | attr='my_account_bookmarks_update', |
|
246 | 257 | route_name='my_account_bookmarks_update', request_method='POST') |
|
247 | 258 | |
|
248 | 259 | config.add_route( |
|
249 | 260 | name='my_account_goto_bookmark', |
|
250 | 261 | pattern=ADMIN_PREFIX + '/my_account/bookmark/{bookmark_id}') |
|
251 | 262 | config.add_view( |
|
252 | 263 | MyAccountView, |
|
253 | 264 | attr='my_account_goto_bookmark', |
|
254 | 265 | route_name='my_account_goto_bookmark', request_method='GET', |
|
255 | 266 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
256 | 267 | |
|
257 | 268 | config.add_route( |
|
258 | 269 | name='my_account_perms', |
|
259 | 270 | pattern=ADMIN_PREFIX + '/my_account/perms') |
|
260 | 271 | config.add_view( |
|
261 | 272 | MyAccountView, |
|
262 | 273 | attr='my_account_perms', |
|
263 | 274 | route_name='my_account_perms', request_method='GET', |
|
264 | 275 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
265 | 276 | |
|
266 | 277 | config.add_route( |
|
267 | 278 | name='my_account_notifications', |
|
268 | 279 | pattern=ADMIN_PREFIX + '/my_account/notifications') |
|
269 | 280 | config.add_view( |
|
270 | 281 | MyAccountView, |
|
271 | 282 | attr='my_notifications', |
|
272 | 283 | route_name='my_account_notifications', request_method='GET', |
|
273 | 284 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
274 | 285 | |
|
275 | 286 | config.add_route( |
|
276 | 287 | name='my_account_notifications_toggle_visibility', |
|
277 | 288 | pattern=ADMIN_PREFIX + '/my_account/toggle_visibility') |
|
278 | 289 | config.add_view( |
|
279 | 290 | MyAccountView, |
|
280 | 291 | attr='my_notifications_toggle_visibility', |
|
281 | 292 | route_name='my_account_notifications_toggle_visibility', |
|
282 | 293 | request_method='POST', renderer='json_ext') |
|
283 | 294 | |
|
284 | 295 | # my account pull requests |
|
285 | 296 | config.add_route( |
|
286 | 297 | name='my_account_pullrequests', |
|
287 | 298 | pattern=ADMIN_PREFIX + '/my_account/pull_requests') |
|
288 | 299 | config.add_view( |
|
289 | 300 | MyAccountView, |
|
290 | 301 | attr='my_account_pullrequests', |
|
291 | 302 | route_name='my_account_pullrequests', |
|
292 | 303 | request_method='GET', |
|
293 | 304 | renderer='rhodecode:templates/admin/my_account/my_account.mako') |
|
294 | 305 | |
|
295 | 306 | config.add_route( |
|
296 | 307 | name='my_account_pullrequests_data', |
|
297 | 308 | pattern=ADMIN_PREFIX + '/my_account/pull_requests/data') |
|
298 | 309 | config.add_view( |
|
299 | 310 | MyAccountView, |
|
300 | 311 | attr='my_account_pullrequests_data', |
|
301 | 312 | route_name='my_account_pullrequests_data', |
|
302 | 313 | request_method='GET', renderer='json_ext') |
|
303 | 314 | |
|
304 | 315 | # channelstream test |
|
305 | 316 | config.add_route( |
|
306 | 317 | name='my_account_notifications_test_channelstream', |
|
307 | 318 | pattern=ADMIN_PREFIX + '/my_account/test_channelstream') |
|
308 | 319 | config.add_view( |
|
309 | 320 | MyAccountView, |
|
310 | 321 | attr='my_account_notifications_test_channelstream', |
|
311 | 322 | route_name='my_account_notifications_test_channelstream', |
|
312 | 323 | request_method='POST', renderer='json_ext') |
|
313 | 324 | |
|
314 | 325 | # notifications |
|
315 | 326 | config.add_route( |
|
316 | 327 | name='notifications_show_all', |
|
317 | 328 | pattern=ADMIN_PREFIX + '/notifications') |
|
318 | 329 | config.add_view( |
|
319 | 330 | MyAccountNotificationsView, |
|
320 | 331 | attr='notifications_show_all', |
|
321 | 332 | route_name='notifications_show_all', request_method='GET', |
|
322 | 333 | renderer='rhodecode:templates/admin/notifications/notifications_show_all.mako') |
|
323 | 334 | |
|
324 | 335 | # notifications |
|
325 | 336 | config.add_route( |
|
326 | 337 | name='notifications_mark_all_read', |
|
327 | 338 | pattern=ADMIN_PREFIX + '/notifications_mark_all_read') |
|
328 | 339 | config.add_view( |
|
329 | 340 | MyAccountNotificationsView, |
|
330 | 341 | attr='notifications_mark_all_read', |
|
331 | 342 | route_name='notifications_mark_all_read', request_method='POST', |
|
332 | 343 | renderer='rhodecode:templates/admin/notifications/notifications_show_all.mako') |
|
333 | 344 | |
|
334 | 345 | config.add_route( |
|
335 | 346 | name='notifications_show', |
|
336 | 347 | pattern=ADMIN_PREFIX + '/notifications/{notification_id}') |
|
337 | 348 | config.add_view( |
|
338 | 349 | MyAccountNotificationsView, |
|
339 | 350 | attr='notifications_show', |
|
340 | 351 | route_name='notifications_show', request_method='GET', |
|
341 | 352 | renderer='rhodecode:templates/admin/notifications/notifications_show.mako') |
|
342 | 353 | |
|
343 | 354 | config.add_route( |
|
344 | 355 | name='notifications_update', |
|
345 | 356 | pattern=ADMIN_PREFIX + '/notifications/{notification_id}/update') |
|
346 | 357 | config.add_view( |
|
347 | 358 | MyAccountNotificationsView, |
|
348 | 359 | attr='notification_update', |
|
349 | 360 | route_name='notifications_update', request_method='POST', |
|
350 | 361 | renderer='json_ext') |
|
351 | 362 | |
|
352 | 363 | config.add_route( |
|
353 | 364 | name='notifications_delete', |
|
354 | 365 | pattern=ADMIN_PREFIX + '/notifications/{notification_id}/delete') |
|
355 | 366 | config.add_view( |
|
356 | 367 | MyAccountNotificationsView, |
|
357 | 368 | attr='notification_delete', |
|
358 | 369 | route_name='notifications_delete', request_method='POST', |
|
359 | 370 | renderer='json_ext') |
@@ -1,818 +1,858 b'' | |||
|
1 | 1 | # Copyright (C) 2016-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | import time | |
|
19 | 20 | import logging |
|
20 | 21 | import datetime |
|
21 | 22 | import string |
|
22 | 23 | |
|
23 | 24 | import formencode |
|
24 | 25 | import formencode.htmlfill |
|
25 | 26 | import peppercorn |
|
26 | 27 | from pyramid.httpexceptions import HTTPFound, HTTPNotFound |
|
27 | 28 | |
|
28 | 29 | from rhodecode.apps._base import BaseAppView, DataGridAppView |
|
29 | 30 | from rhodecode import forms |
|
30 | 31 | from rhodecode.lib import helpers as h |
|
31 | 32 | from rhodecode.lib import audit_logger |
|
32 | 33 | from rhodecode.lib import ext_json |
|
33 | 34 | from rhodecode.lib.auth import ( |
|
34 | 35 | LoginRequired, NotAnonymous, CSRFRequired, |
|
35 | 36 | HasRepoPermissionAny, HasRepoGroupPermissionAny, AuthUser) |
|
36 | 37 | from rhodecode.lib.channelstream import ( |
|
37 | 38 | channelstream_request, ChannelstreamException) |
|
38 | 39 | from rhodecode.lib.hash_utils import md5_safe |
|
39 | 40 | from rhodecode.lib.utils2 import safe_int, md5, str2bool |
|
40 | 41 | from rhodecode.model.auth_token import AuthTokenModel |
|
41 | 42 | from rhodecode.model.comment import CommentsModel |
|
42 | 43 | from rhodecode.model.db import ( |
|
43 | 44 | IntegrityError, or_, in_filter_generator, select, |
|
44 | 45 | Repository, UserEmailMap, UserApiKeys, UserFollowing, |
|
45 | 46 | PullRequest, UserBookmark, RepoGroup, ChangesetStatus) |
|
47 | from rhodecode.model.forms import TOTPForm | |
|
46 | 48 | from rhodecode.model.meta import Session |
|
47 | 49 | from rhodecode.model.pull_request import PullRequestModel |
|
48 | 50 | from rhodecode.model.user import UserModel |
|
49 | 51 | from rhodecode.model.user_group import UserGroupModel |
|
50 | 52 | from rhodecode.model.validation_schema.schemas import user_schema |
|
51 | 53 | |
|
52 | 54 | log = logging.getLogger(__name__) |
|
53 | 55 | |
|
54 | 56 | |
|
55 | 57 | class MyAccountView(BaseAppView, DataGridAppView): |
|
56 | 58 | ALLOW_SCOPED_TOKENS = False |
|
57 | 59 | """ |
|
58 | 60 | This view has alternative version inside EE, if modified please take a look |
|
59 | 61 | in there as well. |
|
60 | 62 | """ |
|
61 | 63 | |
|
62 | 64 | def load_default_context(self): |
|
63 | 65 | c = self._get_local_tmpl_context() |
|
64 | 66 | c.user = c.auth_user.get_instance() |
|
65 | 67 | c.allow_scoped_tokens = self.ALLOW_SCOPED_TOKENS |
|
66 | 68 | return c |
|
67 | 69 | |
|
68 | 70 | @LoginRequired() |
|
69 | 71 | @NotAnonymous() |
|
70 | 72 | def my_account_profile(self): |
|
71 | 73 | c = self.load_default_context() |
|
72 | 74 | c.active = 'profile' |
|
73 | 75 | c.extern_type = c.user.extern_type |
|
74 | 76 | return self._get_template_context(c) |
|
75 | 77 | |
|
76 | 78 | @LoginRequired() |
|
77 | 79 | @NotAnonymous() |
|
78 | 80 | def my_account_edit(self): |
|
79 | 81 | c = self.load_default_context() |
|
80 | 82 | c.active = 'profile_edit' |
|
81 | 83 | c.extern_type = c.user.extern_type |
|
82 | 84 | c.extern_name = c.user.extern_name |
|
83 | 85 | |
|
84 | 86 | schema = user_schema.UserProfileSchema().bind( |
|
85 | 87 | username=c.user.username, user_emails=c.user.emails) |
|
86 | 88 | appstruct = { |
|
87 | 89 | 'username': c.user.username, |
|
88 | 90 | 'email': c.user.email, |
|
89 | 91 | 'firstname': c.user.firstname, |
|
90 | 92 | 'lastname': c.user.lastname, |
|
91 | 93 | 'description': c.user.description, |
|
92 | 94 | } |
|
93 | 95 | c.form = forms.RcForm( |
|
94 | 96 | schema, appstruct=appstruct, |
|
95 | 97 | action=h.route_path('my_account_update'), |
|
96 | 98 | buttons=(forms.buttons.save, forms.buttons.reset)) |
|
97 | 99 | |
|
98 | 100 | return self._get_template_context(c) |
|
99 | 101 | |
|
100 | 102 | @LoginRequired() |
|
101 | 103 | @NotAnonymous() |
|
102 | 104 | @CSRFRequired() |
|
103 | 105 | def my_account_update(self): |
|
104 | 106 | _ = self.request.translate |
|
105 | 107 | c = self.load_default_context() |
|
106 | 108 | c.active = 'profile_edit' |
|
107 | 109 | c.perm_user = c.auth_user |
|
108 | 110 | c.extern_type = c.user.extern_type |
|
109 | 111 | c.extern_name = c.user.extern_name |
|
110 | 112 | |
|
111 | 113 | schema = user_schema.UserProfileSchema().bind( |
|
112 | 114 | username=c.user.username, user_emails=c.user.emails) |
|
113 | 115 | form = forms.RcForm( |
|
114 | 116 | schema, buttons=(forms.buttons.save, forms.buttons.reset)) |
|
115 | 117 | |
|
116 | 118 | controls = list(self.request.POST.items()) |
|
117 | 119 | try: |
|
118 | 120 | valid_data = form.validate(controls) |
|
119 | 121 | skip_attrs = ['admin', 'active', 'extern_type', 'extern_name', |
|
120 | 122 | 'new_password', 'password_confirmation'] |
|
121 | 123 | if c.extern_type != "rhodecode": |
|
122 | 124 | # forbid updating username for external accounts |
|
123 | 125 | skip_attrs.append('username') |
|
124 | 126 | old_email = c.user.email |
|
125 | 127 | UserModel().update_user( |
|
126 | 128 | self._rhodecode_user.user_id, skip_attrs=skip_attrs, |
|
127 | 129 | **valid_data) |
|
128 | 130 | if old_email != valid_data['email']: |
|
129 | 131 | old = UserEmailMap.query() \ |
|
130 | 132 | .filter(UserEmailMap.user == c.user)\ |
|
131 | 133 | .filter(UserEmailMap.email == valid_data['email'])\ |
|
132 | 134 | .first() |
|
133 | 135 | old.email = old_email |
|
134 | 136 | h.flash(_('Your account was updated successfully'), category='success') |
|
135 | 137 | Session().commit() |
|
136 | 138 | except forms.ValidationFailure as e: |
|
137 | 139 | c.form = e |
|
138 | 140 | return self._get_template_context(c) |
|
139 | 141 | |
|
140 | 142 | except Exception: |
|
141 | 143 | log.exception("Exception updating user") |
|
142 | 144 | h.flash(_('Error occurred during update of user'), |
|
143 | 145 | category='error') |
|
144 | 146 | raise HTTPFound(h.route_path('my_account_profile')) |
|
145 | 147 | |
|
146 | 148 | @LoginRequired() |
|
147 | 149 | @NotAnonymous() |
|
148 | 150 | def my_account_password(self): |
|
149 | 151 | c = self.load_default_context() |
|
150 | 152 | c.active = 'password' |
|
151 | 153 | c.extern_type = c.user.extern_type |
|
152 | 154 | |
|
153 | 155 | schema = user_schema.ChangePasswordSchema().bind( |
|
154 | 156 | username=c.user.username) |
|
155 | 157 | |
|
156 | 158 | form = forms.Form( |
|
157 | 159 | schema, |
|
158 | 160 | action=h.route_path('my_account_password_update'), |
|
159 | 161 | buttons=(forms.buttons.save, forms.buttons.reset)) |
|
160 | 162 | |
|
161 | 163 | c.form = form |
|
162 | 164 | return self._get_template_context(c) |
|
163 | 165 | |
|
164 | 166 | @LoginRequired() |
|
165 | 167 | @NotAnonymous() |
|
166 | 168 | @CSRFRequired() |
|
167 | 169 | def my_account_password_update(self): |
|
168 | 170 | _ = self.request.translate |
|
169 | 171 | c = self.load_default_context() |
|
170 | 172 | c.active = 'password' |
|
171 | 173 | c.extern_type = c.user.extern_type |
|
172 | 174 | |
|
173 | 175 | schema = user_schema.ChangePasswordSchema().bind( |
|
174 | 176 | username=c.user.username) |
|
175 | 177 | |
|
176 | 178 | form = forms.Form( |
|
177 | 179 | schema, buttons=(forms.buttons.save, forms.buttons.reset)) |
|
178 | 180 | |
|
179 | 181 | if c.extern_type != 'rhodecode': |
|
180 | 182 | raise HTTPFound(self.request.route_path('my_account_password')) |
|
181 | 183 | |
|
182 | 184 | controls = list(self.request.POST.items()) |
|
183 | 185 | try: |
|
184 | 186 | valid_data = form.validate(controls) |
|
185 | 187 | UserModel().update_user(c.user.user_id, **valid_data) |
|
186 | 188 | c.user.update_userdata(force_password_change=False) |
|
187 | 189 | Session().commit() |
|
188 | 190 | except forms.ValidationFailure as e: |
|
189 | 191 | c.form = e |
|
190 | 192 | return self._get_template_context(c) |
|
191 | 193 | |
|
192 | 194 | except Exception: |
|
193 | 195 | log.exception("Exception updating password") |
|
194 | 196 | h.flash(_('Error occurred during update of user password'), |
|
195 | 197 | category='error') |
|
196 | 198 | else: |
|
197 | 199 | instance = c.auth_user.get_instance() |
|
198 | 200 | self.session.setdefault('rhodecode_user', {}).update( |
|
199 | 201 | {'password': md5_safe(instance.password)}) |
|
200 | 202 | self.session.save() |
|
201 | 203 | h.flash(_("Successfully updated password"), category='success') |
|
202 | 204 | |
|
203 | 205 | raise HTTPFound(self.request.route_path('my_account_password')) |
|
204 | 206 | |
|
205 | 207 | @LoginRequired() |
|
206 | 208 | @NotAnonymous() |
|
207 | 209 | def my_account_2fa(self): |
|
208 | 210 | _ = self.request.translate |
|
209 | 211 | c = self.load_default_context() |
|
210 |
c.active = '2 |
|
|
211 | from rhodecode.model.settings import SettingsModel | |
|
212 | user_instance = self._rhodecode_db_user | |
|
212 | c.active = '2FA' | |
|
213 | user_instance = c.auth_user.get_instance() | |
|
213 | 214 | locked_by_admin = user_instance.has_forced_2fa |
|
214 | 215 | c.state_of_2fa = user_instance.has_enabled_2fa |
|
216 | c.user_seen_2fa_recovery_codes = user_instance.has_seen_2fa_codes | |
|
215 | 217 | c.locked_2fa = str2bool(locked_by_admin) |
|
216 | 218 | return self._get_template_context(c) |
|
217 | 219 | |
|
218 | 220 | @LoginRequired() |
|
219 | 221 | @NotAnonymous() |
|
220 | 222 | @CSRFRequired() |
|
221 |
def my_account_2fa_ |
|
|
222 |
|
|
|
223 | self._rhodecode_db_user.has_enabled_2fa = state | |
|
224 | return {'state_of_2fa': state} | |
|
223 | def my_account_2fa_update(self): | |
|
224 | _ = self.request.translate | |
|
225 | c = self.load_default_context() | |
|
226 | c.active = '2FA' | |
|
227 | user_instance = c.auth_user.get_instance() | |
|
228 | ||
|
229 | state = self.request.POST.get('2fa_status') == '1' | |
|
230 | user_instance.has_enabled_2fa = state | |
|
231 | user_instance.update_userdata(update_2fa=time.time()) | |
|
232 | Session().commit() | |
|
233 | h.flash(_("Successfully saved 2FA settings"), category='success') | |
|
234 | raise HTTPFound(self.request.route_path('my_account_enable_2fa')) | |
|
235 | ||
|
236 | @LoginRequired() | |
|
237 | @NotAnonymous() | |
|
238 | @CSRFRequired() | |
|
239 | def my_account_2fa_show_recovery_codes(self): | |
|
240 | c = self.load_default_context() | |
|
241 | user_instance = c.auth_user.get_instance() | |
|
242 | user_instance.has_seen_2fa_codes = True | |
|
243 | Session().commit() | |
|
244 | return {'recovery_codes': user_instance.get_2fa_recovery_codes()} | |
|
225 | 245 | |
|
226 | 246 | @LoginRequired() |
|
227 | 247 | @NotAnonymous() |
|
228 | 248 | @CSRFRequired() |
|
229 | 249 | def my_account_2fa_regenerate_recovery_codes(self): |
|
230 | return {'recovery_codes': self._rhodecode_db_user.regenerate_2fa_recovery_codes()} | |
|
250 | _ = self.request.translate | |
|
251 | c = self.load_default_context() | |
|
252 | user_instance = c.auth_user.get_instance() | |
|
253 | ||
|
254 | totp_form = TOTPForm(_, user_instance, allow_recovery_code_use=True)() | |
|
255 | ||
|
256 | post_items = dict(self.request.POST) | |
|
257 | # NOTE: inject secret, as it's a post configured saved item. | |
|
258 | post_items['secret_totp'] = user_instance.get_secret_2fa() | |
|
259 | try: | |
|
260 | totp_form.to_python(post_items) | |
|
261 | user_instance.regenerate_2fa_recovery_codes() | |
|
262 | Session().commit() | |
|
263 | except formencode.Invalid as errors: | |
|
264 | h.flash(_("Failed to generate new recovery codes: {}").format(errors), category='error') | |
|
265 | raise HTTPFound(self.request.route_path('my_account_enable_2fa')) | |
|
266 | except Exception as e: | |
|
267 | h.flash(_("Failed to generate new recovery codes: {}").format(e), category='error') | |
|
268 | raise HTTPFound(self.request.route_path('my_account_enable_2fa')) | |
|
269 | ||
|
270 | raise HTTPFound(self.request.route_path('my_account_enable_2fa', _query={'show-recovery-codes': 1})) | |
|
231 | 271 | |
|
232 | 272 | @LoginRequired() |
|
233 | 273 | @NotAnonymous() |
|
234 | 274 | def my_account_auth_tokens(self): |
|
235 | 275 | _ = self.request.translate |
|
236 | 276 | |
|
237 | 277 | c = self.load_default_context() |
|
238 | 278 | c.active = 'auth_tokens' |
|
239 | 279 | c.lifetime_values = AuthTokenModel.get_lifetime_values(translator=_) |
|
240 | 280 | c.role_values = [ |
|
241 | 281 | (x, AuthTokenModel.cls._get_role_name(x)) |
|
242 | 282 | for x in AuthTokenModel.cls.ROLES] |
|
243 | 283 | c.role_options = [(c.role_values, _("Role"))] |
|
244 | 284 | c.user_auth_tokens = AuthTokenModel().get_auth_tokens( |
|
245 | 285 | c.user.user_id, show_expired=True) |
|
246 | 286 | c.role_vcs = AuthTokenModel.cls.ROLE_VCS |
|
247 | 287 | return self._get_template_context(c) |
|
248 | 288 | |
|
249 | 289 | @LoginRequired() |
|
250 | 290 | @NotAnonymous() |
|
251 | 291 | @CSRFRequired() |
|
252 | 292 | def my_account_auth_tokens_view(self): |
|
253 | 293 | _ = self.request.translate |
|
254 | 294 | c = self.load_default_context() |
|
255 | 295 | |
|
256 | 296 | auth_token_id = self.request.POST.get('auth_token_id') |
|
257 | 297 | |
|
258 | 298 | if auth_token_id: |
|
259 | 299 | token = UserApiKeys.get_or_404(auth_token_id) |
|
260 | 300 | if token.user.user_id != c.user.user_id: |
|
261 | 301 | raise HTTPNotFound() |
|
262 | 302 | |
|
263 | 303 | return { |
|
264 | 304 | 'auth_token': token.api_key |
|
265 | 305 | } |
|
266 | 306 | |
|
267 | 307 | def maybe_attach_token_scope(self, token): |
|
268 | 308 | # implemented in EE edition |
|
269 | 309 | pass |
|
270 | 310 | |
|
271 | 311 | @LoginRequired() |
|
272 | 312 | @NotAnonymous() |
|
273 | 313 | @CSRFRequired() |
|
274 | 314 | def my_account_auth_tokens_add(self): |
|
275 | 315 | _ = self.request.translate |
|
276 | 316 | c = self.load_default_context() |
|
277 | 317 | |
|
278 | 318 | lifetime = safe_int(self.request.POST.get('lifetime'), -1) |
|
279 | 319 | description = self.request.POST.get('description') |
|
280 | 320 | role = self.request.POST.get('role') |
|
281 | 321 | |
|
282 | 322 | token = UserModel().add_auth_token( |
|
283 | 323 | user=c.user.user_id, |
|
284 | 324 | lifetime_minutes=lifetime, role=role, description=description, |
|
285 | 325 | scope_callback=self.maybe_attach_token_scope) |
|
286 | 326 | token_data = token.get_api_data() |
|
287 | 327 | |
|
288 | 328 | audit_logger.store_web( |
|
289 | 329 | 'user.edit.token.add', action_data={ |
|
290 | 330 | 'data': {'token': token_data, 'user': 'self'}}, |
|
291 | 331 | user=self._rhodecode_user, ) |
|
292 | 332 | Session().commit() |
|
293 | 333 | |
|
294 | 334 | h.flash(_("Auth token successfully created"), category='success') |
|
295 | 335 | return HTTPFound(h.route_path('my_account_auth_tokens')) |
|
296 | 336 | |
|
297 | 337 | @LoginRequired() |
|
298 | 338 | @NotAnonymous() |
|
299 | 339 | @CSRFRequired() |
|
300 | 340 | def my_account_auth_tokens_delete(self): |
|
301 | 341 | _ = self.request.translate |
|
302 | 342 | c = self.load_default_context() |
|
303 | 343 | |
|
304 | 344 | del_auth_token = self.request.POST.get('del_auth_token') |
|
305 | 345 | |
|
306 | 346 | if del_auth_token: |
|
307 | 347 | token = UserApiKeys.get_or_404(del_auth_token) |
|
308 | 348 | token_data = token.get_api_data() |
|
309 | 349 | |
|
310 | 350 | AuthTokenModel().delete(del_auth_token, c.user.user_id) |
|
311 | 351 | audit_logger.store_web( |
|
312 | 352 | 'user.edit.token.delete', action_data={ |
|
313 | 353 | 'data': {'token': token_data, 'user': 'self'}}, |
|
314 | 354 | user=self._rhodecode_user,) |
|
315 | 355 | Session().commit() |
|
316 | 356 | h.flash(_("Auth token successfully deleted"), category='success') |
|
317 | 357 | |
|
318 | 358 | return HTTPFound(h.route_path('my_account_auth_tokens')) |
|
319 | 359 | |
|
320 | 360 | @LoginRequired() |
|
321 | 361 | @NotAnonymous() |
|
322 | 362 | def my_account_emails(self): |
|
323 | 363 | _ = self.request.translate |
|
324 | 364 | |
|
325 | 365 | c = self.load_default_context() |
|
326 | 366 | c.active = 'emails' |
|
327 | 367 | |
|
328 | 368 | c.user_email_map = UserEmailMap.query()\ |
|
329 | 369 | .filter(UserEmailMap.user == c.user).all() |
|
330 | 370 | |
|
331 | 371 | schema = user_schema.AddEmailSchema().bind( |
|
332 | 372 | username=c.user.username, user_emails=c.user.emails) |
|
333 | 373 | |
|
334 | 374 | form = forms.RcForm(schema, |
|
335 | 375 | action=h.route_path('my_account_emails_add'), |
|
336 | 376 | buttons=(forms.buttons.save, forms.buttons.reset)) |
|
337 | 377 | |
|
338 | 378 | c.form = form |
|
339 | 379 | return self._get_template_context(c) |
|
340 | 380 | |
|
341 | 381 | @LoginRequired() |
|
342 | 382 | @NotAnonymous() |
|
343 | 383 | @CSRFRequired() |
|
344 | 384 | def my_account_emails_add(self): |
|
345 | 385 | _ = self.request.translate |
|
346 | 386 | c = self.load_default_context() |
|
347 | 387 | c.active = 'emails' |
|
348 | 388 | |
|
349 | 389 | schema = user_schema.AddEmailSchema().bind( |
|
350 | 390 | username=c.user.username, user_emails=c.user.emails) |
|
351 | 391 | |
|
352 | 392 | form = forms.RcForm( |
|
353 | 393 | schema, action=h.route_path('my_account_emails_add'), |
|
354 | 394 | buttons=(forms.buttons.save, forms.buttons.reset)) |
|
355 | 395 | |
|
356 | 396 | controls = list(self.request.POST.items()) |
|
357 | 397 | try: |
|
358 | 398 | valid_data = form.validate(controls) |
|
359 | 399 | UserModel().add_extra_email(c.user.user_id, valid_data['email']) |
|
360 | 400 | audit_logger.store_web( |
|
361 | 401 | 'user.edit.email.add', action_data={ |
|
362 | 402 | 'data': {'email': valid_data['email'], 'user': 'self'}}, |
|
363 | 403 | user=self._rhodecode_user,) |
|
364 | 404 | Session().commit() |
|
365 | 405 | except formencode.Invalid as error: |
|
366 | 406 | h.flash(h.escape(error.error_dict['email']), category='error') |
|
367 | 407 | except forms.ValidationFailure as e: |
|
368 | 408 | c.user_email_map = UserEmailMap.query() \ |
|
369 | 409 | .filter(UserEmailMap.user == c.user).all() |
|
370 | 410 | c.form = e |
|
371 | 411 | return self._get_template_context(c) |
|
372 | 412 | except Exception: |
|
373 | 413 | log.exception("Exception adding email") |
|
374 | 414 | h.flash(_('Error occurred during adding email'), |
|
375 | 415 | category='error') |
|
376 | 416 | else: |
|
377 | 417 | h.flash(_("Successfully added email"), category='success') |
|
378 | 418 | |
|
379 | 419 | raise HTTPFound(self.request.route_path('my_account_emails')) |
|
380 | 420 | |
|
381 | 421 | @LoginRequired() |
|
382 | 422 | @NotAnonymous() |
|
383 | 423 | @CSRFRequired() |
|
384 | 424 | def my_account_emails_delete(self): |
|
385 | 425 | _ = self.request.translate |
|
386 | 426 | c = self.load_default_context() |
|
387 | 427 | |
|
388 | 428 | del_email_id = self.request.POST.get('del_email_id') |
|
389 | 429 | if del_email_id: |
|
390 | 430 | email = UserEmailMap.get_or_404(del_email_id).email |
|
391 | 431 | UserModel().delete_extra_email(c.user.user_id, del_email_id) |
|
392 | 432 | audit_logger.store_web( |
|
393 | 433 | 'user.edit.email.delete', action_data={ |
|
394 | 434 | 'data': {'email': email, 'user': 'self'}}, |
|
395 | 435 | user=self._rhodecode_user,) |
|
396 | 436 | Session().commit() |
|
397 | 437 | h.flash(_("Email successfully deleted"), |
|
398 | 438 | category='success') |
|
399 | 439 | return HTTPFound(h.route_path('my_account_emails')) |
|
400 | 440 | |
|
401 | 441 | @LoginRequired() |
|
402 | 442 | @NotAnonymous() |
|
403 | 443 | @CSRFRequired() |
|
404 | 444 | def my_account_notifications_test_channelstream(self): |
|
405 | 445 | message = 'Test message sent via Channelstream by user: {}, on {}'.format( |
|
406 | 446 | self._rhodecode_user.username, datetime.datetime.now()) |
|
407 | 447 | payload = { |
|
408 | 448 | # 'channel': 'broadcast', |
|
409 | 449 | 'type': 'message', |
|
410 | 450 | 'timestamp': datetime.datetime.utcnow(), |
|
411 | 451 | 'user': 'system', |
|
412 | 452 | 'pm_users': [self._rhodecode_user.username], |
|
413 | 453 | 'message': { |
|
414 | 454 | 'message': message, |
|
415 | 455 | 'level': 'info', |
|
416 | 456 | 'topic': '/notifications' |
|
417 | 457 | } |
|
418 | 458 | } |
|
419 | 459 | |
|
420 | 460 | registry = self.request.registry |
|
421 | 461 | rhodecode_plugins = getattr(registry, 'rhodecode_plugins', {}) |
|
422 | 462 | channelstream_config = rhodecode_plugins.get('channelstream', {}) |
|
423 | 463 | |
|
424 | 464 | try: |
|
425 | 465 | channelstream_request(channelstream_config, [payload], '/message') |
|
426 | 466 | except ChannelstreamException as e: |
|
427 | 467 | log.exception('Failed to send channelstream data') |
|
428 | 468 | return {"response": f'ERROR: {e.__class__.__name__}'} |
|
429 | 469 | return {"response": 'Channelstream data sent. ' |
|
430 | 470 | 'You should see a new live message now.'} |
|
431 | 471 | |
|
432 | 472 | def _load_my_repos_data(self, watched=False): |
|
433 | 473 | |
|
434 | 474 | allowed_ids = [-1] + self._rhodecode_user.repo_acl_ids_from_stack(AuthUser.repo_read_perms) |
|
435 | 475 | |
|
436 | 476 | if watched: |
|
437 | 477 | # repos user watch |
|
438 | 478 | repo_list = Session().query( |
|
439 | 479 | Repository |
|
440 | 480 | ) \ |
|
441 | 481 | .join( |
|
442 | 482 | (UserFollowing, UserFollowing.follows_repo_id == Repository.repo_id) |
|
443 | 483 | ) \ |
|
444 | 484 | .filter( |
|
445 | 485 | UserFollowing.user_id == self._rhodecode_user.user_id |
|
446 | 486 | ) \ |
|
447 | 487 | .filter(or_( |
|
448 | 488 | # generate multiple IN to fix limitation problems |
|
449 | 489 | *in_filter_generator(Repository.repo_id, allowed_ids)) |
|
450 | 490 | ) \ |
|
451 | 491 | .order_by(Repository.repo_name) \ |
|
452 | 492 | .all() |
|
453 | 493 | |
|
454 | 494 | else: |
|
455 | 495 | # repos user is owner of |
|
456 | 496 | repo_list = Session().query( |
|
457 | 497 | Repository |
|
458 | 498 | ) \ |
|
459 | 499 | .filter( |
|
460 | 500 | Repository.user_id == self._rhodecode_user.user_id |
|
461 | 501 | ) \ |
|
462 | 502 | .filter(or_( |
|
463 | 503 | # generate multiple IN to fix limitation problems |
|
464 | 504 | *in_filter_generator(Repository.repo_id, allowed_ids)) |
|
465 | 505 | ) \ |
|
466 | 506 | .order_by(Repository.repo_name) \ |
|
467 | 507 | .all() |
|
468 | 508 | |
|
469 | 509 | _render = self.request.get_partial_renderer( |
|
470 | 510 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
471 | 511 | |
|
472 | 512 | def repo_lnk(name, rtype, rstate, private, archived, fork_of): |
|
473 | 513 | return _render('repo_name', name, rtype, rstate, private, archived, fork_of, |
|
474 | 514 | short_name=False, admin=False) |
|
475 | 515 | |
|
476 | 516 | repos_data = [] |
|
477 | 517 | for repo in repo_list: |
|
478 | 518 | row = { |
|
479 | 519 | "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state, |
|
480 | 520 | repo.private, repo.archived, repo.fork), |
|
481 | 521 | "name_raw": repo.repo_name.lower(), |
|
482 | 522 | } |
|
483 | 523 | |
|
484 | 524 | repos_data.append(row) |
|
485 | 525 | |
|
486 | 526 | # json used to render the grid |
|
487 | 527 | return ext_json.str_json(repos_data) |
|
488 | 528 | |
|
489 | 529 | @LoginRequired() |
|
490 | 530 | @NotAnonymous() |
|
491 | 531 | def my_account_repos(self): |
|
492 | 532 | c = self.load_default_context() |
|
493 | 533 | c.active = 'repos' |
|
494 | 534 | |
|
495 | 535 | # json used to render the grid |
|
496 | 536 | c.data = self._load_my_repos_data() |
|
497 | 537 | return self._get_template_context(c) |
|
498 | 538 | |
|
499 | 539 | @LoginRequired() |
|
500 | 540 | @NotAnonymous() |
|
501 | 541 | def my_account_watched(self): |
|
502 | 542 | c = self.load_default_context() |
|
503 | 543 | c.active = 'watched' |
|
504 | 544 | |
|
505 | 545 | # json used to render the grid |
|
506 | 546 | c.data = self._load_my_repos_data(watched=True) |
|
507 | 547 | return self._get_template_context(c) |
|
508 | 548 | |
|
509 | 549 | @LoginRequired() |
|
510 | 550 | @NotAnonymous() |
|
511 | 551 | def my_account_bookmarks(self): |
|
512 | 552 | c = self.load_default_context() |
|
513 | 553 | c.active = 'bookmarks' |
|
514 | 554 | |
|
515 | 555 | user_bookmarks = \ |
|
516 | 556 | select(UserBookmark, Repository, RepoGroup) \ |
|
517 | 557 | .where(UserBookmark.user_id == self._rhodecode_user.user_id) \ |
|
518 | 558 | .outerjoin(Repository, Repository.repo_id == UserBookmark.bookmark_repo_id) \ |
|
519 | 559 | .outerjoin(RepoGroup, RepoGroup.group_id == UserBookmark.bookmark_repo_group_id) \ |
|
520 | 560 | .order_by(UserBookmark.position.asc()) |
|
521 | 561 | |
|
522 | 562 | c.user_bookmark_items = Session().execute(user_bookmarks).all() |
|
523 | 563 | return self._get_template_context(c) |
|
524 | 564 | |
|
525 | 565 | def _process_bookmark_entry(self, entry, user_id): |
|
526 | 566 | position = safe_int(entry.get('position')) |
|
527 | 567 | cur_position = safe_int(entry.get('cur_position')) |
|
528 | 568 | if position is None: |
|
529 | 569 | return |
|
530 | 570 | |
|
531 | 571 | # check if this is an existing entry |
|
532 | 572 | is_new = False |
|
533 | 573 | db_entry = UserBookmark().get_by_position_for_user(cur_position, user_id) |
|
534 | 574 | |
|
535 | 575 | if db_entry and str2bool(entry.get('remove')): |
|
536 | 576 | log.debug('Marked bookmark %s for deletion', db_entry) |
|
537 | 577 | Session().delete(db_entry) |
|
538 | 578 | return |
|
539 | 579 | |
|
540 | 580 | if not db_entry: |
|
541 | 581 | # new |
|
542 | 582 | db_entry = UserBookmark() |
|
543 | 583 | is_new = True |
|
544 | 584 | |
|
545 | 585 | should_save = False |
|
546 | 586 | default_redirect_url = '' |
|
547 | 587 | |
|
548 | 588 | # save repo |
|
549 | 589 | if entry.get('bookmark_repo') and safe_int(entry.get('bookmark_repo')): |
|
550 | 590 | repo = Repository.get(entry['bookmark_repo']) |
|
551 | 591 | perm_check = HasRepoPermissionAny( |
|
552 | 592 | 'repository.read', 'repository.write', 'repository.admin') |
|
553 | 593 | if repo and perm_check(repo_name=repo.repo_name): |
|
554 | 594 | db_entry.repository = repo |
|
555 | 595 | should_save = True |
|
556 | 596 | default_redirect_url = '${repo_url}' |
|
557 | 597 | # save repo group |
|
558 | 598 | elif entry.get('bookmark_repo_group') and safe_int(entry.get('bookmark_repo_group')): |
|
559 | 599 | repo_group = RepoGroup.get(entry['bookmark_repo_group']) |
|
560 | 600 | perm_check = HasRepoGroupPermissionAny( |
|
561 | 601 | 'group.read', 'group.write', 'group.admin') |
|
562 | 602 | |
|
563 | 603 | if repo_group and perm_check(group_name=repo_group.group_name): |
|
564 | 604 | db_entry.repository_group = repo_group |
|
565 | 605 | should_save = True |
|
566 | 606 | default_redirect_url = '${repo_group_url}' |
|
567 | 607 | # save generic info |
|
568 | 608 | elif entry.get('title') and entry.get('redirect_url'): |
|
569 | 609 | should_save = True |
|
570 | 610 | |
|
571 | 611 | if should_save: |
|
572 | 612 | # mark user and position |
|
573 | 613 | db_entry.user_id = user_id |
|
574 | 614 | db_entry.position = position |
|
575 | 615 | db_entry.title = entry.get('title') |
|
576 | 616 | db_entry.redirect_url = entry.get('redirect_url') or default_redirect_url |
|
577 | 617 | log.debug('Saving bookmark %s, new:%s', db_entry, is_new) |
|
578 | 618 | |
|
579 | 619 | Session().add(db_entry) |
|
580 | 620 | |
|
581 | 621 | @LoginRequired() |
|
582 | 622 | @NotAnonymous() |
|
583 | 623 | @CSRFRequired() |
|
584 | 624 | def my_account_bookmarks_update(self): |
|
585 | 625 | _ = self.request.translate |
|
586 | 626 | c = self.load_default_context() |
|
587 | 627 | c.active = 'bookmarks' |
|
588 | 628 | |
|
589 | 629 | controls = peppercorn.parse(self.request.POST.items()) |
|
590 | 630 | user_id = c.user.user_id |
|
591 | 631 | |
|
592 | 632 | # validate positions |
|
593 | 633 | positions = {} |
|
594 | 634 | for entry in controls.get('bookmarks', []): |
|
595 | 635 | position = safe_int(entry['position']) |
|
596 | 636 | if position is None: |
|
597 | 637 | continue |
|
598 | 638 | |
|
599 | 639 | if position in positions: |
|
600 | 640 | h.flash(_("Position {} is defined twice. " |
|
601 | 641 | "Please correct this error.").format(position), category='error') |
|
602 | 642 | return HTTPFound(h.route_path('my_account_bookmarks')) |
|
603 | 643 | |
|
604 | 644 | entry['position'] = position |
|
605 | 645 | entry['cur_position'] = safe_int(entry.get('cur_position')) |
|
606 | 646 | positions[position] = entry |
|
607 | 647 | |
|
608 | 648 | try: |
|
609 | 649 | for entry in positions.values(): |
|
610 | 650 | self._process_bookmark_entry(entry, user_id) |
|
611 | 651 | |
|
612 | 652 | Session().commit() |
|
613 | 653 | h.flash(_("Update Bookmarks"), category='success') |
|
614 | 654 | except IntegrityError: |
|
615 | 655 | h.flash(_("Failed to update bookmarks. " |
|
616 | 656 | "Make sure an unique position is used."), category='error') |
|
617 | 657 | |
|
618 | 658 | return HTTPFound(h.route_path('my_account_bookmarks')) |
|
619 | 659 | |
|
620 | 660 | @LoginRequired() |
|
621 | 661 | @NotAnonymous() |
|
622 | 662 | def my_account_goto_bookmark(self): |
|
623 | 663 | |
|
624 | 664 | bookmark_id = self.request.matchdict['bookmark_id'] |
|
625 | 665 | user_bookmark = UserBookmark().query()\ |
|
626 | 666 | .filter(UserBookmark.user_id == self.request.user.user_id) \ |
|
627 | 667 | .filter(UserBookmark.position == bookmark_id).scalar() |
|
628 | 668 | |
|
629 | 669 | redirect_url = h.route_path('my_account_bookmarks') |
|
630 | 670 | if not user_bookmark: |
|
631 | 671 | raise HTTPFound(redirect_url) |
|
632 | 672 | |
|
633 | 673 | # repository set |
|
634 | 674 | if user_bookmark.repository: |
|
635 | 675 | repo_name = user_bookmark.repository.repo_name |
|
636 | 676 | base_redirect_url = h.route_path( |
|
637 | 677 | 'repo_summary', repo_name=repo_name) |
|
638 | 678 | if user_bookmark.redirect_url and \ |
|
639 | 679 | '${repo_url}' in user_bookmark.redirect_url: |
|
640 | 680 | redirect_url = string.Template(user_bookmark.redirect_url)\ |
|
641 | 681 | .safe_substitute({'repo_url': base_redirect_url}) |
|
642 | 682 | else: |
|
643 | 683 | redirect_url = base_redirect_url |
|
644 | 684 | # repository group set |
|
645 | 685 | elif user_bookmark.repository_group: |
|
646 | 686 | repo_group_name = user_bookmark.repository_group.group_name |
|
647 | 687 | base_redirect_url = h.route_path( |
|
648 | 688 | 'repo_group_home', repo_group_name=repo_group_name) |
|
649 | 689 | if user_bookmark.redirect_url and \ |
|
650 | 690 | '${repo_group_url}' in user_bookmark.redirect_url: |
|
651 | 691 | redirect_url = string.Template(user_bookmark.redirect_url)\ |
|
652 | 692 | .safe_substitute({'repo_group_url': base_redirect_url}) |
|
653 | 693 | else: |
|
654 | 694 | redirect_url = base_redirect_url |
|
655 | 695 | # custom URL set |
|
656 | 696 | elif user_bookmark.redirect_url: |
|
657 | 697 | server_url = h.route_url('home').rstrip('/') |
|
658 | 698 | redirect_url = string.Template(user_bookmark.redirect_url) \ |
|
659 | 699 | .safe_substitute({'server_url': server_url}) |
|
660 | 700 | |
|
661 | 701 | log.debug('Redirecting bookmark %s to %s', user_bookmark, redirect_url) |
|
662 | 702 | raise HTTPFound(redirect_url) |
|
663 | 703 | |
|
664 | 704 | @LoginRequired() |
|
665 | 705 | @NotAnonymous() |
|
666 | 706 | def my_account_perms(self): |
|
667 | 707 | c = self.load_default_context() |
|
668 | 708 | c.active = 'perms' |
|
669 | 709 | |
|
670 | 710 | c.perm_user = c.auth_user |
|
671 | 711 | return self._get_template_context(c) |
|
672 | 712 | |
|
673 | 713 | @LoginRequired() |
|
674 | 714 | @NotAnonymous() |
|
675 | 715 | def my_notifications(self): |
|
676 | 716 | c = self.load_default_context() |
|
677 | 717 | c.active = 'notifications' |
|
678 | 718 | |
|
679 | 719 | return self._get_template_context(c) |
|
680 | 720 | |
|
681 | 721 | @LoginRequired() |
|
682 | 722 | @NotAnonymous() |
|
683 | 723 | @CSRFRequired() |
|
684 | 724 | def my_notifications_toggle_visibility(self): |
|
685 | 725 | user = self._rhodecode_db_user |
|
686 | 726 | new_status = not user.user_data.get('notification_status', True) |
|
687 | 727 | user.update_userdata(notification_status=new_status) |
|
688 | 728 | Session().commit() |
|
689 | 729 | return user.user_data['notification_status'] |
|
690 | 730 | |
|
691 | 731 | def _get_pull_requests_list(self, statuses, filter_type=None): |
|
692 | 732 | draw, start, limit = self._extract_chunk(self.request) |
|
693 | 733 | search_q, order_by, order_dir = self._extract_ordering(self.request) |
|
694 | 734 | |
|
695 | 735 | _render = self.request.get_partial_renderer( |
|
696 | 736 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
697 | 737 | |
|
698 | 738 | if filter_type == 'awaiting_my_review': |
|
699 | 739 | pull_requests = PullRequestModel().get_im_participating_in_for_review( |
|
700 | 740 | user_id=self._rhodecode_user.user_id, |
|
701 | 741 | statuses=statuses, query=search_q, |
|
702 | 742 | offset=start, length=limit, order_by=order_by, |
|
703 | 743 | order_dir=order_dir) |
|
704 | 744 | |
|
705 | 745 | pull_requests_total_count = PullRequestModel().count_im_participating_in_for_review( |
|
706 | 746 | user_id=self._rhodecode_user.user_id, statuses=statuses, query=search_q) |
|
707 | 747 | else: |
|
708 | 748 | pull_requests = PullRequestModel().get_im_participating_in( |
|
709 | 749 | user_id=self._rhodecode_user.user_id, |
|
710 | 750 | statuses=statuses, query=search_q, |
|
711 | 751 | offset=start, length=limit, order_by=order_by, |
|
712 | 752 | order_dir=order_dir) |
|
713 | 753 | |
|
714 | 754 | pull_requests_total_count = PullRequestModel().count_im_participating_in( |
|
715 | 755 | user_id=self._rhodecode_user.user_id, statuses=statuses, query=search_q) |
|
716 | 756 | |
|
717 | 757 | data = [] |
|
718 | 758 | comments_model = CommentsModel() |
|
719 | 759 | for pr in pull_requests: |
|
720 | 760 | repo_id = pr.target_repo_id |
|
721 | 761 | comments_count = comments_model.get_all_comments( |
|
722 | 762 | repo_id, pull_request=pr, include_drafts=False, count_only=True) |
|
723 | 763 | owned = pr.user_id == self._rhodecode_user.user_id |
|
724 | 764 | |
|
725 | 765 | review_statuses = pr.reviewers_statuses(user=self._rhodecode_db_user) |
|
726 | 766 | my_review_status = ChangesetStatus.STATUS_NOT_REVIEWED |
|
727 | 767 | if review_statuses and review_statuses[4]: |
|
728 | 768 | _review_obj, _user, _reasons, _mandatory, statuses = review_statuses |
|
729 | 769 | my_review_status = statuses[0][1].status |
|
730 | 770 | |
|
731 | 771 | data.append({ |
|
732 | 772 | 'target_repo': _render('pullrequest_target_repo', |
|
733 | 773 | pr.target_repo.repo_name), |
|
734 | 774 | 'name': _render('pullrequest_name', |
|
735 | 775 | pr.pull_request_id, pr.pull_request_state, |
|
736 | 776 | pr.work_in_progress, pr.target_repo.repo_name, |
|
737 | 777 | short=True), |
|
738 | 778 | 'name_raw': pr.pull_request_id, |
|
739 | 779 | 'status': _render('pullrequest_status', |
|
740 | 780 | pr.calculated_review_status()), |
|
741 | 781 | 'my_status': _render('pullrequest_status', |
|
742 | 782 | my_review_status), |
|
743 | 783 | 'title': _render('pullrequest_title', pr.title, pr.description), |
|
744 | 784 | 'pr_flow': _render('pullrequest_commit_flow', pr), |
|
745 | 785 | 'description': h.escape(pr.description), |
|
746 | 786 | 'updated_on': _render('pullrequest_updated_on', |
|
747 | 787 | h.datetime_to_time(pr.updated_on), |
|
748 | 788 | pr.versions_count), |
|
749 | 789 | 'updated_on_raw': h.datetime_to_time(pr.updated_on), |
|
750 | 790 | 'created_on': _render('pullrequest_updated_on', |
|
751 | 791 | h.datetime_to_time(pr.created_on)), |
|
752 | 792 | 'created_on_raw': h.datetime_to_time(pr.created_on), |
|
753 | 793 | 'state': pr.pull_request_state, |
|
754 | 794 | 'author': _render('pullrequest_author', |
|
755 | 795 | pr.author.full_contact, ), |
|
756 | 796 | 'author_raw': pr.author.full_name, |
|
757 | 797 | 'comments': _render('pullrequest_comments', comments_count), |
|
758 | 798 | 'comments_raw': comments_count, |
|
759 | 799 | 'closed': pr.is_closed(), |
|
760 | 800 | 'owned': owned |
|
761 | 801 | }) |
|
762 | 802 | |
|
763 | 803 | # json used to render the grid |
|
764 | 804 | data = ({ |
|
765 | 805 | 'draw': draw, |
|
766 | 806 | 'data': data, |
|
767 | 807 | 'recordsTotal': pull_requests_total_count, |
|
768 | 808 | 'recordsFiltered': pull_requests_total_count, |
|
769 | 809 | }) |
|
770 | 810 | return data |
|
771 | 811 | |
|
772 | 812 | @LoginRequired() |
|
773 | 813 | @NotAnonymous() |
|
774 | 814 | def my_account_pullrequests(self): |
|
775 | 815 | c = self.load_default_context() |
|
776 | 816 | c.active = 'pullrequests' |
|
777 | 817 | req_get = self.request.GET |
|
778 | 818 | |
|
779 | 819 | c.closed = str2bool(req_get.get('closed')) |
|
780 | 820 | c.awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) |
|
781 | 821 | |
|
782 | 822 | c.selected_filter = 'all' |
|
783 | 823 | if c.closed: |
|
784 | 824 | c.selected_filter = 'all_closed' |
|
785 | 825 | if c.awaiting_my_review: |
|
786 | 826 | c.selected_filter = 'awaiting_my_review' |
|
787 | 827 | |
|
788 | 828 | return self._get_template_context(c) |
|
789 | 829 | |
|
790 | 830 | @LoginRequired() |
|
791 | 831 | @NotAnonymous() |
|
792 | 832 | def my_account_pullrequests_data(self): |
|
793 | 833 | self.load_default_context() |
|
794 | 834 | req_get = self.request.GET |
|
795 | 835 | |
|
796 | 836 | awaiting_my_review = str2bool(req_get.get('awaiting_my_review')) |
|
797 | 837 | closed = str2bool(req_get.get('closed')) |
|
798 | 838 | |
|
799 | 839 | statuses = [PullRequest.STATUS_NEW, PullRequest.STATUS_OPEN] |
|
800 | 840 | if closed: |
|
801 | 841 | statuses += [PullRequest.STATUS_CLOSED] |
|
802 | 842 | |
|
803 | 843 | filter_type = \ |
|
804 | 844 | 'awaiting_my_review' if awaiting_my_review \ |
|
805 | 845 | else None |
|
806 | 846 | |
|
807 | 847 | data = self._get_pull_requests_list(statuses=statuses, filter_type=filter_type) |
|
808 | 848 | return data |
|
809 | 849 | |
|
810 | 850 | @LoginRequired() |
|
811 | 851 | @NotAnonymous() |
|
812 | 852 | def my_account_user_group_membership(self): |
|
813 | 853 | c = self.load_default_context() |
|
814 | 854 | c.active = 'user_group_membership' |
|
815 | 855 | groups = [UserGroupModel.get_user_groups_as_dict(group.users_group) |
|
816 | 856 | for group in self._rhodecode_db_user.group_member] |
|
817 | 857 | c.user_groups = ext_json.str_json(groups) |
|
818 | 858 | return self._get_template_context(c) |
@@ -1,5985 +1,6038 b'' | |||
|
1 | 1 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | """ |
|
20 | 20 | Database Models for RhodeCode Enterprise |
|
21 | 21 | """ |
|
22 | 22 | |
|
23 | 23 | import re |
|
24 | 24 | import os |
|
25 | 25 | import time |
|
26 | 26 | import string |
|
27 | 27 | import logging |
|
28 | 28 | import datetime |
|
29 | 29 | import uuid |
|
30 | 30 | import warnings |
|
31 | 31 | import ipaddress |
|
32 | 32 | import functools |
|
33 | 33 | import traceback |
|
34 | 34 | import collections |
|
35 | 35 | |
|
36 | 36 | import pyotp |
|
37 | 37 | from sqlalchemy import ( |
|
38 | 38 | or_, and_, not_, func, cast, TypeDecorator, event, select, |
|
39 | 39 | true, false, null, union_all, |
|
40 | 40 | Index, Sequence, UniqueConstraint, ForeignKey, CheckConstraint, Column, |
|
41 | 41 | Boolean, String, Unicode, UnicodeText, DateTime, Integer, LargeBinary, |
|
42 | 42 | Text, Float, PickleType, BigInteger) |
|
43 | 43 | from sqlalchemy.sql.expression import case |
|
44 | 44 | from sqlalchemy.sql.functions import coalesce, count # pragma: no cover |
|
45 | 45 | from sqlalchemy.orm import ( |
|
46 | 46 | relationship, lazyload, joinedload, class_mapper, validates, aliased, load_only) |
|
47 | 47 | from sqlalchemy.ext.declarative import declared_attr |
|
48 | 48 | from sqlalchemy.ext.hybrid import hybrid_property |
|
49 | 49 | from sqlalchemy.exc import IntegrityError # pragma: no cover |
|
50 | 50 | from sqlalchemy.dialects.mysql import LONGTEXT |
|
51 | 51 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
52 | 52 | from pyramid.threadlocal import get_current_request |
|
53 | 53 | from webhelpers2.text import remove_formatting |
|
54 | 54 | |
|
55 | 55 | from rhodecode import ConfigGet |
|
56 | 56 | from rhodecode.lib.str_utils import safe_bytes |
|
57 | 57 | from rhodecode.translation import _ |
|
58 | 58 | from rhodecode.lib.vcs import get_vcs_instance, VCSError |
|
59 | 59 | from rhodecode.lib.vcs.backends.base import ( |
|
60 | 60 | EmptyCommit, Reference, unicode_to_reference, reference_to_unicode) |
|
61 | 61 | from rhodecode.lib.utils2 import ( |
|
62 | 62 | str2bool, safe_str, get_commit_safe, sha1_safe, |
|
63 | 63 | time_to_datetime, aslist, Optional, safe_int, get_clone_url, AttributeDict, |
|
64 | 64 | glob2re, StrictAttributeDict, cleaned_uri, datetime_to_time) |
|
65 | 65 | from rhodecode.lib.jsonalchemy import ( |
|
66 | 66 | MutationObj, MutationList, JsonType, JsonRaw) |
|
67 | 67 | from rhodecode.lib.hash_utils import sha1 |
|
68 | 68 | from rhodecode.lib import ext_json |
|
69 | 69 | from rhodecode.lib import enc_utils |
|
70 | 70 | from rhodecode.lib.ext_json import json, str_json |
|
71 | 71 | from rhodecode.lib.caching_query import FromCache |
|
72 | 72 | from rhodecode.lib.exceptions import ( |
|
73 | 73 | ArtifactMetadataDuplicate, ArtifactMetadataBadValueType) |
|
74 | 74 | from rhodecode.model.meta import Base, Session |
|
75 | 75 | |
|
76 | 76 | URL_SEP = '/' |
|
77 | 77 | log = logging.getLogger(__name__) |
|
78 | 78 | |
|
79 | 79 | # ============================================================================= |
|
80 | 80 | # BASE CLASSES |
|
81 | 81 | # ============================================================================= |
|
82 | 82 | |
|
83 | 83 | # this is propagated from .ini file rhodecode.encrypted_values.secret or |
|
84 | 84 | # beaker.session.secret if first is not set. |
|
85 | 85 | # and initialized at environment.py |
|
86 | 86 | ENCRYPTION_KEY: bytes = b'' |
|
87 | 87 | |
|
88 | 88 | # used to sort permissions by types, '#' used here is not allowed to be in |
|
89 | 89 | # usernames, and it's very early in sorted string.printable table. |
|
PERMISSION_TYPE_SORT = {
    'admin': '####',  # longest prefix -> sorts first
    'write': '###',
    'read': '##',
    'none': '#',  # shortest prefix -> sorts last
}
|
96 | 96 | |
|
97 | 97 | |
|
def display_user_sort(obj):
    """
    Sort key used by the .permissions() functions of Repository, RepoGroup
    and UserGroup. The default user always sorts in front of every other
    resource; inactive duplicate permissions go last within their bucket.
    """

    if obj.username == User.DEFAULT_USER:
        return '#####'

    perm_level = obj.permission.split('.')[-1]
    prefix = PERMISSION_TYPE_SORT.get(perm_level, '')
    # NOTE(dan): inactive duplicates goes last
    dup_marker = '9' if getattr(obj, 'duplicate_perm', None) else '1'
    return prefix + dup_marker + obj.username
|
114 | 114 | |
|
115 | 115 | |
|
def display_user_group_sort(obj):
    """
    Sort key used by the .permissions() functions of Repository, RepoGroup
    and UserGroup for user-group permission entries.
    """

    perm_level = obj.permission.split('.')[-1]
    return PERMISSION_TYPE_SORT.get(perm_level, '') + obj.users_group_name
|
125 | 125 | |
|
126 | 126 | |
|
def _hash_key(k):
    """Return a stable sha1-based hash of *k*, used as a cache key."""
    return sha1_safe(k)
|
129 | 129 | |
|
130 | 130 | |
|
def in_filter_generator(qry, items, limit=500):
    """
    Splits IN() into multiple with OR
    e.g.::
        cnt = Repository.query().filter(
            or_(
                *in_filter_generator(Repository.repo_id, range(100000))
            )).count()
    """
    if not items:
        # empty list will cause empty query which might cause security issues
        # this can lead to hidden unpleasant results
        items = [-1]

    # one IN() clause per slice of at most `limit` items
    return [
        qry.in_(items[start:start + limit])
        for start in range(0, len(items), limit)
    ]
|
152 | 152 | |
|
153 | 153 | |
|
# default __table_args__ shared by all models below: keeps MySQL tables on
# InnoDB/utf8 and makes SQLite use true AUTOINCREMENT primary keys
base_table_args = {
    'extend_existing': True,
    'mysql_engine': 'InnoDB',
    'mysql_charset': 'utf8',
    'sqlite_autoincrement': True
}
|
160 | 160 | |
|
161 | 161 | |
|
class EncryptedTextValue(TypeDecorator):
    """
    Special column for encrypted long text data, use like::

        value = Column("encrypted_value", EncryptedValue(), nullable=False)

    This column is intelligent so if value is in unencrypted form it return
    unencrypted form, but on save it always encrypts
    """
    cache_ok = True
    impl = Text

    def process_bind_param(self, value, dialect):
        """Encrypt *value* on its way into the database."""
        import rhodecode

        if not value:
            return value

        # protect against double encrypting if values is already encrypted
        if value.startswith(('enc$aes$', 'enc$aes_hmac$', 'enc2$')):
            raise ValueError('value needs to be in unencrypted format, '
                             'ie. not starting with enc$ or enc2$')

        algo = rhodecode.CONFIG.get('rhodecode.encrypted_values.algorithm') or 'aes'
        encrypted = enc_utils.encrypt_value(value, enc_key=ENCRYPTION_KEY, algo=algo)
        return safe_str(encrypted)

    def process_result_value(self, value, dialect):
        """Decrypt *value* on its way out of the database."""
        import rhodecode

        if not value:
            return value

        strict = rhodecode.ConfigGet().get_bool(
            'rhodecode.encrypted_values.strict', missing=True)
        decrypted = enc_utils.decrypt_value(
            value, enc_key=ENCRYPTION_KEY, strict_mode=strict)
        return safe_str(decrypted)
|
207 | 207 | |
|
208 | 208 | |
|
class BaseModel(object):
    """
    Base Model for all classes

    Provides dict/appstruct serialization, simple query helpers and a
    SQLAlchemy-2.0 style ``select``/``execute``/``scalars`` facade for
    every mapped model that mixes it in.
    """

    @classmethod
    def _get_keys(cls):
        """return column names for this model """
        return class_mapper(cls).c.keys()

    def get_dict(self):
        """
        return dict with keys and values corresponding
        to this model data """

        d = {}
        for k in self._get_keys():
            d[k] = getattr(self, k)

        # also use __json__() if present to get additional fields
        _json_attr = getattr(self, '__json__', None)
        if _json_attr:
            # update with attributes from __json__
            if callable(_json_attr):
                _json_attr = _json_attr()
            for k, val in _json_attr.items():
                d[k] = val
        return d

    def get_appstruct(self):
        """return list with keys and values tuples corresponding
        to this model data """

        lst = []
        for k in self._get_keys():
            lst.append((k, getattr(self, k),))
        return lst

    def populate_obj(self, populate_dict):
        """populate model with data from given populate_dict"""

        for k in self._get_keys():
            if k in populate_dict:
                setattr(self, k, populate_dict[k])

    @classmethod
    def query(cls):
        return Session().query(cls)

    @classmethod
    def select(cls, custom_cls=None):
        """
        stmt = cls.select().where(cls.user_id==1)
        # optionally
        stmt = cls.select(User.user_id).where(cls.user_id==1)
        result = cls.execute(stmt) | cls.scalars(stmt)
        """

        if custom_cls:
            stmt = select(custom_cls)
        else:
            stmt = select(cls)
        return stmt

    @classmethod
    def execute(cls, stmt):
        return Session().execute(stmt)

    @classmethod
    def scalars(cls, stmt):
        return Session().scalars(stmt)

    @classmethod
    def get(cls, id_):
        # NOTE: falsy ids (None, 0, '') return None without touching the DB
        if id_:
            return cls.query().get(id_)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch by primary key or raise pyramid HTTPNotFound."""
        from pyramid.httpexceptions import HTTPNotFound

        try:
            id_ = int(id_)
        except (TypeError, ValueError):
            raise HTTPNotFound()

        res = cls.query().get(id_)
        if not res:
            raise HTTPNotFound()
        return res

    @classmethod
    def getAll(cls):
        # deprecated and left for backward compatibility
        return cls.get_all()

    @classmethod
    def get_all(cls):
        return cls.query().all()

    @classmethod
    def delete(cls, id_):
        obj = cls.query().get(id_)
        Session().delete(obj)

    @classmethod
    def identity_cache(cls, session, attr_name, value):
        """
        Return an already-loaded instance from the session identity map whose
        ``attr_name`` equals ``value``; None when absent or ambiguous.
        """
        exist_in_session = []
        for (item_cls, pkey), instance in session.identity_map.items():
            if cls == item_cls and getattr(instance, attr_name) == value:
                exist_in_session.append(instance)
        if exist_in_session:
            if len(exist_in_session) == 1:
                return exist_in_session[0]
            # FIX: log.exception() outside an ``except`` block appends a bogus
            # "NoneType: None" traceback to the record; log.error is correct
            # here since there is no active exception being handled.
            log.error(
                'multiple objects with attr %s and '
                'value %s found with same name: %r',
                attr_name, value, exist_in_session)

    @property
    def cls_name(self):
        return self.__class__.__name__

    def __repr__(self):
        return f'<DB:{self.cls_name}>'
|
334 | 334 | |
|
335 | 335 | |
|
class RhodeCodeSetting(Base, BaseModel):
    """
    Global application setting stored as a name/value/type row.

    The ``app_settings_type`` string selects a converter from
    SETTINGS_TYPES; a ``.encrypted`` suffix on the type additionally routes
    the value through EncryptedTextValue on read and write.
    """
    __tablename__ = 'rhodecode_settings'
    __table_args__ = (
        UniqueConstraint('app_settings_name'),
        base_table_args
    )

    # maps a type name (base part of app_settings_type) to the converter
    # applied when the value is read back
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_str,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }
    DEFAULT_UPDATE_URL = 'https://rhodecode.com/api/v1/info/versions'
    GLOBAL_CONF_KEY = 'app_settings'

    app_settings_id = Column("app_settings_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    app_settings_name = Column("app_settings_name", String(255), nullable=True, unique=None, default=None)
    _app_settings_value = Column("app_settings_value", String(4096), nullable=True, unique=None, default=None)
    _app_settings_type = Column("app_settings_type", String(255), nullable=True, unique=None, default=None)

    def __init__(self, key='', val='', type='unicode'):
        self.app_settings_name = key
        # NOTE: type must be assigned before value — the value setter below
        # consults app_settings_type to decide whether to encrypt
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # stored values must already be native str (converted by the setter)
        assert type(val) == str
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        _type = self.app_settings_type
        if _type:
            # strip an optional '.encrypted' style suffix to get the base type
            _type = self.app_settings_type.split('.')[0]
        # decode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            v = safe_str(cipher.process_result_value(v, None))

        converter = self.SETTINGS_TYPES.get(_type) or \
            self.SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        val = safe_str(val)
        # encode the encrypted value
        if 'encrypted' in self.app_settings_type:
            cipher = EncryptedTextValue()
            val = safe_str(cipher.process_bind_param(val, None))
        self._app_settings_value = val

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        # only the base part (before any '.') must be a known settings type
        if val.split('.')[0] not in self.SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (self.SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    @classmethod
    def get_by_prefix(cls, prefix):
        # all settings whose name starts with `prefix`
        return RhodeCodeSetting.query()\
            .filter(RhodeCodeSetting.app_settings_name.startswith(prefix))\
            .all()

    def __repr__(self):
        return "<%s('%s:%s[%s]')>" % (
            self.cls_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
|
420 | 420 | |
|
421 | 421 | |
|
class RhodeCodeUi(Base, BaseModel):
    """
    Global VCS configuration entries stored as section/key/value rows,
    including the names of the built-in hooks listed below.
    """
    __tablename__ = 'rhodecode_ui'
    __table_args__ = (
        UniqueConstraint('ui_key'),
        base_table_args
    )
    # Sync those values with vcsserver.config.hooks

    HOOK_REPO_SIZE = 'changegroup.repo_size'
    # HG
    HOOK_PRE_PULL = 'preoutgoing.pre_pull'
    HOOK_PULL = 'outgoing.pull_logger'
    HOOK_PRE_PUSH = 'prechangegroup.pre_push'
    HOOK_PRETX_PUSH = 'pretxnchangegroup.pre_push'
    HOOK_PUSH = 'changegroup.push_logger'
    HOOK_PUSH_KEY = 'pushkey.key_push'

    # all built-in hook keys, in pull/push order
    HOOKS_BUILTIN = [
        HOOK_PRE_PULL,
        HOOK_PULL,
        HOOK_PRE_PUSH,
        HOOK_PRETX_PUSH,
        HOOK_PUSH,
        HOOK_PUSH_KEY,
    ]

    # TODO: johbo: Unify way how hooks are configured for git and hg,
    # git part is currently hardcoded.

    # SVN PATTERNS
    SVN_BRANCH_ID = 'vcs_svn_branch'
    SVN_TAG_ID = 'vcs_svn_tag'

    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.cls_name, self.ui_section,
                                    self.ui_key, self.ui_value)
|
470 | 470 | |
|
471 | 471 | |
|
class RepoRhodeCodeSetting(Base, BaseModel):
    """
    Per-repository counterpart of RhodeCodeSetting, scoped by repository_id.

    Unlike RhodeCodeSetting, the value accessors here perform no encryption
    and the type setter accepts only plain SETTINGS_TYPES names (no dotted
    suffix).
    """
    __tablename__ = 'repo_rhodecode_settings'
    __table_args__ = (
        UniqueConstraint(
            'app_settings_name', 'repository_id',
            name='uq_repo_rhodecode_setting_name_repo_id'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    app_settings_id = Column(
        "app_settings_id", Integer(), nullable=False, unique=True,
        default=None, primary_key=True)
    app_settings_name = Column(
        "app_settings_name", String(255), nullable=True, unique=None,
        default=None)
    _app_settings_value = Column(
        "app_settings_value", String(4096), nullable=True, unique=None,
        default=None)
    _app_settings_type = Column(
        "app_settings_type", String(255), nullable=True, unique=None,
        default=None)

    repository = relationship('Repository', viewonly=True)

    def __init__(self, repository_id, key='', val='', type='unicode'):
        self.repository_id = repository_id
        self.app_settings_name = key
        # NOTE: type is assigned before value, mirroring RhodeCodeSetting
        self.app_settings_type = type
        self.app_settings_value = val

    @validates('_app_settings_value')
    def validate_settings_value(self, key, val):
        # stored values must already be native str (converted by the setter)
        assert type(val) == str
        return val

    @hybrid_property
    def app_settings_value(self):
        v = self._app_settings_value
        type_ = self.app_settings_type
        # converters are shared with the global settings model
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        converter = SETTINGS_TYPES.get(type_) or SETTINGS_TYPES['unicode']
        return converter(v)

    @app_settings_value.setter
    def app_settings_value(self, val):
        """
        Setter that will always make sure we use unicode in app_settings_value

        :param val:
        """
        self._app_settings_value = safe_str(val)

    @hybrid_property
    def app_settings_type(self):
        return self._app_settings_type

    @app_settings_type.setter
    def app_settings_type(self, val):
        SETTINGS_TYPES = RhodeCodeSetting.SETTINGS_TYPES
        if val not in SETTINGS_TYPES:
            raise Exception('type must be one of %s got %s'
                            % (SETTINGS_TYPES.keys(), val))
        self._app_settings_type = val

    def __repr__(self):
        return "<%s('%s:%s:%s[%s]')>" % (
            self.cls_name, self.repository.repo_name,
            self.app_settings_name, self.app_settings_value,
            self.app_settings_type
        )
|
545 | 545 | |
|
546 | 546 | |
|
class RepoRhodeCodeUi(Base, BaseModel):
    """
    Per-repository counterpart of RhodeCodeUi: section/key/value VCS config
    rows scoped to a single repository via repository_id.
    """
    __tablename__ = 'repo_rhodecode_ui'
    __table_args__ = (
        UniqueConstraint(
            'repository_id', 'ui_section', 'ui_key',
            name='uq_repo_rhodecode_ui_repository_id_section_key'),
        base_table_args
    )

    repository_id = Column(
        "repository_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=False)
    ui_id = Column(
        "ui_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    ui_section = Column(
        "ui_section", String(255), nullable=True, unique=None, default=None)
    ui_key = Column(
        "ui_key", String(255), nullable=True, unique=None, default=None)
    ui_value = Column(
        "ui_value", String(255), nullable=True, unique=None, default=None)
    ui_active = Column(
        "ui_active", Boolean(), nullable=True, unique=None, default=True)

    repository = relationship('Repository', viewonly=True)

    def __repr__(self):
        return '<%s[%s:%s]%s=>%s]>' % (
            self.cls_name, self.repository.repo_name,
            self.ui_section, self.ui_key, self.ui_value)
|
577 | 577 | |
|
578 | 578 | |
|
579 | 579 | class User(Base, BaseModel): |
|
580 | 580 | __tablename__ = 'users' |
|
581 | 581 | __table_args__ = ( |
|
582 | 582 | UniqueConstraint('username'), UniqueConstraint('email'), |
|
583 | 583 | Index('u_username_idx', 'username'), |
|
584 | 584 | Index('u_email_idx', 'email'), |
|
585 | 585 | base_table_args |
|
586 | 586 | ) |
|
587 | 587 | |
|
588 | 588 | DEFAULT_USER = 'default' |
|
589 | 589 | DEFAULT_USER_EMAIL = 'anonymous@rhodecode.org' |
|
590 | 590 | DEFAULT_GRAVATAR_URL = 'https://secure.gravatar.com/avatar/{md5email}?d=identicon&s={size}' |
|
591 | 591 | RECOVERY_CODES_COUNT = 10 |
|
592 | 592 | |
|
593 | 593 | user_id = Column("user_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
594 | 594 | username = Column("username", String(255), nullable=True, unique=None, default=None) |
|
595 | 595 | password = Column("password", String(255), nullable=True, unique=None, default=None) |
|
596 | 596 | active = Column("active", Boolean(), nullable=True, unique=None, default=True) |
|
597 | 597 | admin = Column("admin", Boolean(), nullable=True, unique=None, default=False) |
|
598 | 598 | name = Column("firstname", String(255), nullable=True, unique=None, default=None) |
|
599 | 599 | lastname = Column("lastname", String(255), nullable=True, unique=None, default=None) |
|
600 | 600 | _email = Column("email", String(255), nullable=True, unique=None, default=None) |
|
601 | 601 | last_login = Column("last_login", DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
602 | 602 | last_activity = Column('last_activity', DateTime(timezone=False), nullable=True, unique=None, default=None) |
|
603 | 603 | description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) |
|
604 | 604 | |
|
605 | 605 | extern_type = Column("extern_type", String(255), nullable=True, unique=None, default=None) |
|
606 | 606 | extern_name = Column("extern_name", String(255), nullable=True, unique=None, default=None) |
|
607 | 607 | _api_key = Column("api_key", String(255), nullable=True, unique=None, default=None) |
|
608 | 608 | inherit_default_permissions = Column("inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True) |
|
609 | 609 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
610 | 610 | _user_data = Column("user_data", LargeBinary(), nullable=True) # JSON data |
|
611 | 611 | |
|
612 | 612 | user_log = relationship('UserLog', back_populates='user') |
|
613 | 613 | user_perms = relationship('UserToPerm', primaryjoin="User.user_id==UserToPerm.user_id", cascade='all, delete-orphan') |
|
614 | 614 | |
|
615 | 615 | repositories = relationship('Repository', back_populates='user') |
|
616 | 616 | repository_groups = relationship('RepoGroup', back_populates='user') |
|
617 | 617 | user_groups = relationship('UserGroup', back_populates='user') |
|
618 | 618 | |
|
619 | 619 | user_followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_user_id==User.user_id', cascade='all', back_populates='follows_user') |
|
620 | 620 | followings = relationship('UserFollowing', primaryjoin='UserFollowing.user_id==User.user_id', cascade='all', back_populates='user') |
|
621 | 621 | |
|
622 | 622 | repo_to_perm = relationship('UserRepoToPerm', primaryjoin='UserRepoToPerm.user_id==User.user_id', cascade='all, delete-orphan') |
|
623 | 623 | repo_group_to_perm = relationship('UserRepoGroupToPerm', primaryjoin='UserRepoGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user') |
|
624 | 624 | user_group_to_perm = relationship('UserUserGroupToPerm', primaryjoin='UserUserGroupToPerm.user_id==User.user_id', cascade='all, delete-orphan', back_populates='user') |
|
625 | 625 | |
|
626 | 626 | group_member = relationship('UserGroupMember', cascade='all', back_populates='user') |
|
627 | 627 | |
|
628 | 628 | notifications = relationship('UserNotification', cascade='all', back_populates='user') |
|
629 | 629 | # notifications assigned to this user |
|
630 | 630 | user_created_notifications = relationship('Notification', cascade='all', back_populates='created_by_user') |
|
631 | 631 | # comments created by this user |
|
632 | 632 | user_comments = relationship('ChangesetComment', cascade='all', back_populates='author') |
|
633 | 633 | # user profile extra info |
|
634 | 634 | user_emails = relationship('UserEmailMap', cascade='all', back_populates='user') |
|
635 | 635 | user_ip_map = relationship('UserIpMap', cascade='all', back_populates='user') |
|
636 | 636 | user_auth_tokens = relationship('UserApiKeys', cascade='all', back_populates='user') |
|
637 | 637 | user_ssh_keys = relationship('UserSshKeys', cascade='all', back_populates='user') |
|
638 | 638 | |
|
639 | 639 | # gists |
|
640 | 640 | user_gists = relationship('Gist', cascade='all', back_populates='owner') |
|
641 | 641 | # user pull requests |
|
642 | 642 | user_pull_requests = relationship('PullRequest', cascade='all', back_populates='author') |
|
643 | 643 | |
|
644 | 644 | # external identities |
|
645 | 645 | external_identities = relationship('ExternalIdentity', primaryjoin="User.user_id==ExternalIdentity.local_user_id", cascade='all') |
|
646 | 646 | # review rules |
|
647 | 647 | user_review_rules = relationship('RepoReviewRuleUser', cascade='all', back_populates='user') |
|
648 | 648 | |
|
649 | 649 | # artifacts owned |
|
650 | 650 | artifacts = relationship('FileStore', primaryjoin='FileStore.user_id==User.user_id', back_populates='upload_user') |
|
651 | 651 | |
|
652 | 652 | # no cascade, set NULL |
|
653 | 653 | scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_user_id==User.user_id', cascade='', back_populates='user') |
|
654 | 654 | |
|
655 | 655 | def __repr__(self): |
|
656 | 656 | return f"<{self.cls_name}('id={self.user_id}, username={self.username}')>" |
|
657 | 657 | |
|
658 | 658 | @hybrid_property |
|
659 | 659 | def email(self): |
|
660 | 660 | return self._email |
|
661 | 661 | |
|
662 | 662 | @email.setter |
|
663 | 663 | def email(self, val): |
|
664 | 664 | self._email = val.lower() if val else None |
|
665 | 665 | |
|
666 | 666 | @hybrid_property |
|
667 | 667 | def first_name(self): |
|
668 | 668 | from rhodecode.lib import helpers as h |
|
669 | 669 | if self.name: |
|
670 | 670 | return h.escape(self.name) |
|
671 | 671 | return self.name |
|
672 | 672 | |
|
673 | 673 | @hybrid_property |
|
674 | 674 | def last_name(self): |
|
675 | 675 | from rhodecode.lib import helpers as h |
|
676 | 676 | if self.lastname: |
|
677 | 677 | return h.escape(self.lastname) |
|
678 | 678 | return self.lastname |
|
679 | 679 | |
|
680 | 680 | @hybrid_property |
|
681 | 681 | def api_key(self): |
|
682 | 682 | """ |
|
683 | 683 | Fetch if exist an auth-token with role ALL connected to this user |
|
684 | 684 | """ |
|
685 | 685 | user_auth_token = UserApiKeys.query()\ |
|
686 | 686 | .filter(UserApiKeys.user_id == self.user_id)\ |
|
687 | 687 | .filter(or_(UserApiKeys.expires == -1, |
|
688 | 688 | UserApiKeys.expires >= time.time()))\ |
|
689 | 689 | .filter(UserApiKeys.role == UserApiKeys.ROLE_ALL).first() |
|
690 | 690 | if user_auth_token: |
|
691 | 691 | user_auth_token = user_auth_token.api_key |
|
692 | 692 | |
|
693 | 693 | return user_auth_token |
|
694 | 694 | |
|
695 | 695 | @api_key.setter |
|
696 | 696 | def api_key(self, val): |
|
697 | 697 | # don't allow to set API key this is deprecated for now |
|
698 | 698 | self._api_key = None |
|
699 | 699 | |
|
700 | 700 | @property |
|
701 | 701 | def reviewer_pull_requests(self): |
|
702 | 702 | return PullRequestReviewers.query() \ |
|
703 | 703 | .options(joinedload(PullRequestReviewers.pull_request)) \ |
|
704 | 704 | .filter(PullRequestReviewers.user_id == self.user_id) \ |
|
705 | 705 | .all() |
|
706 | 706 | |
|
707 | 707 | @property |
|
708 | 708 | def firstname(self): |
|
709 | 709 | # alias for future |
|
710 | 710 | return self.name |
|
711 | 711 | |
|
712 | 712 | @property |
|
713 | 713 | def emails(self): |
|
714 | 714 | other = UserEmailMap.query()\ |
|
715 | 715 | .filter(UserEmailMap.user == self) \ |
|
716 | 716 | .order_by(UserEmailMap.email_id.asc()) \ |
|
717 | 717 | .all() |
|
718 | 718 | return [self.email] + [x.email for x in other] |
|
719 | 719 | |
|
720 | 720 | def emails_cached(self): |
|
721 | 721 | emails = [] |
|
722 | 722 | if self.user_id != self.get_default_user_id(): |
|
723 | 723 | emails = UserEmailMap.query()\ |
|
724 | 724 | .filter(UserEmailMap.user == self) \ |
|
725 | 725 | .order_by(UserEmailMap.email_id.asc()) |
|
726 | 726 | |
|
727 | 727 | emails = emails.options( |
|
728 | 728 | FromCache("sql_cache_short", f"get_user_{self.user_id}_emails") |
|
729 | 729 | ) |
|
730 | 730 | |
|
731 | 731 | return [self.email] + [x.email for x in emails] |
|
732 | 732 | |
|
733 | 733 | @property |
|
734 | 734 | def auth_tokens(self): |
|
735 | 735 | auth_tokens = self.get_auth_tokens() |
|
736 | 736 | return [x.api_key for x in auth_tokens] |
|
737 | 737 | |
|
738 | 738 | def get_auth_tokens(self): |
|
739 | 739 | return UserApiKeys.query()\ |
|
740 | 740 | .filter(UserApiKeys.user == self)\ |
|
741 | 741 | .order_by(UserApiKeys.user_api_key_id.asc())\ |
|
742 | 742 | .all() |
|
743 | 743 | |
|
744 | 744 | @LazyProperty |
|
745 | 745 | def feed_token(self): |
|
746 | 746 | return self.get_feed_token() |
|
747 | 747 | |
|
748 | 748 | def get_feed_token(self, cache=True): |
|
749 | 749 | feed_tokens = UserApiKeys.query()\ |
|
750 | 750 | .filter(UserApiKeys.user == self)\ |
|
751 | 751 | .filter(UserApiKeys.role == UserApiKeys.ROLE_FEED) |
|
752 | 752 | if cache: |
|
753 | 753 | feed_tokens = feed_tokens.options( |
|
754 | 754 | FromCache("sql_cache_short", f"get_user_feed_token_{self.user_id}")) |
|
755 | 755 | |
|
756 | 756 | feed_tokens = feed_tokens.all() |
|
757 | 757 | if feed_tokens: |
|
758 | 758 | return feed_tokens[0].api_key |
|
759 | 759 | return 'NO_FEED_TOKEN_AVAILABLE' |
|
760 | 760 | |
|
761 | 761 | @LazyProperty |
|
762 | 762 | def artifact_token(self): |
|
763 | 763 | return self.get_artifact_token() |
|
764 | 764 | |
|
765 | 765 | def get_artifact_token(self, cache=True): |
|
766 | 766 | artifacts_tokens = UserApiKeys.query()\ |
|
767 | 767 | .filter(UserApiKeys.user == self) \ |
|
768 | 768 | .filter(or_(UserApiKeys.expires == -1, |
|
769 | 769 | UserApiKeys.expires >= time.time())) \ |
|
770 | 770 | .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD) |
|
771 | 771 | |
|
772 | 772 | if cache: |
|
773 | 773 | artifacts_tokens = artifacts_tokens.options( |
|
774 | 774 | FromCache("sql_cache_short", f"get_user_artifact_token_{self.user_id}")) |
|
775 | 775 | |
|
776 | 776 | artifacts_tokens = artifacts_tokens.all() |
|
777 | 777 | if artifacts_tokens: |
|
778 | 778 | return artifacts_tokens[0].api_key |
|
779 | 779 | return 'NO_ARTIFACT_TOKEN_AVAILABLE' |
|
780 | 780 | |
|
781 | 781 | def get_or_create_artifact_token(self): |
|
782 | 782 | artifacts_tokens = UserApiKeys.query()\ |
|
783 | 783 | .filter(UserApiKeys.user == self) \ |
|
784 | 784 | .filter(or_(UserApiKeys.expires == -1, |
|
785 | 785 | UserApiKeys.expires >= time.time())) \ |
|
786 | 786 | .filter(UserApiKeys.role == UserApiKeys.ROLE_ARTIFACT_DOWNLOAD) |
|
787 | 787 | |
|
788 | 788 | artifacts_tokens = artifacts_tokens.all() |
|
789 | 789 | if artifacts_tokens: |
|
790 | 790 | return artifacts_tokens[0].api_key |
|
791 | 791 | else: |
|
792 | 792 | from rhodecode.model.auth_token import AuthTokenModel |
|
793 | 793 | artifact_token = AuthTokenModel().create( |
|
794 | 794 | self, 'auto-generated-artifact-token', |
|
795 | 795 | lifetime=-1, role=UserApiKeys.ROLE_ARTIFACT_DOWNLOAD) |
|
796 | 796 | Session.commit() |
|
797 | 797 | return artifact_token.api_key |
|
798 | 798 | |
|
799 | @hybrid_property | |
|
800 | def secret_2fa(self): | |
|
801 | if not self.user_data.get('secret_2fa'): | |
|
802 | secret = pyotp.random_base32() | |
|
803 | self.update_userdata(secret_2fa=safe_str(enc_utils.encrypt_value(secret, enc_key=ENCRYPTION_KEY))) | |
|
804 | return secret | |
|
805 | return safe_str( | |
|
806 | enc_utils.decrypt_value(self.user_data['secret_2fa'], | |
|
807 | enc_key=ENCRYPTION_KEY, | |
|
808 | strict_mode=ConfigGet().get_bool('rhodecode.encrypted_values.strict', | |
|
809 | missing=True) | |
|
810 | ) | |
|
811 | ) | |
|
812 | ||
|
813 | def is_totp_valid(self, received_code): | |
|
814 | totp = pyotp.TOTP(self.secret_2fa) | |
|
799 | def is_totp_valid(self, received_code, secret): | |
|
800 | totp = pyotp.TOTP(secret) | |
|
815 | 801 | return totp.verify(received_code) |
|
816 | 802 | |
|
817 | def is_2fa_recovery_code_valid(self, received_code): | |
|
803 | def is_2fa_recovery_code_valid(self, received_code, secret): | |
|
818 | 804 | encrypted_recovery_codes = self.user_data.get('recovery_codes_2fa', []) |
|
819 |
recovery_codes = |
|
|
820 | lambda x: safe_str( | |
|
821 | enc_utils.decrypt_value( | |
|
822 | x, | |
|
823 | enc_key=ENCRYPTION_KEY, | |
|
824 | strict_mode=ConfigGet().get_bool('rhodecode.encrypted_values.strict', missing=True) | |
|
825 | )), | |
|
826 | encrypted_recovery_codes)) | |
|
805 | recovery_codes = self.get_2fa_recovery_codes() | |
|
827 | 806 | if received_code in recovery_codes: |
|
828 | 807 | encrypted_recovery_codes.pop(recovery_codes.index(received_code)) |
|
829 | 808 | self.update_userdata(recovery_codes_2fa=encrypted_recovery_codes) |
|
830 | 809 | return True |
|
831 | 810 | return False |
|
832 | 811 | |
|
833 | 812 | @hybrid_property |
|
834 | 813 | def has_forced_2fa(self): |
|
835 | 814 | """ |
|
836 | 815 | Checks if 2fa was forced for ALL users (including current one) |
|
837 | 816 | """ |
|
838 | 817 | from rhodecode.model.settings import SettingsModel |
|
839 | 818 | # So now we're supporting only auth_rhodecode_global_2f |
|
840 | 819 | if value := SettingsModel().get_setting_by_name('auth_rhodecode_global_2fa'): |
|
841 | 820 | return value.app_settings_value |
|
842 | 821 | return False |
|
843 | 822 | |
|
844 | 823 | @hybrid_property |
|
845 | 824 | def has_enabled_2fa(self): |
|
846 | 825 | """ |
|
847 |
Checks if |
|
|
826 | Checks if user enabled 2fa | |
|
848 | 827 | """ |
|
849 | 828 | if value := self.has_forced_2fa: |
|
850 | 829 | return value |
|
851 | 830 | return self.user_data.get('enabled_2fa', False) |
|
852 | 831 | |
|
853 | 832 | @has_enabled_2fa.setter |
|
854 | 833 | def has_enabled_2fa(self, val): |
|
855 | 834 | val = str2bool(val) |
|
856 |
self.update_userdata(enabled_2fa= |
|
|
835 | self.update_userdata(enabled_2fa=val) | |
|
857 | 836 | if not val: |
|
858 | self.update_userdata(secret_2fa=None, recovery_codes_2fa=[]) | |
|
837 | # NOTE: setting to false we clear the user_data to not store any 2fa artifacts | |
|
838 | self.update_userdata(secret_2fa=None, recovery_codes_2fa=[], check_2fa=False) | |
|
839 | Session().commit() | |
|
840 | ||
|
841 | @hybrid_property | |
|
842 | def has_check_2fa_flag(self): | |
|
843 | """ | |
|
844 | Check if check 2fa flag is set for this user | |
|
845 | """ | |
|
846 | value = self.user_data.get('check_2fa', False) | |
|
847 | return value | |
|
848 | ||
|
849 | @has_check_2fa_flag.setter | |
|
850 | def has_check_2fa_flag(self, val): | |
|
851 | val = str2bool(val) | |
|
852 | self.update_userdata(check_2fa=val) | |
|
859 | 853 | Session().commit() |
|
860 | 854 | |
|
861 | def get_2fa_recovery_codes(self): | |
|
855 | @hybrid_property | |
|
856 | def has_seen_2fa_codes(self): | |
|
857 | """ | |
|
858 | get the flag about if user has seen 2fa recovery codes | |
|
859 | """ | |
|
860 | value = self.user_data.get('recovery_codes_2fa_seen', False) | |
|
861 | return value | |
|
862 | ||
|
863 | @has_seen_2fa_codes.setter | |
|
864 | def has_seen_2fa_codes(self, val): | |
|
865 | val = str2bool(val) | |
|
866 | self.update_userdata(recovery_codes_2fa_seen=val) | |
|
867 | Session().commit() | |
|
868 | ||
|
869 | @hybrid_property | |
|
870 | def needs_2fa_configure(self): | |
|
871 | """ | |
|
872 | Determines if setup2fa has completed for this user. Means he has all needed data for 2fa to work. | |
|
873 | ||
|
874 | Currently this is 2fa enabled and secret exists | |
|
875 | """ | |
|
876 | if self.has_enabled_2fa: | |
|
877 | return not self.user_data.get('secret_2fa') | |
|
878 | return False | |
|
879 | ||
|
880 | def init_2fa_recovery_codes(self, persist=True, force=False): | |
|
862 | 881 | """ |
|
863 | 882 | Creates 2fa recovery codes |
|
864 | 883 | """ |
|
865 | 884 | recovery_codes = self.user_data.get('recovery_codes_2fa', []) |
|
866 | 885 | encrypted_codes = [] |
|
867 | if not recovery_codes: | |
|
886 | if not recovery_codes or force: | |
|
868 | 887 | for _ in range(self.RECOVERY_CODES_COUNT): |
|
869 | 888 | recovery_code = pyotp.random_base32() |
|
870 | 889 | recovery_codes.append(recovery_code) |
|
871 |
encrypted_code |
|
|
872 | self.update_userdata(recovery_codes_2fa=encrypted_codes) | |
|
890 | encrypted_code = enc_utils.encrypt_value(safe_bytes(recovery_code), enc_key=ENCRYPTION_KEY) | |
|
891 | encrypted_codes.append(safe_str(encrypted_code)) | |
|
892 | if persist: | |
|
893 | self.update_userdata(recovery_codes_2fa=encrypted_codes, recovery_codes_2fa_seen=False) | |
|
873 | 894 | return recovery_codes |
|
874 | 895 | # User should not check the same recovery codes more than once |
|
875 | 896 | return [] |
|
876 | 897 | |
|
898 | def get_2fa_recovery_codes(self): | |
|
899 | encrypted_recovery_codes = self.user_data.get('recovery_codes_2fa', []) | |
|
900 | strict_mode = ConfigGet().get_bool('rhodecode.encrypted_values.strict', missing=True) | |
|
901 | ||
|
902 | recovery_codes = list(map( | |
|
903 | lambda val: safe_str( | |
|
904 | enc_utils.decrypt_value( | |
|
905 | val, | |
|
906 | enc_key=ENCRYPTION_KEY, | |
|
907 | strict_mode=strict_mode | |
|
908 | )), | |
|
909 | encrypted_recovery_codes)) | |
|
910 | return recovery_codes | |
|
911 | ||
|
912 | def init_secret_2fa(self, persist=True, force=False): | |
|
913 | secret_2fa = self.user_data.get('secret_2fa') | |
|
914 | if not secret_2fa or force: | |
|
915 | secret = pyotp.random_base32() | |
|
916 | if persist: | |
|
917 | self.update_userdata(secret_2fa=safe_str(enc_utils.encrypt_value(safe_bytes(secret), enc_key=ENCRYPTION_KEY))) | |
|
918 | return secret | |
|
919 | return '' | |
|
920 | ||
|
921 | def get_secret_2fa(self) -> str: | |
|
922 | secret_2fa = self.user_data['secret_2fa'] | |
|
923 | if secret_2fa: | |
|
924 | strict_mode = ConfigGet().get_bool('rhodecode.encrypted_values.strict', missing=True) | |
|
925 | return safe_str( | |
|
926 | enc_utils.decrypt_value(secret_2fa, enc_key=ENCRYPTION_KEY, strict_mode=strict_mode)) | |
|
927 | return '' | |
|
928 | ||
|
929 | def set_2fa_secret(self, value): | |
|
930 | encrypted_value = enc_utils.encrypt_value(safe_bytes(value), enc_key=ENCRYPTION_KEY) | |
|
931 | self.update_userdata(secret_2fa=safe_str(encrypted_value)) | |
|
932 | ||
|
877 | 933 | def regenerate_2fa_recovery_codes(self): |
|
878 | 934 | """ |
|
879 | 935 | Regenerates 2fa recovery codes upon request |
|
880 | 936 | """ |
|
881 | self.update_userdata(recovery_codes_2fa=[]) | |
|
882 | Session().flush() | |
|
883 | new_recovery_codes = self.get_2fa_recovery_codes() | |
|
937 | new_recovery_codes = self.init_2fa_recovery_codes(force=True) | |
|
884 | 938 | Session().commit() |
|
885 | 939 | return new_recovery_codes |
|
886 | 940 | |
|
887 | 941 | @classmethod |
|
888 | 942 | def extra_valid_auth_tokens(cls, user, role=None): |
|
889 | 943 | tokens = UserApiKeys.query().filter(UserApiKeys.user == user)\ |
|
890 | 944 | .filter(or_(UserApiKeys.expires == -1, |
|
891 | 945 | UserApiKeys.expires >= time.time())) |
|
892 | 946 | if role: |
|
893 | 947 | tokens = tokens.filter(or_(UserApiKeys.role == role, |
|
894 | 948 | UserApiKeys.role == UserApiKeys.ROLE_ALL)) |
|
895 | 949 | return tokens.all() |
|
896 | 950 | |
|
897 | 951 | def authenticate_by_token(self, auth_token, roles=None, scope_repo_id=None): |
|
898 | 952 | from rhodecode.lib import auth |
|
899 | 953 | |
|
900 | 954 | log.debug('Trying to authenticate user: %s via auth-token, ' |
|
901 | 955 | 'and roles: %s', self, roles) |
|
902 | 956 | |
|
903 | 957 | if not auth_token: |
|
904 | 958 | return False |
|
905 | 959 | |
|
906 | 960 | roles = (roles or []) + [UserApiKeys.ROLE_ALL] |
|
907 | 961 | tokens_q = UserApiKeys.query()\ |
|
908 | 962 | .filter(UserApiKeys.user_id == self.user_id)\ |
|
909 | 963 | .filter(or_(UserApiKeys.expires == -1, |
|
910 | 964 | UserApiKeys.expires >= time.time())) |
|
911 | 965 | |
|
912 | 966 | tokens_q = tokens_q.filter(UserApiKeys.role.in_(roles)) |
|
913 | 967 | |
|
914 | 968 | crypto_backend = auth.crypto_backend() |
|
915 | 969 | enc_token_map = {} |
|
916 | 970 | plain_token_map = {} |
|
917 | 971 | for token in tokens_q: |
|
918 | 972 | if token.api_key.startswith(crypto_backend.ENC_PREF): |
|
919 | 973 | enc_token_map[token.api_key] = token |
|
920 | 974 | else: |
|
921 | 975 | plain_token_map[token.api_key] = token |
|
922 | 976 | log.debug( |
|
923 | 977 | 'Found %s plain and %s encrypted tokens to check for authentication for this user', |
|
924 | 978 | len(plain_token_map), len(enc_token_map)) |
|
925 | 979 | |
|
926 | 980 | # plain token match comes first |
|
927 | 981 | match = plain_token_map.get(auth_token) |
|
928 | 982 | |
|
929 | 983 | # check encrypted tokens now |
|
930 | 984 | if not match: |
|
931 | 985 | for token_hash, token in enc_token_map.items(): |
|
932 | 986 | # NOTE(marcink): this is expensive to calculate, but most secure |
|
933 | 987 | if crypto_backend.hash_check(auth_token, token_hash): |
|
934 | 988 | match = token |
|
935 | 989 | break |
|
936 | 990 | |
|
937 | 991 | if match: |
|
938 | 992 | log.debug('Found matching token %s', match) |
|
939 | 993 | if match.repo_id: |
|
940 | 994 | log.debug('Found scope, checking for scope match of token %s', match) |
|
941 | 995 | if match.repo_id == scope_repo_id: |
|
942 | 996 | return True |
|
943 | 997 | else: |
|
944 | 998 | log.debug( |
|
945 | 999 | 'AUTH_TOKEN: scope mismatch, token has a set repo scope: %s, ' |
|
946 | 1000 | 'and calling scope is:%s, skipping further checks', |
|
947 | 1001 | match.repo, scope_repo_id) |
|
948 | 1002 | return False |
|
949 | 1003 | else: |
|
950 | 1004 | return True |
|
951 | 1005 | |
|
952 | 1006 | return False |
|
953 | 1007 | |
|
954 | 1008 | @property |
|
955 | 1009 | def ip_addresses(self): |
|
956 | 1010 | ret = UserIpMap.query().filter(UserIpMap.user == self).all() |
|
957 | 1011 | return [x.ip_addr for x in ret] |
|
958 | 1012 | |
|
959 | 1013 | @property |
|
960 | 1014 | def username_and_name(self): |
|
961 | 1015 | return f'{self.username} ({self.first_name} {self.last_name})' |
|
962 | 1016 | |
|
963 | 1017 | @property |
|
964 | 1018 | def username_or_name_or_email(self): |
|
965 | 1019 | full_name = self.full_name if self.full_name != ' ' else None |
|
966 | 1020 | return self.username or full_name or self.email |
|
967 | 1021 | |
|
968 | 1022 | @property |
|
969 | 1023 | def full_name(self): |
|
970 | 1024 | return f'{self.first_name} {self.last_name}' |
|
971 | 1025 | |
|
972 | 1026 | @property |
|
973 | 1027 | def full_name_or_username(self): |
|
974 | 1028 | return (f'{self.first_name} {self.last_name}' |
|
975 | 1029 | if (self.first_name and self.last_name) else self.username) |
|
976 | 1030 | |
|
977 | 1031 | @property |
|
978 | 1032 | def full_contact(self): |
|
979 | 1033 | return f'{self.first_name} {self.last_name} <{self.email}>' |
|
980 | 1034 | |
|
981 | 1035 | @property |
|
982 | 1036 | def short_contact(self): |
|
983 | 1037 | return f'{self.first_name} {self.last_name}' |
|
984 | 1038 | |
|
985 | 1039 | @property |
|
986 | 1040 | def is_admin(self): |
|
987 | 1041 | return self.admin |
|
988 | 1042 | |
|
989 | 1043 | @property |
|
990 | 1044 | def language(self): |
|
991 | 1045 | return self.user_data.get('language') |
|
992 | 1046 | |
|
993 | 1047 | def AuthUser(self, **kwargs): |
|
994 | 1048 | """ |
|
995 | 1049 | Returns instance of AuthUser for this user |
|
996 | 1050 | """ |
|
997 | 1051 | from rhodecode.lib.auth import AuthUser |
|
998 | 1052 | return AuthUser(user_id=self.user_id, username=self.username, **kwargs) |
|
999 | 1053 | |
|
1000 | 1054 | @hybrid_property |
|
1001 | 1055 | def user_data(self): |
|
1002 | 1056 | if not self._user_data: |
|
1003 | 1057 | return {} |
|
1004 | 1058 | |
|
1005 | 1059 | try: |
|
1006 | 1060 | return json.loads(self._user_data) or {} |
|
1007 | 1061 | except TypeError: |
|
1008 | 1062 | return {} |
|
1009 | 1063 | |
|
1010 | 1064 | @user_data.setter |
|
1011 | 1065 | def user_data(self, val): |
|
1012 | 1066 | if not isinstance(val, dict): |
|
1013 | 1067 | raise Exception(f'user_data must be dict, got {type(val)}') |
|
1014 | 1068 | try: |
|
1015 | 1069 | self._user_data = safe_bytes(json.dumps(val)) |
|
1016 | 1070 | except Exception: |
|
1017 | 1071 | log.error(traceback.format_exc()) |
|
1018 | 1072 | |
|
1019 | 1073 | @classmethod |
|
1020 | 1074 | def get(cls, user_id, cache=False): |
|
1021 | 1075 | if not user_id: |
|
1022 | 1076 | return |
|
1023 | 1077 | |
|
1024 | 1078 | user = cls.query() |
|
1025 | 1079 | if cache: |
|
1026 | 1080 | user = user.options( |
|
1027 | 1081 | FromCache("sql_cache_short", f"get_users_{user_id}")) |
|
1028 | 1082 | return user.get(user_id) |
|
1029 | 1083 | |
|
1030 | 1084 | @classmethod |
|
1031 | 1085 | def get_by_username(cls, username, case_insensitive=False, |
|
1032 | 1086 | cache=False): |
|
1033 | 1087 | |
|
1034 | 1088 | if case_insensitive: |
|
1035 | 1089 | q = cls.select().where( |
|
1036 | 1090 | func.lower(cls.username) == func.lower(username)) |
|
1037 | 1091 | else: |
|
1038 | 1092 | q = cls.select().where(cls.username == username) |
|
1039 | 1093 | |
|
1040 | 1094 | if cache: |
|
1041 | 1095 | hash_key = _hash_key(username) |
|
1042 | 1096 | q = q.options( |
|
1043 | 1097 | FromCache("sql_cache_short", f"get_user_by_name_{hash_key}")) |
|
1044 | 1098 | |
|
1045 | 1099 | return cls.execute(q).scalar_one_or_none() |
|
1046 | 1100 | |
|
1047 | 1101 | @classmethod |
|
1048 | 1102 | def get_by_username_or_primary_email(cls, user_identifier): |
|
1049 | 1103 | qs = union_all(cls.select().where(func.lower(cls.username) == func.lower(user_identifier)), |
|
1050 | 1104 | cls.select().where(func.lower(cls.email) == func.lower(user_identifier))) |
|
1051 | 1105 | return cls.execute(cls.select(User).from_statement(qs)).scalar_one_or_none() |
|
1052 | 1106 | |
|
1053 | 1107 | @classmethod |
|
1054 | 1108 | def get_by_auth_token(cls, auth_token, cache=False): |
|
1055 | 1109 | |
|
1056 | 1110 | q = cls.select(User)\ |
|
1057 | 1111 | .join(UserApiKeys)\ |
|
1058 | 1112 | .where(UserApiKeys.api_key == auth_token)\ |
|
1059 | 1113 | .where(or_(UserApiKeys.expires == -1, |
|
1060 | 1114 | UserApiKeys.expires >= time.time())) |
|
1061 | 1115 | |
|
1062 | 1116 | if cache: |
|
1063 | 1117 | q = q.options( |
|
1064 | 1118 | FromCache("sql_cache_short", f"get_auth_token_{auth_token}")) |
|
1065 | 1119 | |
|
1066 | 1120 | matched_user = cls.execute(q).scalar_one_or_none() |
|
1067 | 1121 | |
|
1068 | 1122 | return matched_user |
|
1069 | 1123 | |
|
1070 | 1124 | @classmethod |
|
1071 | 1125 | def get_by_email(cls, email, case_insensitive=False, cache=False): |
|
1072 | 1126 | |
|
1073 | 1127 | if case_insensitive: |
|
1074 | 1128 | q = cls.select().where(func.lower(cls.email) == func.lower(email)) |
|
1075 | 1129 | else: |
|
1076 | 1130 | q = cls.select().where(cls.email == email) |
|
1077 | 1131 | |
|
1078 | 1132 | if cache: |
|
1079 | 1133 | email_key = _hash_key(email) |
|
1080 | 1134 | q = q.options( |
|
1081 | 1135 | FromCache("sql_cache_short", f"get_email_key_{email_key}")) |
|
1082 | 1136 | |
|
1083 | 1137 | ret = cls.execute(q).scalar_one_or_none() |
|
1084 | 1138 | |
|
1085 | 1139 | if ret is None: |
|
1086 | 1140 | q = cls.select(UserEmailMap) |
|
1087 | 1141 | # try fetching in alternate email map |
|
1088 | 1142 | if case_insensitive: |
|
1089 | 1143 | q = q.where(func.lower(UserEmailMap.email) == func.lower(email)) |
|
1090 | 1144 | else: |
|
1091 | 1145 | q = q.where(UserEmailMap.email == email) |
|
1092 | 1146 | q = q.options(joinedload(UserEmailMap.user)) |
|
1093 | 1147 | if cache: |
|
1094 | 1148 | q = q.options( |
|
1095 | 1149 | FromCache("sql_cache_short", f"get_email_map_key_{email_key}")) |
|
1096 | 1150 | |
|
1097 | 1151 | result = cls.execute(q).scalar_one_or_none() |
|
1098 | 1152 | ret = getattr(result, 'user', None) |
|
1099 | 1153 | |
|
1100 | 1154 | return ret |
|
1101 | 1155 | |
|
1102 | 1156 | @classmethod |
|
1103 | 1157 | def get_from_cs_author(cls, author): |
|
1104 | 1158 | """ |
|
1105 | 1159 | Tries to get User objects out of commit author string |
|
1106 | 1160 | |
|
1107 | 1161 | :param author: |
|
1108 | 1162 | """ |
|
1109 | 1163 | from rhodecode.lib.helpers import email, author_name |
|
1110 | 1164 | # Valid email in the attribute passed, see if they're in the system |
|
1111 | 1165 | _email = email(author) |
|
1112 | 1166 | if _email: |
|
1113 | 1167 | user = cls.get_by_email(_email, case_insensitive=True) |
|
1114 | 1168 | if user: |
|
1115 | 1169 | return user |
|
1116 | 1170 | # Maybe we can match by username? |
|
1117 | 1171 | _author = author_name(author) |
|
1118 | 1172 | user = cls.get_by_username(_author, case_insensitive=True) |
|
1119 | 1173 | if user: |
|
1120 | 1174 | return user |
|
1121 | 1175 | |
|
1122 | 1176 | def update_userdata(self, **kwargs): |
|
1123 | 1177 | usr = self |
|
1124 | 1178 | old = usr.user_data |
|
1125 | 1179 | old.update(**kwargs) |
|
1126 | 1180 | usr.user_data = old |
|
1127 | 1181 | Session().add(usr) |
|
1128 | 1182 | log.debug('updated userdata with %s', kwargs) |
|
1129 | 1183 | |
|
1130 | 1184 | def update_lastlogin(self): |
|
1131 | 1185 | """Update user lastlogin""" |
|
1132 | 1186 | self.last_login = datetime.datetime.now() |
|
1133 | 1187 | Session().add(self) |
|
1134 | 1188 | log.debug('updated user %s lastlogin', self.username) |
|
1135 | 1189 | |
|
1136 | 1190 | def update_password(self, new_password): |
|
1137 | 1191 | from rhodecode.lib.auth import get_crypt_password |
|
1138 | 1192 | |
|
1139 | 1193 | self.password = get_crypt_password(new_password) |
|
1140 | 1194 | Session().add(self) |
|
1141 | 1195 | |
|
1142 | 1196 | @classmethod |
|
1143 | 1197 | def get_first_super_admin(cls): |
|
1144 | 1198 | stmt = cls.select().where(User.admin == true()).order_by(User.user_id.asc()) |
|
1145 | 1199 | user = cls.scalars(stmt).first() |
|
1146 | 1200 | |
|
1147 | 1201 | if user is None: |
|
1148 | 1202 | raise Exception('FATAL: Missing administrative account!') |
|
1149 | 1203 | return user |
|
1150 | 1204 | |
|
1151 | 1205 | @classmethod |
|
1152 | 1206 | def get_all_super_admins(cls, only_active=False): |
|
1153 | 1207 | """ |
|
1154 | 1208 | Returns all admin accounts sorted by username |
|
1155 | 1209 | """ |
|
1156 | 1210 | qry = User.query().filter(User.admin == true()).order_by(User.username.asc()) |
|
1157 | 1211 | if only_active: |
|
1158 | 1212 | qry = qry.filter(User.active == true()) |
|
1159 | 1213 | return qry.all() |
|
1160 | 1214 | |
|
1161 | 1215 | @classmethod |
|
1162 | 1216 | def get_all_user_ids(cls, only_active=True): |
|
1163 | 1217 | """ |
|
1164 | 1218 | Returns all users IDs |
|
1165 | 1219 | """ |
|
1166 | 1220 | qry = Session().query(User.user_id) |
|
1167 | 1221 | |
|
1168 | 1222 | if only_active: |
|
1169 | 1223 | qry = qry.filter(User.active == true()) |
|
1170 | 1224 | return [x.user_id for x in qry] |
|
1171 | 1225 | |
|
1172 | 1226 | @classmethod |
|
1173 | 1227 | def get_default_user(cls, cache=False, refresh=False): |
|
1174 | 1228 | user = User.get_by_username(User.DEFAULT_USER, cache=cache) |
|
1175 | 1229 | if user is None: |
|
1176 | 1230 | raise Exception('FATAL: Missing default account!') |
|
1177 | 1231 | if refresh: |
|
1178 | 1232 | # The default user might be based on outdated state which |
|
1179 | 1233 | # has been loaded from the cache. |
|
1180 | 1234 | # A call to refresh() ensures that the |
|
1181 | 1235 | # latest state from the database is used. |
|
1182 | 1236 | Session().refresh(user) |
|
1183 | 1237 | |
|
1184 | 1238 | return user |
|
1185 | 1239 | |
|
1186 | 1240 | @classmethod |
|
1187 | 1241 | def get_default_user_id(cls): |
|
1188 | 1242 | import rhodecode |
|
1189 | 1243 | return rhodecode.CONFIG['default_user_id'] |
|
1190 | 1244 | |
|
1191 | 1245 | def _get_default_perms(self, user, suffix=''): |
|
1192 | 1246 | from rhodecode.model.permission import PermissionModel |
|
1193 | 1247 | return PermissionModel().get_default_perms(user.user_perms, suffix) |
|
1194 | 1248 | |
|
1195 | 1249 | def get_default_perms(self, suffix=''): |
|
1196 | 1250 | return self._get_default_perms(self, suffix) |
|
1197 | 1251 | |
|
1198 | 1252 | def get_api_data(self, include_secrets=False, details='full'): |
|
1199 | 1253 | """ |
|
1200 | 1254 | Common function for generating user related data for API |
|
1201 | 1255 | |
|
1202 | 1256 | :param include_secrets: By default secrets in the API data will be replaced |
|
1203 | 1257 | by a placeholder value to prevent exposing this data by accident. In case |
|
1204 | 1258 | this data shall be exposed, set this flag to ``True``. |
|
1205 | 1259 | |
|
1206 | 1260 | :param details: details can be 'basic|full' basic gives only a subset of |
|
1207 | 1261 | the available user information that includes user_id, name and emails. |
|
1208 | 1262 | """ |
|
1209 | 1263 | user = self |
|
1210 | 1264 | user_data = self.user_data |
|
1211 | 1265 | data = { |
|
1212 | 1266 | 'user_id': user.user_id, |
|
1213 | 1267 | 'username': user.username, |
|
1214 | 1268 | 'firstname': user.name, |
|
1215 | 1269 | 'lastname': user.lastname, |
|
1216 | 1270 | 'description': user.description, |
|
1217 | 1271 | 'email': user.email, |
|
1218 | 1272 | 'emails': user.emails, |
|
1219 | 1273 | } |
|
1220 | 1274 | if details == 'basic': |
|
1221 | 1275 | return data |
|
1222 | 1276 | |
|
1223 | 1277 | auth_token_length = 40 |
|
1224 | 1278 | auth_token_replacement = '*' * auth_token_length |
|
1225 | 1279 | |
|
1226 | 1280 | extras = { |
|
1227 | 1281 | 'auth_tokens': [auth_token_replacement], |
|
1228 | 1282 | 'active': user.active, |
|
1229 | 1283 | 'admin': user.admin, |
|
1230 | 1284 | 'extern_type': user.extern_type, |
|
1231 | 1285 | 'extern_name': user.extern_name, |
|
1232 | 1286 | 'last_login': user.last_login, |
|
1233 | 1287 | 'last_activity': user.last_activity, |
|
1234 | 1288 | 'ip_addresses': user.ip_addresses, |
|
1235 | 1289 | 'language': user_data.get('language') |
|
1236 | 1290 | } |
|
1237 | 1291 | data.update(extras) |
|
1238 | 1292 | |
|
1239 | 1293 | if include_secrets: |
|
1240 | 1294 | data['auth_tokens'] = user.auth_tokens |
|
1241 | 1295 | return data |
|
1242 | 1296 | |
|
1243 | 1297 | def __json__(self): |
|
1244 | 1298 | data = { |
|
1245 | 1299 | 'full_name': self.full_name, |
|
1246 | 1300 | 'full_name_or_username': self.full_name_or_username, |
|
1247 | 1301 | 'short_contact': self.short_contact, |
|
1248 | 1302 | 'full_contact': self.full_contact, |
|
1249 | 1303 | } |
|
1250 | 1304 | data.update(self.get_api_data()) |
|
1251 | 1305 | return data |
|
1252 | 1306 | |
|
1253 | 1307 | |
|
1254 | 1308 | class UserApiKeys(Base, BaseModel): |
|
1255 | 1309 | __tablename__ = 'user_api_keys' |
|
1256 | 1310 | __table_args__ = ( |
|
1257 | 1311 | Index('uak_api_key_idx', 'api_key'), |
|
1258 | 1312 | Index('uak_api_key_expires_idx', 'api_key', 'expires'), |
|
1259 | 1313 | base_table_args |
|
1260 | 1314 | ) |
|
1261 | 1315 | |
|
1262 | 1316 | # ApiKey role |
|
1263 | 1317 | ROLE_ALL = 'token_role_all' |
|
1264 | 1318 | ROLE_VCS = 'token_role_vcs' |
|
1265 | 1319 | ROLE_API = 'token_role_api' |
|
1266 | 1320 | ROLE_HTTP = 'token_role_http' |
|
1267 | 1321 | ROLE_FEED = 'token_role_feed' |
|
1268 | 1322 | ROLE_ARTIFACT_DOWNLOAD = 'role_artifact_download' |
|
1269 | 1323 | # The last one is ignored in the list as we only |
|
1270 | 1324 | # use it for one action, and cannot be created by users |
|
1271 | 1325 | ROLE_PASSWORD_RESET = 'token_password_reset' |
|
1272 | 1326 | |
|
1273 | 1327 | ROLES = [ROLE_ALL, ROLE_VCS, ROLE_API, ROLE_HTTP, ROLE_FEED, ROLE_ARTIFACT_DOWNLOAD] |
|
1274 | 1328 | |
|
1275 | 1329 | user_api_key_id = Column("user_api_key_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
1276 | 1330 | user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None) |
|
1277 | 1331 | api_key = Column("api_key", String(255), nullable=False, unique=True) |
|
1278 | 1332 | description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql')) |
|
1279 | 1333 | expires = Column('expires', Float(53), nullable=False) |
|
1280 | 1334 | role = Column('role', String(255), nullable=True) |
|
1281 | 1335 | created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now) |
|
1282 | 1336 | |
|
1283 | 1337 | # scope columns |
|
1284 | 1338 | repo_id = Column( |
|
1285 | 1339 | 'repo_id', Integer(), ForeignKey('repositories.repo_id'), |
|
1286 | 1340 | nullable=True, unique=None, default=None) |
|
1287 | 1341 | repo = relationship('Repository', lazy='joined', back_populates='scoped_tokens') |
|
1288 | 1342 | |
|
1289 | 1343 | repo_group_id = Column( |
|
1290 | 1344 | 'repo_group_id', Integer(), ForeignKey('groups.group_id'), |
|
1291 | 1345 | nullable=True, unique=None, default=None) |
|
1292 | 1346 | repo_group = relationship('RepoGroup', lazy='joined') |
|
1293 | 1347 | |
|
1294 | 1348 | user = relationship('User', lazy='joined', back_populates='user_auth_tokens') |
|
1295 | 1349 | |
|
1296 | 1350 | def __repr__(self): |
|
1297 | 1351 | return f"<{self.cls_name}('{self.role}')>" |
|
1298 | 1352 | |
|
1299 | 1353 | def __json__(self): |
|
1300 | 1354 | data = { |
|
1301 | 1355 | 'auth_token': self.api_key, |
|
1302 | 1356 | 'role': self.role, |
|
1303 | 1357 | 'scope': self.scope_humanized, |
|
1304 | 1358 | 'expired': self.expired |
|
1305 | 1359 | } |
|
1306 | 1360 | return data |
|
1307 | 1361 | |
|
1308 | 1362 | def get_api_data(self, include_secrets=False): |
|
1309 | 1363 | data = self.__json__() |
|
1310 | 1364 | if include_secrets: |
|
1311 | 1365 | return data |
|
1312 | 1366 | else: |
|
1313 | 1367 | data['auth_token'] = self.token_obfuscated |
|
1314 | 1368 | return data |
|
1315 | 1369 | |
|
1316 | 1370 | @hybrid_property |
|
1317 | 1371 | def description_safe(self): |
|
1318 | 1372 | from rhodecode.lib import helpers as h |
|
1319 | 1373 | return h.escape(self.description) |
|
1320 | 1374 | |
|
1321 | 1375 | @property |
|
1322 | 1376 | def expired(self): |
|
1323 | 1377 | if self.expires == -1: |
|
1324 | 1378 | return False |
|
1325 | 1379 | return time.time() > self.expires |
|
1326 | 1380 | |
|
1327 | 1381 | @classmethod |
|
1328 | 1382 | def _get_role_name(cls, role): |
|
1329 | 1383 | return { |
|
1330 | 1384 | cls.ROLE_ALL: _('all'), |
|
1331 | 1385 | cls.ROLE_HTTP: _('http/web interface'), |
|
1332 | 1386 | cls.ROLE_VCS: _('vcs (git/hg/svn protocol)'), |
|
1333 | 1387 | cls.ROLE_API: _('api calls'), |
|
1334 | 1388 | cls.ROLE_FEED: _('feed access'), |
|
1335 | 1389 | cls.ROLE_ARTIFACT_DOWNLOAD: _('artifacts downloads'), |
|
1336 | 1390 | }.get(role, role) |
|
1337 | 1391 | |
|
    @classmethod
    def _get_role_description(cls, role):
        # Long-form, translated description of a role constant; unknown roles
        # fall back to the raw value.
        return {
            cls.ROLE_ALL: _('Token for all actions.'),
            cls.ROLE_HTTP: _('Token to access RhodeCode pages via web interface without '
                             'login using `api_access_controllers_whitelist` functionality.'),
            cls.ROLE_VCS: _('Token to interact over git/hg/svn protocols. '
                            'Requires auth_token authentication plugin to be active. <br/>'
                            'Such Token should be used then instead of a password to '
                            'interact with a repository, and additionally can be '
                            'limited to single repository using repo scope.'),
            cls.ROLE_API: _('Token limited to api calls.'),
            cls.ROLE_FEED: _('Token to read RSS/ATOM feed.'),
            cls.ROLE_ARTIFACT_DOWNLOAD: _('Token for artifacts downloads.'),
        }.get(role, role)
|
1353 | 1407 | |
|
    @property
    def role_humanized(self):
        # Display label for this token's role.
        return self._get_role_name(self.role)
|
1357 | 1411 | |
|
1358 | 1412 | def _get_scope(self): |
|
1359 | 1413 | if self.repo: |
|
1360 | 1414 | return 'Repository: {}'.format(self.repo.repo_name) |
|
1361 | 1415 | if self.repo_group: |
|
1362 | 1416 | return 'RepositoryGroup: {} (recursive)'.format(self.repo_group.group_name) |
|
1363 | 1417 | return 'Global' |
|
1364 | 1418 | |
|
    @property
    def scope_humanized(self):
        # Human-readable scope string, e.g. 'Global' or 'Repository: x'.
        return self._get_scope()
|
1368 | 1422 | |
|
1369 | 1423 | @property |
|
1370 | 1424 | def token_obfuscated(self): |
|
1371 | 1425 | if self.api_key: |
|
1372 | 1426 | return self.api_key[:4] + "****" |
|
1373 | 1427 | |
|
1374 | 1428 | |
|
class UserEmailMap(Base, BaseModel):
    """Extra (non-primary) email addresses attached to a user account."""
    __tablename__ = 'user_email_map'
    __table_args__ = (
        Index('uem_email_idx', 'email'),
        Index('uem_user_id_idx', 'user_id'),
        UniqueConstraint('email'),
        base_table_args
    )

    email_id = Column("email_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    _email = Column("email", String(255), nullable=True, unique=False, default=None)
    user = relationship('User', lazy='joined', back_populates='user_emails')

    @validates('_email')
    def validate_email(self, key, email):
        # An extra email must never duplicate any user's primary email.
        main_email = Session().query(User).filter(User.email == email).scalar()
        if main_email is not None:
            # fix: message previously read "is present is user table"
            raise AttributeError('email %s is present in user table' % email)
        return email

    @hybrid_property
    def email(self):
        return self._email

    @email.setter
    def email(self, val):
        # Stored lowercase so lookups are case-insensitive.
        self._email = val.lower() if val else None
|
1404 | 1458 | |
|
1405 | 1459 | |
|
class UserIpMap(Base, BaseModel):
    """Per-user IP allow-list entry; a CIDR expands to a [start, end] range."""
    __tablename__ = 'user_ip_map'
    __table_args__ = (
        UniqueConstraint('user_id', 'ip_addr'),
        base_table_args
    )

    ip_id = Column("ip_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    ip_addr = Column("ip_addr", String(255), nullable=True, unique=False, default=None)
    active = Column("active", Boolean(), nullable=True, unique=None, default=True)
    description = Column("description", String(10000), nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', back_populates='user_ip_map')

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe to render in templates.
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @classmethod
    def _get_ip_range(cls, ip_addr):
        # strict=False accepts a host address with host bits set
        # (e.g. '1.2.3.4/24'), returning its enclosing network.
        net = ipaddress.ip_network(safe_str(ip_addr), strict=False)
        return [str(net.network_address), str(net.broadcast_address)]

    def __json__(self):
        return {
            'ip_addr': self.ip_addr,
            'ip_range': self._get_ip_range(self.ip_addr),
        }

    def __repr__(self):
        return f"<{self.cls_name}('user_id={self.user_id} => ip={self.ip_addr}')>"
|
1438 | 1492 | |
|
1439 | 1493 | |
|
class UserSshKeys(Base, BaseModel):
    """SSH public keys registered for a user; fingerprints are globally unique."""
    __tablename__ = 'user_ssh_keys'
    __table_args__ = (
        Index('usk_ssh_key_fingerprint_idx', 'ssh_key_fingerprint'),

        UniqueConstraint('ssh_key_fingerprint'),

        base_table_args
    )

    ssh_key_id = Column('ssh_key_id', Integer(), nullable=False, unique=True, default=None, primary_key=True)
    ssh_key_data = Column('ssh_key_data', String(10240), nullable=False, unique=None, default=None)
    ssh_key_fingerprint = Column('ssh_key_fingerprint', String(255), nullable=False, unique=None, default=None)

    description = Column('description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    # last time the key was used for authentication, if ever
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True, default=None)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)

    user = relationship('User', lazy='joined', back_populates='user_ssh_keys')

    def __json__(self):
        # Note: the key material (ssh_key_data) is deliberately not exposed.
        data = {
            'ssh_fingerprint': self.ssh_key_fingerprint,
            'description': self.description,
            'created_on': self.created_on
        }
        return data

    def get_api_data(self):
        data = self.__json__()
        return data
|
1473 | 1527 | |
|
1474 | 1528 | |
|
class UserLog(Base, BaseModel):
    """Audit-log entry; user/repo names are denormalized so rows survive deletion
    of the referenced user or repository (FKs use ON DELETE SET NULL)."""
    __tablename__ = 'user_logs'
    __table_args__ = (
        base_table_args,
    )

    # audit-entry schema versions
    VERSION_1 = 'v1'
    VERSION_2 = 'v2'
    VERSIONS = [VERSION_1, VERSION_2]

    user_log_id = Column("user_log_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id',ondelete='SET NULL'), nullable=True, unique=None, default=None)
    username = Column("username", String(255), nullable=True, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id', ondelete='SET NULL'), nullable=True, unique=None, default=None)
    repository_name = Column("repository_name", String(255), nullable=True, unique=None, default=None)
    user_ip = Column("user_ip", String(255), nullable=True, unique=None, default=None)
    action = Column("action", Text().with_variant(Text(1200000), 'mysql'), nullable=True, unique=None, default=None)
    action_date = Column("action_date", DateTime(timezone=False), nullable=True, unique=None, default=None)

    version = Column("version", String(255), nullable=True, default=VERSION_1)
    # mutable JSON payloads; LONGTEXT variant avoids MySQL's TEXT size limit
    user_data = Column('user_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    action_data = Column('action_data_json', MutationObj.as_mutable(JsonType(dialect_map=dict(mysql=LONGTEXT()))))
    user = relationship('User', cascade='', back_populates='user_log')
    repository = relationship('Repository', cascade='', back_populates='logs')

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.repository_name}:{self.action}')>"

    def __json__(self):
        return {
            'user_id': self.user_id,
            'username': self.username,
            'repository_id': self.repository_id,
            'repository_name': self.repository_name,
            'user_ip': self.user_ip,
            'action_date': self.action_date,
            'action': self.action,
        }

    @hybrid_property
    def entry_id(self):
        # Alias for the primary key, used by generic audit-log consumers.
        return self.user_log_id

    @property
    def action_as_day(self):
        # Calendar day of the action (datetime truncated to a date).
        return datetime.date(*self.action_date.timetuple()[:3])
|
1521 | 1575 | |
|
1522 | 1576 | |
|
class UserGroup(Base, BaseModel):
    """A named group of users, usable as a permission target and as a member
    of other groups' permission grants."""
    __tablename__ = 'users_groups'
    __table_args__ = (
        base_table_args,
    )

    users_group_id = Column("users_group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_name = Column("users_group_name", String(255), nullable=False, unique=True, default=None)
    user_group_description = Column("user_group_description", String(10000), nullable=True, unique=None, default=None)
    users_group_active = Column("users_group_active", Boolean(), nullable=True, unique=None, default=None)
    inherit_default_permissions = Column("users_group_inherit_default_permissions", Boolean(), nullable=False, unique=None, default=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    _group_data = Column("group_data", LargeBinary(), nullable=True)  # JSON data

    members = relationship('UserGroupMember', cascade="all, delete-orphan", lazy="joined", back_populates='users_group')
    users_group_to_perm = relationship('UserGroupToPerm', cascade='all', back_populates='users_group')
    users_group_repo_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='users_group')
    users_group_repo_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='users_group')
    user_user_group_to_perm = relationship('UserUserGroupToPerm', cascade='all', back_populates='user_group')

    user_group_user_group_to_perm = relationship('UserGroupUserGroupToPerm', primaryjoin="UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id", cascade='all', back_populates='target_user_group')

    user_group_review_rules = relationship('RepoReviewRuleUserGroup', cascade='all', back_populates='users_group')
    user = relationship('User', primaryjoin="User.user_id==UserGroup.user_id", back_populates='user_groups')

    @classmethod
    def _load_group_data(cls, column):
        """Deserialize the raw JSON column; empty/invalid data yields {}."""
        if not column:
            return {}

        try:
            return json.loads(column) or {}
        except TypeError:
            return {}

    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe to render in templates.
        from rhodecode.lib import helpers as h
        return h.escape(self.user_group_description)

    @hybrid_property
    def group_data(self):
        return self._load_group_data(self._group_data)

    @group_data.expression
    def group_data(self, **kwargs):
        # SQL-level access uses the raw column.
        return self._group_data

    @group_data.setter
    def group_data(self, val):
        try:
            self._group_data = json.dumps(val)
        except Exception:
            log.error(traceback.format_exc())

    @classmethod
    def _load_sync(cls, group_data):
        # External-sync source type (e.g. LDAP), stored inside group_data.
        if group_data:
            return group_data.get('extern_type')

    @property
    def sync(self):
        return self._load_sync(self.group_data)

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.users_group_id}:{self.users_group_name}')>"

    @classmethod
    def get_by_group_name(cls, group_name, cache=False,
                          case_insensitive=False):
        """Fetch a group by name, optionally case-insensitively and via the
        short SQL cache."""
        if case_insensitive:
            q = cls.query().filter(func.lower(cls.users_group_name) ==
                                   func.lower(group_name))

        else:
            q = cls.query().filter(cls.users_group_name == group_name)
        if cache:
            name_key = _hash_key(group_name)
            q = q.options(
                FromCache("sql_cache_short", f"get_group_{name_key}"))
        return q.scalar()

    @classmethod
    def get(cls, user_group_id, cache=False):
        """Fetch a group by id; returns None for a falsy id."""
        if not user_group_id:
            return

        user_group = cls.query()
        if cache:
            user_group = user_group.options(
                FromCache("sql_cache_short", f"get_users_group_{user_group_id}"))
        return user_group.get(user_group_id)

    def permissions(self, with_admins=True, with_owner=True,
                    expand_from_user_groups=False):
        """
        Permissions for user groups: owner row, super-admin rows, direct user
        permissions and (optionally) members of permitted user groups.
        """
        _admin_perm = 'usergroup.admin'

        owner_row = []
        if with_owner:
            usr = AttributeDict(self.user.get_dict())
            usr.owner_row = True
            usr.permission = _admin_perm
            owner_row.append(usr)

        super_admin_ids = []
        super_admin_rows = []
        if with_admins:
            for usr in User.get_all_super_admins():
                super_admin_ids.append(usr.user_id)
                # if this admin is also owner, don't double the record
                # fix: guard owner_row — it is empty when with_owner=False,
                # and indexing it unconditionally raised IndexError
                if owner_row and usr.user_id == owner_row[0].user_id:
                    owner_row[0].admin_row = True
                else:
                    usr = AttributeDict(usr.get_dict())
                    usr.admin_row = True
                    usr.permission = _admin_perm
                    super_admin_rows.append(usr)

        q = UserUserGroupToPerm.query().filter(UserUserGroupToPerm.user_group == self)
        q = q.options(joinedload(UserUserGroupToPerm.user_group),
                      joinedload(UserUserGroupToPerm.user),
                      joinedload(UserUserGroupToPerm.permission),)

        # get owners and admins and permissions. We do a trick of re-writing
        # objects from sqlalchemy to named-tuples due to sqlalchemy session
        # has a global reference and changing one object propagates to all
        # others. This means if admin is also an owner admin_row that change
        # would propagate to both objects
        perm_rows = []
        for _usr in q.all():
            usr = AttributeDict(_usr.user.get_dict())
            # if this user is also owner/admin, mark as duplicate record
            # fix: same owner_row guard as above
            if (owner_row and usr.user_id == owner_row[0].user_id) or usr.user_id in super_admin_ids:
                usr.duplicate_perm = True
            usr.permission = _usr.permission.permission_name
            perm_rows.append(usr)

        # filter the perm rows by 'default' first and then sort them by
        # admin,write,read,none permissions sorted again alphabetically in
        # each group
        perm_rows = sorted(perm_rows, key=display_user_sort)

        user_groups_rows = []
        if expand_from_user_groups:
            for ug in self.permission_user_groups(with_members=True):
                for user_data in ug.members:
                    user_groups_rows.append(user_data)

        return super_admin_rows + owner_row + perm_rows + user_groups_rows

    def permission_user_groups(self, with_members=False):
        """User groups granted permission on this group, sorted for display."""
        q = UserGroupUserGroupToPerm.query()\
            .filter(UserGroupUserGroupToPerm.target_user_group == self)
        q = q.options(joinedload(UserGroupUserGroupToPerm.user_group),
                      joinedload(UserGroupUserGroupToPerm.target_user_group),
                      joinedload(UserGroupUserGroupToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            entry = AttributeDict(_user_group.user_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.user_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows

    def _get_default_perms(self, user_group, suffix=''):
        from rhodecode.model.permission import PermissionModel
        return PermissionModel().get_default_perms(user_group.users_group_to_perm, suffix)

    def get_default_perms(self, suffix=''):
        return self._get_default_perms(self, suffix)

    def get_api_data(self, with_group_members=True, include_secrets=False):
        """
        :param include_secrets: See :meth:`User.get_api_data`, this parameter is
            basically forwarded.

        """
        user_group = self
        data = {
            'users_group_id': user_group.users_group_id,
            'group_name': user_group.users_group_name,
            'group_description': user_group.user_group_description,
            'active': user_group.users_group_active,
            'owner': user_group.user.username,
            'sync': user_group.sync,
            'owner_email': user_group.user.email,
        }

        if with_group_members:
            users = []
            for user in user_group.members:
                user = user.user
                users.append(user.get_api_data(include_secrets=include_secrets))
            data['users'] = users

        return data
|
1728 | 1782 | |
|
1729 | 1783 | |
|
class UserGroupMember(Base, BaseModel):
    """Association row linking one user to one user group."""
    __tablename__ = 'users_groups_members'
    __table_args__ = (
        base_table_args,
    )

    users_group_member_id = Column("users_group_member_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)

    user = relationship('User', lazy='joined', back_populates='group_member')
    users_group = relationship('UserGroup', back_populates='members')

    def __init__(self, gr_id='', u_id=''):
        # NOTE(review): defaults are empty strings rather than None; callers
        # are expected to always pass both ids.
        self.users_group_id = gr_id
        self.user_id = u_id
|
1746 | 1800 | |
|
1747 | 1801 | |
|
class RepositoryField(Base, BaseModel):
    """User-defined extra metadata field attached to a repository."""
    __tablename__ = 'repositories_fields'
    __table_args__ = (
        UniqueConstraint('repository_id', 'field_key'),  # no-multi field
        base_table_args,
    )

    PREFIX = 'ex_'  # prefix used in form to not conflict with already existing fields

    repo_field_id = Column("repo_field_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    field_key = Column("field_key", String(250))
    field_label = Column("field_label", String(1024), nullable=False)
    field_value = Column("field_value", String(10000), nullable=False)
    field_desc = Column("field_desc", String(1024), nullable=False)
    field_type = Column("field_type", String(255), nullable=False, unique=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    repository = relationship('Repository', back_populates='extra_fields')

    @property
    def field_key_prefixed(self):
        # fix: use the PREFIX constant instead of a hard-coded 'ex_' so this
        # stays consistent with un_prefix_key (output is unchanged).
        return '%s%s' % (self.PREFIX, self.field_key)

    @classmethod
    def un_prefix_key(cls, key):
        """Strip the form prefix from a key, if present."""
        if key.startswith(cls.PREFIX):
            return key[len(cls.PREFIX):]
        return key

    @classmethod
    def get_by_key_name(cls, key, repo):
        """Fetch the field row for (repo, key), or None."""
        row = cls.query()\
            .filter(cls.repository == repo)\
            .filter(cls.field_key == key).scalar()
        return row
|
1784 | 1838 | |
|
1785 | 1839 | |
|
class Repository(Base, BaseModel):
    # NOTE: the class body continues beyond this section (methods follow).
    __tablename__ = 'repositories'
    __table_args__ = (
        Index('r_repo_name_idx', 'repo_name', mysql_length=255),
        base_table_args,
    )
    # templates for generated clone/push URLs
    DEFAULT_CLONE_URI = '{scheme}://{user}@{netloc}/{repo}'
    DEFAULT_CLONE_URI_ID = '{scheme}://{user}@{netloc}/_{repoid}'
    DEFAULT_CLONE_URI_SSH = 'ssh://{sys_user}@{hostname}/{repo}'

    # repo creation lifecycle states
    STATE_CREATED = 'repo_state_created'
    STATE_PENDING = 'repo_state_pending'
    STATE_ERROR = 'repo_state_error'

    # origins of a repository lock (see `locked` property)
    LOCK_AUTOMATIC = 'lock_auto'
    LOCK_API = 'lock_api'
    LOCK_WEB = 'lock_web'
    LOCK_PULL = 'lock_pull'

    NAME_SEP = URL_SEP

    repo_id = Column(
        "repo_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    # backing columns for hybrid properties are underscore-prefixed
    _repo_name = Column(
        "repo_name", Text(), nullable=False, default=None)
    repo_name_hash = Column(
        "repo_name_hash", String(255), nullable=False, unique=True)
    repo_state = Column("repo_state", String(255), nullable=True)

    # remote URIs are stored encrypted at rest
    clone_uri = Column(
        "clone_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    push_uri = Column(
        "push_uri", EncryptedTextValue(), nullable=True, unique=False,
        default=None)
    repo_type = Column(
        "repo_type", String(255), nullable=False, unique=False, default=None)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
        unique=False, default=None)
    private = Column(
        "private", Boolean(), nullable=True, unique=None, default=None)
    archived = Column(
        "archived", Boolean(), nullable=True, unique=None, default=None)
    enable_statistics = Column(
        "statistics", Boolean(), nullable=True, unique=None, default=True)
    enable_downloads = Column(
        "downloads", Boolean(), nullable=True, unique=None, default=True)
    description = Column(
        "description", String(10000), nullable=True, unique=None, default=None)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=True, unique=None,
        default=datetime.datetime.now)
    _landing_revision = Column(
        "landing_revision", String(255), nullable=False, unique=False,
        default=None)
    enable_locking = Column(
        "enable_locking", Boolean(), nullable=False, unique=None,
        default=False)
    _locked = Column(
        "locked", String(255), nullable=True, unique=False, default=None)
    _changeset_cache = Column(
        "changeset_cache", LargeBinary(), nullable=True)  # JSON data

    fork_id = Column(
        "fork_id", Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=False, default=None)
    group_id = Column(
        "group_id", Integer(), ForeignKey('groups.group_id'), nullable=True,
        unique=False, default=None)

    user = relationship('User', lazy='joined', back_populates='repositories')
    # self-referential: the repo this one was forked from
    fork = relationship('Repository', remote_side=repo_id, lazy='joined')
    group = relationship('RepoGroup', lazy='joined')
    repo_to_perm = relationship('UserRepoToPerm', cascade='all', order_by='UserRepoToPerm.repo_to_perm_id')
    users_group_to_perm = relationship('UserGroupRepoToPerm', cascade='all', back_populates='repository')
    stats = relationship('Statistics', cascade='all', uselist=False)

    followers = relationship('UserFollowing', primaryjoin='UserFollowing.follows_repo_id==Repository.repo_id', cascade='all', back_populates='follows_repository')
    extra_fields = relationship('RepositoryField', cascade="all, delete-orphan", back_populates='repository')

    logs = relationship('UserLog', back_populates='repository')

    comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='repo')

    pull_requests_source = relationship(
        'PullRequest',
        primaryjoin='PullRequest.source_repo_id==Repository.repo_id',
        cascade="all, delete-orphan",
        overlaps="source_repo"
    )
    pull_requests_target = relationship(
        'PullRequest',
        primaryjoin='PullRequest.target_repo_id==Repository.repo_id',
        cascade="all, delete-orphan",
        overlaps="target_repo"
    )

    ui = relationship('RepoRhodeCodeUi', cascade="all")
    settings = relationship('RepoRhodeCodeSetting', cascade="all")
    integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo')

    scoped_tokens = relationship('UserApiKeys', cascade="all", back_populates='repo')

    # no cascade, set NULL
    artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_id==Repository.repo_id', viewonly=True)

    review_rules = relationship('RepoReviewRule')
    user_branch_perms = relationship('UserToRepoBranchPermission')
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission')
|
1900 | 1954 | |
|
1901 | 1955 | def __repr__(self): |
|
1902 | 1956 | return "<%s('%s:%s')>" % (self.cls_name, self.repo_id, self.repo_name) |
|
1903 | 1957 | |
|
    @hybrid_property
    def description_safe(self):
        # HTML-escaped description, safe to render in templates.
        from rhodecode.lib import helpers as h
        return h.escape(self.description)
|
1908 | 1962 | |
|
    @hybrid_property
    def landing_rev(self):
        # always should return [rev_type, rev], e.g ['branch', 'master']
        # NOTE(review): a bare value without ':' is treated as type 'rev';
        # any content after a second ':' is dropped by the indexing below.
        if self._landing_revision:
            _rev_info = self._landing_revision.split(':')
            if len(_rev_info) < 2:
                _rev_info.insert(0, 'rev')
            return [_rev_info[0], _rev_info[1]]
        return [None, None]
|
1918 | 1972 | |
|
    @property
    def landing_ref_type(self):
        # e.g. 'branch', 'tag' or 'rev' — first part of landing_rev.
        return self.landing_rev[0]
|
1922 | 1976 | |
|
    @property
    def landing_ref_name(self):
        # e.g. 'master' — second part of landing_rev.
        return self.landing_rev[1]
|
1926 | 1980 | |
|
    @landing_rev.setter
    def landing_rev(self, val):
        # Stored verbatim; must already be in '<rev_type>:<rev>' form.
        if ':' not in val:
            raise ValueError('value must be delimited with `:` and consist '
                             'of <rev_type>:<rev>, got %s instead' % val)
        self._landing_revision = val
|
1933 | 1987 | |
|
    @hybrid_property
    def locked(self):
        # Deserialize 'user_id:timestamp:reason' into (user_id, time, reason).
        # NOTE(review): locked case returns a tuple, unlocked a list — callers
        # that only unpack are unaffected, but equality checks would differ.
        if self._locked:
            user_id, timelocked, reason = self._locked.split(':')
            lock_values = int(user_id), timelocked, reason
        else:
            lock_values = [None, None, None]
        return lock_values
|
1942 | 1996 | |
|
1943 | 1997 | @locked.setter |
|
1944 | 1998 | def locked(self, val): |
|
1945 | 1999 | if val and isinstance(val, (list, tuple)): |
|
1946 | 2000 | self._locked = ':'.join(map(str, val)) |
|
1947 | 2001 | else: |
|
1948 | 2002 | self._locked = None |
|
1949 | 2003 | |
|
    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        """Deserialize a raw JSON commit-cache string.

        Falls back to an EmptyCommit stub dict when the raw value is missing
        or cannot be parsed.
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            # json round-trip returns a plain, JSON-serializable copy
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            # raw value was not a string/bytes
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy
|
1965 | 2019 | |
|
1966 | 2020 | @hybrid_property |
|
1967 | 2021 | def changeset_cache(self): |
|
1968 | 2022 | return self._load_changeset_cache(self.repo_id, self._changeset_cache) |
|
1969 | 2023 | |
|
1970 | 2024 | @changeset_cache.setter |
|
1971 | 2025 | def changeset_cache(self, val): |
|
1972 | 2026 | try: |
|
1973 | 2027 | self._changeset_cache = json.dumps(val) |
|
1974 | 2028 | except Exception: |
|
1975 | 2029 | log.error(traceback.format_exc()) |
|
1976 | 2030 | |
|
    @hybrid_property
    def repo_name(self):
        """Full repository name (NAME_SEP-joined path inside the repo store)."""
        return self._repo_name
|
1980 | 2034 | |
|
    @repo_name.setter
    def repo_name(self, value):
        """Store the repo name and refresh its sha1 hash (used for lookups)."""
        self._repo_name = value
        self.repo_name_hash = sha1(safe_bytes(value))
|
1985 | 2039 | |
|
1986 | 2040 | @classmethod |
|
1987 | 2041 | def normalize_repo_name(cls, repo_name): |
|
1988 | 2042 | """ |
|
1989 | 2043 | Normalizes os specific repo_name to the format internally stored inside |
|
1990 | 2044 | database using URL_SEP |
|
1991 | 2045 | |
|
1992 | 2046 | :param cls: |
|
1993 | 2047 | :param repo_name: |
|
1994 | 2048 | """ |
|
1995 | 2049 | return cls.NAME_SEP.join(repo_name.split(os.sep)) |
|
1996 | 2050 | |
|
    @classmethod
    def get_by_repo_name(cls, repo_name, cache=False, identity_cache=False):
        """Look a repository up by its full name.

        :param cache: use the short SQL query cache when True
        :param identity_cache: prefer the SQLAlchemy identity-map cache
        :return: Repository instance or None
        """
        session = Session()
        q = session.query(cls).filter(cls.repo_name == repo_name)

        if cache:
            if identity_cache:
                # identity-map hit avoids a query entirely
                val = cls.identity_cache(session, 'repo_name', repo_name)
                if val:
                    return val
            else:
                cache_key = f"get_repo_by_name_{_hash_key(repo_name)}"
                q = q.options(
                    FromCache("sql_cache_short", cache_key))

        return q.scalar()
|
2013 | 2067 | |
|
2014 | 2068 | @classmethod |
|
2015 | 2069 | def get_by_id_or_repo_name(cls, repoid): |
|
2016 | 2070 | if isinstance(repoid, int): |
|
2017 | 2071 | try: |
|
2018 | 2072 | repo = cls.get(repoid) |
|
2019 | 2073 | except ValueError: |
|
2020 | 2074 | repo = None |
|
2021 | 2075 | else: |
|
2022 | 2076 | repo = cls.get_by_repo_name(repoid) |
|
2023 | 2077 | return repo |
|
2024 | 2078 | |
|
    @classmethod
    def get_by_full_path(cls, repo_full_path):
        """Look a repository up by its absolute filesystem path."""
        # strip the repo-store base path, keep only the relative name
        repo_name = repo_full_path.split(cls.base_path(), 1)[-1]
        repo_name = cls.normalize_repo_name(repo_name)
        return cls.get_by_repo_name(repo_name.strip(URL_SEP))
|
2030 | 2084 | |
|
    @classmethod
    def get_repo_forks(cls, repo_id):
        """Return a (non-executed) query of repositories forked from `repo_id`."""
        return cls.query().filter(Repository.fork_id == repo_id)
|
2034 | 2088 | |
|
    @classmethod
    def base_path(cls):
        """
        Return the base filesystem path under which all repositories are stored.
        """
        from rhodecode.lib.utils import get_rhodecode_repo_store_path
        return get_rhodecode_repo_store_path()
|
2044 | 2098 | |
|
    @classmethod
    def get_all_repos(cls, user_id=Optional(None), group_id=Optional(None),
                      case_insensitive=True, archived=False):
        """Return all repositories, optionally filtered by owner and/or group.

        :param user_id: filter by owner when given (Optional sentinel = no filter)
        :param group_id: filter by repo group when given
        :param case_insensitive: order by lowercased name when True
        :param archived: include archived repositories when True
        """
        q = Repository.query()

        if not archived:
            q = q.filter(Repository.archived.isnot(true()))

        # Optional() is a sentinel meaning "not provided", not a value filter
        if not isinstance(user_id, Optional):
            q = q.filter(Repository.user_id == user_id)

        if not isinstance(group_id, Optional):
            q = q.filter(Repository.group_id == group_id)

        if case_insensitive:
            q = q.order_by(func.lower(Repository.repo_name))
        else:
            q = q.order_by(Repository.repo_name)

        return q.all()
|
2065 | 2119 | |
|
2066 | 2120 | @property |
|
2067 | 2121 | def repo_uid(self): |
|
2068 | 2122 | return '_{}'.format(self.repo_id) |
|
2069 | 2123 | |
|
    @property
    def forks(self):
        """
        Return forks of this repo (as a non-executed query)
        """
        return Repository.get_repo_forks(self.repo_id)
|
2076 | 2130 | |
|
    @property
    def parent(self):
        """
        Returns fork parent (None when this repo is not a fork)
        """
        return self.fork
|
2083 | 2137 | |
|
2084 | 2138 | @property |
|
2085 | 2139 | def just_name(self): |
|
2086 | 2140 | return self.repo_name.split(self.NAME_SEP)[-1] |
|
2087 | 2141 | |
|
2088 | 2142 | @property |
|
2089 | 2143 | def groups_with_parents(self): |
|
2090 | 2144 | groups = [] |
|
2091 | 2145 | if self.group is None: |
|
2092 | 2146 | return groups |
|
2093 | 2147 | |
|
2094 | 2148 | cur_gr = self.group |
|
2095 | 2149 | groups.insert(0, cur_gr) |
|
2096 | 2150 | while 1: |
|
2097 | 2151 | gr = getattr(cur_gr, 'parent_group', None) |
|
2098 | 2152 | cur_gr = cur_gr.parent_group |
|
2099 | 2153 | if gr is None: |
|
2100 | 2154 | break |
|
2101 | 2155 | groups.insert(0, gr) |
|
2102 | 2156 | |
|
2103 | 2157 | return groups |
|
2104 | 2158 | |
|
    @property
    def groups_and_repo(self):
        """Tuple of (ancestor groups top-down, this repository)."""
        return self.groups_with_parents, self
|
2108 | 2162 | |
|
    @property
    def repo_path(self):
        """
        Returns base full path for that repository means where it actually
        exists on a filesystem
        """
        return self.base_path()
|
2116 | 2170 | |
|
2117 | 2171 | @property |
|
2118 | 2172 | def repo_full_path(self): |
|
2119 | 2173 | p = [self.repo_path] |
|
2120 | 2174 | # we need to split the name by / since this is how we store the |
|
2121 | 2175 | # names in the database, but that eventually needs to be converted |
|
2122 | 2176 | # into a valid system path |
|
2123 | 2177 | p += self.repo_name.split(self.NAME_SEP) |
|
2124 | 2178 | return os.path.join(*map(safe_str, p)) |
|
2125 | 2179 | |
|
    @property
    def cache_keys(self):
        """
        Returns associated cache keys for that repo
        """
        # namespace key is derived from this repo's id
        repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
        return CacheKey.query()\
            .filter(CacheKey.cache_key == repo_namespace_key)\
            .order_by(CacheKey.cache_key)\
            .all()
|
2136 | 2190 | |
|
    @property
    def cached_diffs_relative_dir(self):
        """
        Return a relative to the repository store path of cached diffs
        used for safe display for users, who shouldn't know the absolute store
        path
        """
        return os.path.join(
            os.path.dirname(self.repo_name),
            self.cached_diffs_dir.split(os.path.sep)[-1])
|
2147 | 2201 | |
|
2148 | 2202 | @property |
|
2149 | 2203 | def cached_diffs_dir(self): |
|
2150 | 2204 | path = self.repo_full_path |
|
2151 | 2205 | return os.path.join( |
|
2152 | 2206 | os.path.dirname(path), |
|
2153 | 2207 | f'.__shadow_diff_cache_repo_{self.repo_id}') |
|
2154 | 2208 | |
|
2155 | 2209 | def cached_diffs(self): |
|
2156 | 2210 | diff_cache_dir = self.cached_diffs_dir |
|
2157 | 2211 | if os.path.isdir(diff_cache_dir): |
|
2158 | 2212 | return os.listdir(diff_cache_dir) |
|
2159 | 2213 | return [] |
|
2160 | 2214 | |
|
2161 | 2215 | def shadow_repos(self): |
|
2162 | 2216 | shadow_repos_pattern = f'.__shadow_repo_{self.repo_id}' |
|
2163 | 2217 | return [ |
|
2164 | 2218 | x for x in os.listdir(os.path.dirname(self.repo_full_path)) |
|
2165 | 2219 | if x.startswith(shadow_repos_pattern) |
|
2166 | 2220 | ] |
|
2167 | 2221 | |
|
2168 | 2222 | def get_new_name(self, repo_name): |
|
2169 | 2223 | """ |
|
2170 | 2224 | returns new full repository name based on assigned group and new new |
|
2171 | 2225 | |
|
2172 | 2226 | :param repo_name: |
|
2173 | 2227 | """ |
|
2174 | 2228 | path_prefix = self.group.full_path_splitted if self.group else [] |
|
2175 | 2229 | return self.NAME_SEP.join(path_prefix + [repo_name]) |
|
2176 | 2230 | |
|
    @property
    def _config(self):
        """
        Returns db based config object.
        """
        from rhodecode.lib.utils import make_db_config
        return make_db_config(clear_session=False, repo=self)
|
2184 | 2238 | |
|
2185 | 2239 | def permissions(self, with_admins=True, with_owner=True, |
|
2186 | 2240 | expand_from_user_groups=False): |
|
2187 | 2241 | """ |
|
2188 | 2242 | Permissions for repositories |
|
2189 | 2243 | """ |
|
2190 | 2244 | _admin_perm = 'repository.admin' |
|
2191 | 2245 | |
|
2192 | 2246 | owner_row = [] |
|
2193 | 2247 | if with_owner: |
|
2194 | 2248 | usr = AttributeDict(self.user.get_dict()) |
|
2195 | 2249 | usr.owner_row = True |
|
2196 | 2250 | usr.permission = _admin_perm |
|
2197 | 2251 | usr.permission_id = None |
|
2198 | 2252 | owner_row.append(usr) |
|
2199 | 2253 | |
|
2200 | 2254 | super_admin_ids = [] |
|
2201 | 2255 | super_admin_rows = [] |
|
2202 | 2256 | if with_admins: |
|
2203 | 2257 | for usr in User.get_all_super_admins(): |
|
2204 | 2258 | super_admin_ids.append(usr.user_id) |
|
2205 | 2259 | # if this admin is also owner, don't double the record |
|
2206 | 2260 | if usr.user_id == owner_row[0].user_id: |
|
2207 | 2261 | owner_row[0].admin_row = True |
|
2208 | 2262 | else: |
|
2209 | 2263 | usr = AttributeDict(usr.get_dict()) |
|
2210 | 2264 | usr.admin_row = True |
|
2211 | 2265 | usr.permission = _admin_perm |
|
2212 | 2266 | usr.permission_id = None |
|
2213 | 2267 | super_admin_rows.append(usr) |
|
2214 | 2268 | |
|
2215 | 2269 | q = UserRepoToPerm.query().filter(UserRepoToPerm.repository == self) |
|
2216 | 2270 | q = q.options(joinedload(UserRepoToPerm.repository), |
|
2217 | 2271 | joinedload(UserRepoToPerm.user), |
|
2218 | 2272 | joinedload(UserRepoToPerm.permission),) |
|
2219 | 2273 | |
|
2220 | 2274 | # get owners and admins and permissions. We do a trick of re-writing |
|
2221 | 2275 | # objects from sqlalchemy to named-tuples due to sqlalchemy session |
|
2222 | 2276 | # has a global reference and changing one object propagates to all |
|
2223 | 2277 | # others. This means if admin is also an owner admin_row that change |
|
2224 | 2278 | # would propagate to both objects |
|
2225 | 2279 | perm_rows = [] |
|
2226 | 2280 | for _usr in q.all(): |
|
2227 | 2281 | usr = AttributeDict(_usr.user.get_dict()) |
|
2228 | 2282 | # if this user is also owner/admin, mark as duplicate record |
|
2229 | 2283 | if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: |
|
2230 | 2284 | usr.duplicate_perm = True |
|
2231 | 2285 | # also check if this permission is maybe used by branch_permissions |
|
2232 | 2286 | if _usr.branch_perm_entry: |
|
2233 | 2287 | usr.branch_rules = [x.branch_rule_id for x in _usr.branch_perm_entry] |
|
2234 | 2288 | |
|
2235 | 2289 | usr.permission = _usr.permission.permission_name |
|
2236 | 2290 | usr.permission_id = _usr.repo_to_perm_id |
|
2237 | 2291 | perm_rows.append(usr) |
|
2238 | 2292 | |
|
2239 | 2293 | # filter the perm rows by 'default' first and then sort them by |
|
2240 | 2294 | # admin,write,read,none permissions sorted again alphabetically in |
|
2241 | 2295 | # each group |
|
2242 | 2296 | perm_rows = sorted(perm_rows, key=display_user_sort) |
|
2243 | 2297 | |
|
2244 | 2298 | user_groups_rows = [] |
|
2245 | 2299 | if expand_from_user_groups: |
|
2246 | 2300 | for ug in self.permission_user_groups(with_members=True): |
|
2247 | 2301 | for user_data in ug.members: |
|
2248 | 2302 | user_groups_rows.append(user_data) |
|
2249 | 2303 | |
|
2250 | 2304 | return super_admin_rows + owner_row + perm_rows + user_groups_rows |
|
2251 | 2305 | |
|
    def permission_user_groups(self, with_members=True):
        """Return permission rows for user groups holding a permission on this repo.

        :param with_members: attach each group's member dicts as ``entry.members``
        """
        q = UserGroupRepoToPerm.query()\
            .filter(UserGroupRepoToPerm.repository == self)
        q = q.options(joinedload(UserGroupRepoToPerm.repository),
                      joinedload(UserGroupRepoToPerm.users_group),
                      joinedload(UserGroupRepoToPerm.permission),)

        perm_rows = []
        for _user_group in q.all():
            # copy into plain AttributeDict so mutations don't touch ORM state
            entry = AttributeDict(_user_group.users_group.get_dict())
            entry.permission = _user_group.permission.permission_name
            if with_members:
                entry.members = [x.user.get_dict()
                                 for x in _user_group.users_group.members]
            perm_rows.append(entry)

        perm_rows = sorted(perm_rows, key=display_user_group_sort)
        return perm_rows
|
2270 | 2324 | |
|
    def get_api_data(self, include_secrets=False):
        """
        Common function for generating repo api data

        :param include_secrets: See :meth:`User.get_api_data`.

        """
        # TODO: mikhail: Here there is an anti-pattern, we probably need to
        # move this methods on models level.
        from rhodecode.model.settings import SettingsModel
        from rhodecode.model.repo import RepoModel

        repo = self
        # lock info is a (user_id, timestamp, reason) triple; all None when unlocked
        _user_id, _time, _reason = self.locked

        data = {
            'repo_id': repo.repo_id,
            'repo_name': repo.repo_name,
            'repo_type': repo.repo_type,
            'clone_uri': repo.clone_uri or '',
            'push_uri': repo.push_uri or '',
            'url': RepoModel().get_url(self),
            'private': repo.private,
            'created_on': repo.created_on,
            'description': repo.description_safe,
            'landing_rev': repo.landing_rev,
            'owner': repo.user.username,
            'fork_of': repo.fork.repo_name if repo.fork else None,
            'fork_of_id': repo.fork.repo_id if repo.fork else None,
            'enable_statistics': repo.enable_statistics,
            'enable_locking': repo.enable_locking,
            'enable_downloads': repo.enable_downloads,
            'last_changeset': repo.changeset_cache,
            'locked_by': User.get(_user_id).get_api_data(
                include_secrets=include_secrets) if _user_id else None,
            'locked_date': time_to_datetime(_time) if _time else None,
            'lock_reason': _reason if _reason else None,
        }

        # TODO: mikhail: should be per-repo settings here
        rc_config = SettingsModel().get_all_settings()
        repository_fields = str2bool(
            rc_config.get('rhodecode_repository_fields'))
        if repository_fields:
            # expose custom extra fields as additional top-level keys
            for f in self.extra_fields:
                data[f.field_key_prefixed] = f.field_value

        return data
|
2319 | 2373 | |
|
2320 | 2374 | @classmethod |
|
2321 | 2375 | def lock(cls, repo, user_id, lock_time=None, lock_reason=None): |
|
2322 | 2376 | if not lock_time: |
|
2323 | 2377 | lock_time = time.time() |
|
2324 | 2378 | if not lock_reason: |
|
2325 | 2379 | lock_reason = cls.LOCK_AUTOMATIC |
|
2326 | 2380 | repo.locked = [user_id, lock_time, lock_reason] |
|
2327 | 2381 | Session().add(repo) |
|
2328 | 2382 | Session().commit() |
|
2329 | 2383 | |
|
2330 | 2384 | @classmethod |
|
2331 | 2385 | def unlock(cls, repo): |
|
2332 | 2386 | repo.locked = None |
|
2333 | 2387 | Session().add(repo) |
|
2334 | 2388 | Session().commit() |
|
2335 | 2389 | |
|
    @classmethod
    def getlock(cls, repo):
        """Return `repo`'s lock info triple (see :attr:`locked`)."""
        return repo.locked
|
2339 | 2393 | |
|
    def get_locking_state(self, action, user_id, only_when_enabled=True):
        """
        Checks locking on this repository, if locking is enabled and lock is
        present returns a tuple of make_lock, locked, locked_by.
        make_lock can have 3 states None (do nothing) True, make lock
        False release lock, This value is later propagated to hooks, which
        do the locking. Think about this as signals passed to hooks what to do.

        :param action: 'push' or 'pull'
        :param user_id: id of the user performing the action
        :param only_when_enabled: only evaluate when repo locking is enabled
        :raises ValueError: on an unknown `action`
        """
        # TODO: johbo: This is part of the business logic and should be moved
        # into the RepositoryModel.

        if action not in ('push', 'pull'):
            raise ValueError("Invalid action value: %s" % repr(action))

        # defines if locked error should be thrown to user
        currently_locked = False
        # defines if new lock should be made, tri-state
        make_lock = None
        repo = self
        user = User.get(user_id)

        # (user_id, timestamp, reason) triple
        lock_info = repo.locked

        if repo and (repo.enable_locking or not only_when_enabled):
            if action == 'push':
                # check if it's already locked !, if it is compare users
                locked_by_user_id = lock_info[0]
                if user.user_id == locked_by_user_id:
                    log.debug(
                        'Got `push` action from user %s, now unlocking', user)
                    # unlock if we have push from user who locked
                    make_lock = False
                else:
                    # we're not the same user who locked, ban with
                    # code defined in settings (default is 423 HTTP Locked) !
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
            elif action == 'pull':
                # [0] user [1] date
                if lock_info[0] and lock_info[1]:
                    log.debug('Repo %s is currently locked by %s', repo, user)
                    currently_locked = True
                else:
                    log.debug('Setting lock on repo %s by %s', repo, user)
                    make_lock = True

        else:
            log.debug('Repository %s do not have locking enabled', repo)

        log.debug('FINAL locking values make_lock:%s,locked:%s,locked_by:%s',
                  make_lock, currently_locked, lock_info)

        from rhodecode.lib.auth import HasRepoPermissionAny
        perm_check = HasRepoPermissionAny('repository.write', 'repository.admin')
        if make_lock and not perm_check(repo_name=repo.repo_name, user=user):
            # if we don't have at least write permission we cannot make a lock
            log.debug('lock state reset back to FALSE due to lack '
                      'of at least read permission')
            make_lock = False

        return make_lock, currently_locked, lock_info
|
2402 | 2456 | |
|
2403 | 2457 | @property |
|
2404 | 2458 | def last_commit_cache_update_diff(self): |
|
2405 | 2459 | return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) |
|
2406 | 2460 | |
|
2407 | 2461 | @classmethod |
|
2408 | 2462 | def _load_commit_change(cls, last_commit_cache): |
|
2409 | 2463 | from rhodecode.lib.vcs.utils.helpers import parse_datetime |
|
2410 | 2464 | empty_date = datetime.datetime.fromtimestamp(0) |
|
2411 | 2465 | date_latest = last_commit_cache.get('date', empty_date) |
|
2412 | 2466 | try: |
|
2413 | 2467 | return parse_datetime(date_latest) |
|
2414 | 2468 | except Exception: |
|
2415 | 2469 | return empty_date |
|
2416 | 2470 | |
|
    @property
    def last_commit_change(self):
        """Datetime of the last cached commit (epoch when unknown)."""
        return self._load_commit_change(self.changeset_cache)
|
2420 | 2474 | |
|
    @property
    def last_db_change(self):
        """Datetime of the last database update of this repository row."""
        return self.updated_on
|
2424 | 2478 | |
|
2425 | 2479 | @property |
|
2426 | 2480 | def clone_uri_hidden(self): |
|
2427 | 2481 | clone_uri = self.clone_uri |
|
2428 | 2482 | if clone_uri: |
|
2429 | 2483 | import urlobject |
|
2430 | 2484 | url_obj = urlobject.URLObject(cleaned_uri(clone_uri)) |
|
2431 | 2485 | if url_obj.password: |
|
2432 | 2486 | clone_uri = url_obj.with_password('*****') |
|
2433 | 2487 | return clone_uri |
|
2434 | 2488 | |
|
2435 | 2489 | @property |
|
2436 | 2490 | def push_uri_hidden(self): |
|
2437 | 2491 | push_uri = self.push_uri |
|
2438 | 2492 | if push_uri: |
|
2439 | 2493 | import urlobject |
|
2440 | 2494 | url_obj = urlobject.URLObject(cleaned_uri(push_uri)) |
|
2441 | 2495 | if url_obj.password: |
|
2442 | 2496 | push_uri = url_obj.with_password('*****') |
|
2443 | 2497 | return push_uri |
|
2444 | 2498 | |
|
    def clone_url(self, **override):
        """Build the clone URL for this repository.

        Recognized override keys: ``with_id`` (use the id-based template),
        ``uri_tmpl`` (explicit template), ``ssh`` (use the ssh template).
        Remaining overrides are forwarded to :func:`get_clone_url`.
        """
        from rhodecode.model.settings import SettingsModel

        uri_tmpl = None
        if 'with_id' in override:
            uri_tmpl = self.DEFAULT_CLONE_URI_ID
            del override['with_id']

        if 'uri_tmpl' in override:
            uri_tmpl = override['uri_tmpl']
            del override['uri_tmpl']

        ssh = False
        if 'ssh' in override:
            ssh = True
            del override['ssh']

        # we didn't override our tmpl from **overrides
        request = get_current_request()
        if not uri_tmpl:
            # prefer the per-request settings snapshot when available
            if hasattr(request, 'call_context') and hasattr(request.call_context, 'rc_config'):
                rc_config = request.call_context.rc_config
            else:
                rc_config = SettingsModel().get_all_settings(cache=True)

            if ssh:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_ssh_tmpl') or self.DEFAULT_CLONE_URI_SSH

            else:
                uri_tmpl = rc_config.get(
                    'rhodecode_clone_uri_tmpl') or self.DEFAULT_CLONE_URI

        return get_clone_url(request=request,
                             uri_tmpl=uri_tmpl,
                             repo_name=self.repo_name,
                             repo_id=self.repo_id,
                             repo_type=self.repo_type,
                             **override)
|
2484 | 2538 | |
|
    def set_state(self, state):
        """Set repo state and stage the change (caller is responsible for commit)."""
        self.repo_state = state
        Session().add(self)
|
2488 | 2542 | #========================================================================== |
|
2489 | 2543 | # SCM PROPERTIES |
|
2490 | 2544 | #========================================================================== |
|
2491 | 2545 | |
|
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, maybe_unreachable=False, reference_obj=None):
        """Return a commit by id or index; safe wrapper around the scm instance."""
        return get_commit_safe(
            self.scm_instance(), commit_id, commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
|
2496 | 2550 | |
|
2497 | 2551 | def get_changeset(self, rev=None, pre_load=None): |
|
2498 | 2552 | warnings.warn("Use get_commit", DeprecationWarning) |
|
2499 | 2553 | commit_id = None |
|
2500 | 2554 | commit_idx = None |
|
2501 | 2555 | if isinstance(rev, str): |
|
2502 | 2556 | commit_id = rev |
|
2503 | 2557 | else: |
|
2504 | 2558 | commit_idx = rev |
|
2505 | 2559 | return self.get_commit(commit_id=commit_id, commit_idx=commit_idx, |
|
2506 | 2560 | pre_load=pre_load) |
|
2507 | 2561 | |
|
    def get_landing_commit(self):
        """
        Returns landing commit, or if that doesn't exist returns the tip
        """
        _rev_type, _rev = self.landing_rev
        commit = self.get_commit(_rev)
        # landing rev may point at a missing ref; fall back to tip
        if isinstance(commit, EmptyCommit):
            return self.get_commit()
        return commit
|
2517 | 2571 | |
|
    def flush_commit_cache(self):
        """Force a commit-cache refresh by writing a dummy entry, then recomputing."""
        self.update_commit_cache(cs_cache={'raw_id':'0'})
        self.update_commit_cache()
|
2521 | 2575 | |
|
2522 | 2576 | def update_commit_cache(self, cs_cache=None, config=None): |
|
2523 | 2577 | """ |
|
2524 | 2578 | Update cache of last commit for repository |
|
2525 | 2579 | cache_keys should be:: |
|
2526 | 2580 | |
|
2527 | 2581 | source_repo_id |
|
2528 | 2582 | short_id |
|
2529 | 2583 | raw_id |
|
2530 | 2584 | revision |
|
2531 | 2585 | parents |
|
2532 | 2586 | message |
|
2533 | 2587 | date |
|
2534 | 2588 | author |
|
2535 | 2589 | updated_on |
|
2536 | 2590 | |
|
2537 | 2591 | """ |
|
2538 | 2592 | from rhodecode.lib.vcs.backends.base import BaseCommit |
|
2539 | 2593 | from rhodecode.lib.vcs.utils.helpers import parse_datetime |
|
2540 | 2594 | empty_date = datetime.datetime.fromtimestamp(0) |
|
2541 | 2595 | repo_commit_count = 0 |
|
2542 | 2596 | |
|
2543 | 2597 | if cs_cache is None: |
|
2544 | 2598 | # use no-cache version here |
|
2545 | 2599 | try: |
|
2546 | 2600 | scm_repo = self.scm_instance(cache=False, config=config) |
|
2547 | 2601 | except VCSError: |
|
2548 | 2602 | scm_repo = None |
|
2549 | 2603 | empty = scm_repo is None or scm_repo.is_empty() |
|
2550 | 2604 | |
|
2551 | 2605 | if not empty: |
|
2552 | 2606 | cs_cache = scm_repo.get_commit( |
|
2553 | 2607 | pre_load=["author", "date", "message", "parents", "branch"]) |
|
2554 | 2608 | repo_commit_count = scm_repo.count() |
|
2555 | 2609 | else: |
|
2556 | 2610 | cs_cache = EmptyCommit() |
|
2557 | 2611 | |
|
2558 | 2612 | if isinstance(cs_cache, BaseCommit): |
|
2559 | 2613 | cs_cache = cs_cache.__json__() |
|
2560 | 2614 | |
|
2561 | 2615 | def is_outdated(new_cs_cache): |
|
2562 | 2616 | if (new_cs_cache['raw_id'] != self.changeset_cache['raw_id'] or |
|
2563 | 2617 | new_cs_cache['revision'] != self.changeset_cache['revision']): |
|
2564 | 2618 | return True |
|
2565 | 2619 | return False |
|
2566 | 2620 | |
|
2567 | 2621 | # check if we have maybe already latest cached revision |
|
2568 | 2622 | if is_outdated(cs_cache) or not self.changeset_cache: |
|
2569 | 2623 | _current_datetime = datetime.datetime.utcnow() |
|
2570 | 2624 | last_change = cs_cache.get('date') or _current_datetime |
|
2571 | 2625 | # we check if last update is newer than the new value |
|
2572 | 2626 | # if yes, we use the current timestamp instead. Imagine you get |
|
2573 | 2627 | # old commit pushed 1y ago, we'd set last update 1y to ago. |
|
2574 | 2628 | last_change_timestamp = datetime_to_time(last_change) |
|
2575 | 2629 | current_timestamp = datetime_to_time(last_change) |
|
2576 | 2630 | if last_change_timestamp > current_timestamp and not empty: |
|
2577 | 2631 | cs_cache['date'] = _current_datetime |
|
2578 | 2632 | |
|
2579 | 2633 | # also store size of repo |
|
2580 | 2634 | cs_cache['repo_commit_count'] = repo_commit_count |
|
2581 | 2635 | |
|
2582 | 2636 | _date_latest = parse_datetime(cs_cache.get('date') or empty_date) |
|
2583 | 2637 | cs_cache['updated_on'] = time.time() |
|
2584 | 2638 | self.changeset_cache = cs_cache |
|
2585 | 2639 | self.updated_on = last_change |
|
2586 | 2640 | Session().add(self) |
|
2587 | 2641 | Session().commit() |
|
2588 | 2642 | |
|
2589 | 2643 | else: |
|
2590 | 2644 | if empty: |
|
2591 | 2645 | cs_cache = EmptyCommit().__json__() |
|
2592 | 2646 | else: |
|
2593 | 2647 | cs_cache = self.changeset_cache |
|
2594 | 2648 | |
|
2595 | 2649 | _date_latest = parse_datetime(cs_cache.get('date') or empty_date) |
|
2596 | 2650 | |
|
2597 | 2651 | cs_cache['updated_on'] = time.time() |
|
2598 | 2652 | self.changeset_cache = cs_cache |
|
2599 | 2653 | self.updated_on = _date_latest |
|
2600 | 2654 | Session().add(self) |
|
2601 | 2655 | Session().commit() |
|
2602 | 2656 | |
|
2603 | 2657 | log.debug('updated repo `%s` with new commit cache %s, and last update_date: %s', |
|
2604 | 2658 | self.repo_name, cs_cache, _date_latest) |
|
2605 | 2659 | |
|
    @property
    def tip(self):
        """Tip (head) commit of this repository."""
        return self.get_commit('tip')
|
2609 | 2663 | |
|
    @property
    def author(self):
        """Author of the tip commit."""
        return self.tip.author
|
2613 | 2667 | |
|
    @property
    def last_change(self):
        """Last change date reported by the scm backend."""
        return self.scm_instance().last_change
|
2617 | 2671 | |
|
2618 | 2672 | def get_comments(self, revisions=None): |
|
2619 | 2673 | """ |
|
2620 | 2674 | Returns comments for this repository grouped by revisions |
|
2621 | 2675 | |
|
2622 | 2676 | :param revisions: filter query by revisions only |
|
2623 | 2677 | """ |
|
2624 | 2678 | cmts = ChangesetComment.query()\ |
|
2625 | 2679 | .filter(ChangesetComment.repo == self) |
|
2626 | 2680 | if revisions: |
|
2627 | 2681 | cmts = cmts.filter(ChangesetComment.revision.in_(revisions)) |
|
2628 | 2682 | grouped = collections.defaultdict(list) |
|
2629 | 2683 | for cmt in cmts.all(): |
|
2630 | 2684 | grouped[cmt.revision].append(cmt) |
|
2631 | 2685 | return grouped |
|
2632 | 2686 | |
|
    def statuses(self, revisions=None):
        """
        Returns statuses for this repository, keyed by revision as
        ``[status, status_label, pull_request_id, pull_request_repo_name]``.

        :param revisions: list of revisions to get statuses for
        """
        statuses = ChangesetStatus.query()\
            .filter(ChangesetStatus.repo == self)\
            .filter(ChangesetStatus.version == 0)

        if revisions:
            # Try doing the filtering in chunks to avoid hitting limits
            size = 500
            status_results = []
            for chunk in range(0, len(revisions), size):
                status_results += statuses.filter(
                    ChangesetStatus.revision.in_(
                        revisions[chunk: chunk+size])
                ).all()
        else:
            status_results = statuses.all()

        grouped = {}

        # maybe we have open new pullrequest without a status?
        stat = ChangesetStatus.STATUS_UNDER_REVIEW
        status_lbl = ChangesetStatus.get_status_lbl(stat)
        for pr in PullRequest.query().filter(PullRequest.source_repo == self).all():
            for rev in pr.revisions:
                pr_id = pr.pull_request_id
                pr_repo = pr.target_repo.repo_name
                grouped[rev] = [stat, status_lbl, pr_id, pr_repo]

        # explicit statuses override the implicit under-review defaults above
        for stat in status_results:
            pr_id = pr_repo = None
            if stat.pull_request:
                pr_id = stat.pull_request.pull_request_id
                pr_repo = stat.pull_request.target_repo.repo_name
            grouped[stat.revision] = [str(stat.status), stat.status_lbl,
                                      pr_id, pr_repo]
        return grouped
|
2674 | 2728 | |
|
2675 | 2729 | # ========================================================================== |
|
2676 | 2730 | # SCM CACHE INSTANCE |
|
2677 | 2731 | # ========================================================================== |
|
2678 | 2732 | |
|
2679 | 2733 | def scm_instance(self, **kwargs): |
|
2680 | 2734 | import rhodecode |
|
2681 | 2735 | |
|
2682 | 2736 | # Passing a config will not hit the cache currently only used |
|
2683 | 2737 | # for repo2dbmapper |
|
2684 | 2738 | config = kwargs.pop('config', None) |
|
2685 | 2739 | cache = kwargs.pop('cache', None) |
|
2686 | 2740 | vcs_full_cache = kwargs.pop('vcs_full_cache', None) |
|
2687 | 2741 | if vcs_full_cache is not None: |
|
2688 | 2742 | # allows override global config |
|
2689 | 2743 | full_cache = vcs_full_cache |
|
2690 | 2744 | else: |
|
2691 | 2745 | full_cache = rhodecode.ConfigGet().get_bool('vcs_full_cache') |
|
2692 | 2746 | # if cache is NOT defined use default global, else we have a full |
|
2693 | 2747 | # control over cache behaviour |
|
2694 | 2748 | if cache is None and full_cache and not config: |
|
2695 | 2749 | log.debug('Initializing pure cached instance for %s', self.repo_path) |
|
2696 | 2750 | return self._get_instance_cached() |
|
2697 | 2751 | |
|
2698 | 2752 | # cache here is sent to the "vcs server" |
|
2699 | 2753 | return self._get_instance(cache=bool(cache), config=config) |
|
2700 | 2754 | |
|
    def _get_instance_cached(self):
        """
        Return a VCS instance through the long-term dogpile cache region,
        invalidating the cached value when the repo's state uid changes.
        """
        from rhodecode.lib import rc_cache

        cache_namespace_uid = f'repo_instance.{self.repo_id}'
        region = rc_cache.get_or_create_region('cache_repo_longterm', cache_namespace_uid)

        # we must use thread scoped cache here,
        # because each thread of gevent needs it's own not shared connection and cache
        # we also alter `args` so the cache key is individual for every green thread.
        repo_namespace_key = CacheKey.REPO_INVALIDATION_NAMESPACE.format(repo_id=self.repo_id)
        inv_context_manager = rc_cache.InvalidationContext(key=repo_namespace_key, thread_scoped=True)

        # our wrapped caching function that takes state_uid to save the previous state in
        def cache_generator(_state_uid):

            @region.conditional_cache_on_arguments(namespace=cache_namespace_uid)
            def get_instance_cached(_repo_id, _process_context_id):
                # we save in cached func the generation state so we can detect a change and invalidate caches
                return _state_uid, self._get_instance(repo_state_uid=_state_uid)

            return get_instance_cached

        with inv_context_manager as invalidation_context:
            cache_state_uid = invalidation_context.state_uid
            cache_func = cache_generator(cache_state_uid)

            # repo_id + per-process/green-thread key form the cache key
            args = self.repo_id, inv_context_manager.proc_key

            previous_state_uid, instance = cache_func(*args)

            # now compare keys, the "cache" state vs expected state.
            if previous_state_uid != cache_state_uid:
                # stale entry: force recomputation through the same cache slot
                log.warning('Cached state uid %s is different than current state uid %s',
                            previous_state_uid, cache_state_uid)
                _, instance = cache_func.refresh(*args)

            log.debug('Repo instance fetched in %.4fs', inv_context_manager.compute_time)
            return instance
|
2739 | 2793 | |
|
2740 | 2794 | def _get_instance(self, cache=True, config=None, repo_state_uid=None): |
|
2741 | 2795 | log.debug('Initializing %s instance `%s` with cache flag set to: %s', |
|
2742 | 2796 | self.repo_type, self.repo_path, cache) |
|
2743 | 2797 | config = config or self._config |
|
2744 | 2798 | custom_wire = { |
|
2745 | 2799 | 'cache': cache, # controls the vcs.remote cache |
|
2746 | 2800 | 'repo_state_uid': repo_state_uid |
|
2747 | 2801 | } |
|
2748 | 2802 | |
|
2749 | 2803 | repo = get_vcs_instance( |
|
2750 | 2804 | repo_path=safe_str(self.repo_full_path), |
|
2751 | 2805 | config=config, |
|
2752 | 2806 | with_wire=custom_wire, |
|
2753 | 2807 | create=False, |
|
2754 | 2808 | _vcs_alias=self.repo_type) |
|
2755 | 2809 | if repo is not None: |
|
2756 | 2810 | repo.count() # cache rebuild |
|
2757 | 2811 | |
|
2758 | 2812 | return repo |
|
2759 | 2813 | |
|
2760 | 2814 | def get_shadow_repository_path(self, workspace_id): |
|
2761 | 2815 | from rhodecode.lib.vcs.backends.base import BaseRepository |
|
2762 | 2816 | shadow_repo_path = BaseRepository._get_shadow_repository_path( |
|
2763 | 2817 | self.repo_full_path, self.repo_id, workspace_id) |
|
2764 | 2818 | return shadow_repo_path |
|
2765 | 2819 | |
|
2766 | 2820 | def __json__(self): |
|
2767 | 2821 | return {'landing_rev': self.landing_rev} |
|
2768 | 2822 | |
|
2769 | 2823 | def get_dict(self): |
|
2770 | 2824 | |
|
2771 | 2825 | # Since we transformed `repo_name` to a hybrid property, we need to |
|
2772 | 2826 | # keep compatibility with the code which uses `repo_name` field. |
|
2773 | 2827 | |
|
2774 | 2828 | result = super(Repository, self).get_dict() |
|
2775 | 2829 | result['repo_name'] = result.pop('_repo_name', None) |
|
2776 | 2830 | result.pop('_changeset_cache', '') |
|
2777 | 2831 | return result |
|
2778 | 2832 | |
|
2779 | 2833 | |
|
class RepoGroup(Base, BaseModel):
    """
    A (possibly nested) group of repositories, stored in the `groups` table.
    Nesting is expressed through the self-referencing `group_parent_id`.
    """
    __tablename__ = 'groups'
    __table_args__ = (
        # a group name must be unique within its parent group
        UniqueConstraint('group_name', 'group_parent_id'),
        base_table_args,
    )

    CHOICES_SEPARATOR = '/'  # used to generate select2 choices for nested groups

    group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    # full slash-separated path; exposed via the `group_name` hybrid property
    _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None)
    # ASCII-safe hash of the name, see hash_repo_group_name()
    group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False)
    # self-referencing FK building the group tree; NULL for top-level groups
    group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None)
    enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False)
    # owner of the group
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=False, default=None)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)
    # marks a user's personal repo group
    personal = Column('personal', Boolean(), nullable=True, unique=None, default=None)
    _changeset_cache = Column("changeset_cache", LargeBinary(), nullable=True) # JSON data

    repo_group_to_perm = relationship('UserRepoGroupToPerm', cascade='all', order_by='UserRepoGroupToPerm.group_to_perm_id', back_populates='group')
    users_group_to_perm = relationship('UserGroupRepoGroupToPerm', cascade='all', back_populates='group')
    parent_group = relationship('RepoGroup', remote_side=group_id)
    user = relationship('User', back_populates='repository_groups')
    integrations = relationship('Integration', cascade="all, delete-orphan", back_populates='repo_group')

    # no cascade, set NULL
    scope_artifacts = relationship('FileStore', primaryjoin='FileStore.scope_repo_group_id==RepoGroup.group_id', viewonly=True)

    def __init__(self, group_name='', parent_group=None):
        """
        :param group_name: full path name of the group
        :param parent_group: optional parent RepoGroup instance
        """
        self.group_name = group_name
        self.parent_group = parent_group
|
2813 | 2867 | |
|
2814 | 2868 | def __repr__(self): |
|
2815 | 2869 | return f"<{self.cls_name}('id:{self.group_id}:{self.group_name}')>" |
|
2816 | 2870 | |
|
2817 | 2871 | @hybrid_property |
|
2818 | 2872 | def group_name(self): |
|
2819 | 2873 | return self._group_name |
|
2820 | 2874 | |
|
2821 | 2875 | @group_name.setter |
|
2822 | 2876 | def group_name(self, value): |
|
2823 | 2877 | self._group_name = value |
|
2824 | 2878 | self.group_name_hash = self.hash_repo_group_name(value) |
|
2825 | 2879 | |
|
    @classmethod
    def _load_changeset_cache(cls, repo_id, changeset_cache_raw):
        """
        Deserialize the raw JSON changeset cache blob.

        Falls back to an EmptyCommit-shaped dict when the blob is missing or
        unreadable.

        :param repo_id: recorded as `source_repo_id` in the empty fallback
        :param changeset_cache_raw: raw JSON bytes/str from the DB column
        """
        from rhodecode.lib.vcs.backends.base import EmptyCommit
        dummy = EmptyCommit().__json__()
        if not changeset_cache_raw:
            dummy['source_repo_id'] = repo_id
            # round-trip through JSON to get a plain, deep-copied dict
            return json.loads(json.dumps(dummy))

        try:
            return json.loads(changeset_cache_raw)
        except TypeError:
            # NOTE(review): this fallback (unlike the empty-blob branch above)
            # does not set `source_repo_id` — confirm whether intentional
            return dummy
        except Exception:
            log.error(traceback.format_exc())
            return dummy
|
2841 | 2895 | |
|
2842 | 2896 | @hybrid_property |
|
2843 | 2897 | def changeset_cache(self): |
|
2844 | 2898 | return self._load_changeset_cache('', self._changeset_cache) |
|
2845 | 2899 | |
|
2846 | 2900 | @changeset_cache.setter |
|
2847 | 2901 | def changeset_cache(self, val): |
|
2848 | 2902 | try: |
|
2849 | 2903 | self._changeset_cache = json.dumps(val) |
|
2850 | 2904 | except Exception: |
|
2851 | 2905 | log.error(traceback.format_exc()) |
|
2852 | 2906 | |
|
    @validates('group_parent_id')
    def validate_group_parent_id(self, key, val):
        """
        Check cycle references for a parent group to self
        """
        if self.group_id and val:
            # NOTE(review): `assert` is stripped under `python -O`; if this
            # guard must always run, an explicit raise would be safer.
            assert val != self.group_id

        return val
|
2862 | 2916 | |
|
2863 | 2917 | @hybrid_property |
|
2864 | 2918 | def description_safe(self): |
|
2865 | 2919 | from rhodecode.lib import helpers as h |
|
2866 | 2920 | return h.escape(self.group_description) |
|
2867 | 2921 | |
|
2868 | 2922 | @classmethod |
|
2869 | 2923 | def hash_repo_group_name(cls, repo_group_name): |
|
2870 | 2924 | val = remove_formatting(repo_group_name) |
|
2871 | 2925 | val = safe_str(val).lower() |
|
2872 | 2926 | chars = [] |
|
2873 | 2927 | for c in val: |
|
2874 | 2928 | if c not in string.ascii_letters: |
|
2875 | 2929 | c = str(ord(c)) |
|
2876 | 2930 | chars.append(c) |
|
2877 | 2931 | |
|
2878 | 2932 | return ''.join(chars) |
|
2879 | 2933 | |
|
2880 | 2934 | @classmethod |
|
2881 | 2935 | def _generate_choice(cls, repo_group): |
|
2882 | 2936 | from webhelpers2.html import literal as _literal |
|
2883 | 2937 | |
|
2884 | 2938 | def _name(k): |
|
2885 | 2939 | return _literal(cls.CHOICES_SEPARATOR.join(k)) |
|
2886 | 2940 | |
|
2887 | 2941 | return repo_group.group_id, _name(repo_group.full_path_splitted) |
|
2888 | 2942 | |
|
2889 | 2943 | @classmethod |
|
2890 | 2944 | def groups_choices(cls, groups=None, show_empty_group=True): |
|
2891 | 2945 | if not groups: |
|
2892 | 2946 | groups = cls.query().all() |
|
2893 | 2947 | |
|
2894 | 2948 | repo_groups = [] |
|
2895 | 2949 | if show_empty_group: |
|
2896 | 2950 | repo_groups = [(-1, '-- %s --' % _('No parent'))] |
|
2897 | 2951 | |
|
2898 | 2952 | repo_groups.extend([cls._generate_choice(x) for x in groups]) |
|
2899 | 2953 | |
|
2900 | 2954 | repo_groups = sorted( |
|
2901 | 2955 | repo_groups, key=lambda t: t[1].split(cls.CHOICES_SEPARATOR)[0]) |
|
2902 | 2956 | return repo_groups |
|
2903 | 2957 | |
|
2904 | 2958 | @classmethod |
|
2905 | 2959 | def url_sep(cls): |
|
2906 | 2960 | return URL_SEP |
|
2907 | 2961 | |
|
2908 | 2962 | @classmethod |
|
2909 | 2963 | def get_by_group_name(cls, group_name, cache=False, case_insensitive=False): |
|
2910 | 2964 | if case_insensitive: |
|
2911 | 2965 | gr = cls.query().filter(func.lower(cls.group_name) |
|
2912 | 2966 | == func.lower(group_name)) |
|
2913 | 2967 | else: |
|
2914 | 2968 | gr = cls.query().filter(cls.group_name == group_name) |
|
2915 | 2969 | if cache: |
|
2916 | 2970 | name_key = _hash_key(group_name) |
|
2917 | 2971 | gr = gr.options( |
|
2918 | 2972 | FromCache("sql_cache_short", f"get_group_{name_key}")) |
|
2919 | 2973 | return gr.scalar() |
|
2920 | 2974 | |
|
2921 | 2975 | @classmethod |
|
2922 | 2976 | def get_user_personal_repo_group(cls, user_id): |
|
2923 | 2977 | user = User.get(user_id) |
|
2924 | 2978 | if user.username == User.DEFAULT_USER: |
|
2925 | 2979 | return None |
|
2926 | 2980 | |
|
2927 | 2981 | return cls.query()\ |
|
2928 | 2982 | .filter(cls.personal == true()) \ |
|
2929 | 2983 | .filter(cls.user == user) \ |
|
2930 | 2984 | .order_by(cls.group_id.asc()) \ |
|
2931 | 2985 | .first() |
|
2932 | 2986 | |
|
2933 | 2987 | @classmethod |
|
2934 | 2988 | def get_all_repo_groups(cls, user_id=Optional(None), group_id=Optional(None), |
|
2935 | 2989 | case_insensitive=True): |
|
2936 | 2990 | q = RepoGroup.query() |
|
2937 | 2991 | |
|
2938 | 2992 | if not isinstance(user_id, Optional): |
|
2939 | 2993 | q = q.filter(RepoGroup.user_id == user_id) |
|
2940 | 2994 | |
|
2941 | 2995 | if not isinstance(group_id, Optional): |
|
2942 | 2996 | q = q.filter(RepoGroup.group_parent_id == group_id) |
|
2943 | 2997 | |
|
2944 | 2998 | if case_insensitive: |
|
2945 | 2999 | q = q.order_by(func.lower(RepoGroup.group_name)) |
|
2946 | 3000 | else: |
|
2947 | 3001 | q = q.order_by(RepoGroup.group_name) |
|
2948 | 3002 | return q.all() |
|
2949 | 3003 | |
|
    @property
    def parents(self, parents_recursion_limit=10):
        """Return the chain of parent groups, top-most first."""
        # NOTE(review): the extra parameter on a @property can never be
        # supplied by callers, so the limit is effectively always 10.
        groups = []
        if self.parent_group is None:
            return groups
        cur_gr = self.parent_group
        groups.insert(0, cur_gr)
        cnt = 0
        while 1:
            cnt += 1
            # look one level ahead before advancing, so the immediate parent
            # (already inserted above) is not duplicated
            gr = getattr(cur_gr, 'parent_group', None)
            cur_gr = cur_gr.parent_group
            if gr is None:
                break
            if cnt == parents_recursion_limit:
                # this will prevent accidental infinite loops
                log.error('more than %s parents found for group %s, stopping '
                          'recursive parent fetching', parents_recursion_limit, self)
                break

            groups.insert(0, gr)
        return groups
|
2972 | 3026 | |
|
2973 | 3027 | @property |
|
2974 | 3028 | def last_commit_cache_update_diff(self): |
|
2975 | 3029 | return time.time() - (safe_int(self.changeset_cache.get('updated_on')) or 0) |
|
2976 | 3030 | |
|
2977 | 3031 | @classmethod |
|
2978 | 3032 | def _load_commit_change(cls, last_commit_cache): |
|
2979 | 3033 | from rhodecode.lib.vcs.utils.helpers import parse_datetime |
|
2980 | 3034 | empty_date = datetime.datetime.fromtimestamp(0) |
|
2981 | 3035 | date_latest = last_commit_cache.get('date', empty_date) |
|
2982 | 3036 | try: |
|
2983 | 3037 | return parse_datetime(date_latest) |
|
2984 | 3038 | except Exception: |
|
2985 | 3039 | return empty_date |
|
2986 | 3040 | |
|
2987 | 3041 | @property |
|
2988 | 3042 | def last_commit_change(self): |
|
2989 | 3043 | return self._load_commit_change(self.changeset_cache) |
|
2990 | 3044 | |
|
2991 | 3045 | @property |
|
2992 | 3046 | def last_db_change(self): |
|
2993 | 3047 | return self.updated_on |
|
2994 | 3048 | |
|
2995 | 3049 | @property |
|
2996 | 3050 | def children(self): |
|
2997 | 3051 | return RepoGroup.query().filter(RepoGroup.parent_group == self) |
|
2998 | 3052 | |
|
2999 | 3053 | @property |
|
3000 | 3054 | def name(self): |
|
3001 | 3055 | return self.group_name.split(RepoGroup.url_sep())[-1] |
|
3002 | 3056 | |
|
3003 | 3057 | @property |
|
3004 | 3058 | def full_path(self): |
|
3005 | 3059 | return self.group_name |
|
3006 | 3060 | |
|
3007 | 3061 | @property |
|
3008 | 3062 | def full_path_splitted(self): |
|
3009 | 3063 | return self.group_name.split(RepoGroup.url_sep()) |
|
3010 | 3064 | |
|
3011 | 3065 | @property |
|
3012 | 3066 | def repositories(self): |
|
3013 | 3067 | return Repository.query()\ |
|
3014 | 3068 | .filter(Repository.group == self)\ |
|
3015 | 3069 | .order_by(Repository.repo_name) |
|
3016 | 3070 | |
|
3017 | 3071 | @property |
|
3018 | 3072 | def repositories_recursive_count(self): |
|
3019 | 3073 | cnt = self.repositories.count() |
|
3020 | 3074 | |
|
3021 | 3075 | def children_count(group): |
|
3022 | 3076 | cnt = 0 |
|
3023 | 3077 | for child in group.children: |
|
3024 | 3078 | cnt += child.repositories.count() |
|
3025 | 3079 | cnt += children_count(child) |
|
3026 | 3080 | return cnt |
|
3027 | 3081 | |
|
3028 | 3082 | return cnt + children_count(self) |
|
3029 | 3083 | |
|
3030 | 3084 | def _recursive_objects(self, include_repos=True, include_groups=True): |
|
3031 | 3085 | all_ = [] |
|
3032 | 3086 | |
|
3033 | 3087 | def _get_members(root_gr): |
|
3034 | 3088 | if include_repos: |
|
3035 | 3089 | for r in root_gr.repositories: |
|
3036 | 3090 | all_.append(r) |
|
3037 | 3091 | childs = root_gr.children.all() |
|
3038 | 3092 | if childs: |
|
3039 | 3093 | for gr in childs: |
|
3040 | 3094 | if include_groups: |
|
3041 | 3095 | all_.append(gr) |
|
3042 | 3096 | _get_members(gr) |
|
3043 | 3097 | |
|
3044 | 3098 | root_group = [] |
|
3045 | 3099 | if include_groups: |
|
3046 | 3100 | root_group = [self] |
|
3047 | 3101 | |
|
3048 | 3102 | _get_members(self) |
|
3049 | 3103 | return root_group + all_ |
|
3050 | 3104 | |
|
3051 | 3105 | def recursive_groups_and_repos(self): |
|
3052 | 3106 | """ |
|
3053 | 3107 | Recursive return all groups, with repositories in those groups |
|
3054 | 3108 | """ |
|
3055 | 3109 | return self._recursive_objects() |
|
3056 | 3110 | |
|
3057 | 3111 | def recursive_groups(self): |
|
3058 | 3112 | """ |
|
3059 | 3113 | Returns all children groups for this group including children of children |
|
3060 | 3114 | """ |
|
3061 | 3115 | return self._recursive_objects(include_repos=False) |
|
3062 | 3116 | |
|
3063 | 3117 | def recursive_repos(self): |
|
3064 | 3118 | """ |
|
3065 | 3119 | Returns all children repositories for this group |
|
3066 | 3120 | """ |
|
3067 | 3121 | return self._recursive_objects(include_groups=False) |
|
3068 | 3122 | |
|
3069 | 3123 | def get_new_name(self, group_name): |
|
3070 | 3124 | """ |
|
3071 | 3125 | returns new full group name based on parent and new name |
|
3072 | 3126 | |
|
3073 | 3127 | :param group_name: |
|
3074 | 3128 | """ |
|
3075 | 3129 | path_prefix = (self.parent_group.full_path_splitted if |
|
3076 | 3130 | self.parent_group else []) |
|
3077 | 3131 | return RepoGroup.url_sep().join(path_prefix + [group_name]) |
|
3078 | 3132 | |
|
    def update_commit_cache(self, config=None):
        """
        Update cache of last commit for newest repository inside this repository group.
        cache_keys should be::

            source_repo_id
            short_id
            raw_id
            revision
            parents
            message
            date
            author

        :param config: unused here; presumably kept for signature parity with
            Repository.update_commit_cache — TODO confirm
        """
        from rhodecode.lib.vcs.utils.helpers import parse_datetime
        empty_date = datetime.datetime.fromtimestamp(0)

        def repo_groups_and_repos(root_gr):
            # NOTE(review): yields direct repositories and direct child
            # groups only — it does not recurse into grandchildren here;
            # child groups contribute their own (already aggregated) cache.
            for _repo in root_gr.repositories:
                yield _repo
            for child_group in root_gr.children.all():
                yield child_group

        latest_repo_cs_cache = {}
        for obj in repo_groups_and_repos(self):
            repo_cs_cache = obj.changeset_cache
            date_latest = latest_repo_cs_cache.get('date', empty_date)
            date_current = repo_cs_cache.get('date', empty_date)
            current_timestamp = datetime_to_time(parse_datetime(date_latest))
            # keep the cache of whichever object carries the newest commit
            if current_timestamp < datetime_to_time(parse_datetime(date_current)):
                latest_repo_cs_cache = repo_cs_cache
                if hasattr(obj, 'repo_id'):
                    # a Repository: record it as the source of the commit
                    latest_repo_cs_cache['source_repo_id'] = obj.repo_id
                else:
                    # a child RepoGroup: propagate its recorded source repo
                    latest_repo_cs_cache['source_repo_id'] = repo_cs_cache.get('source_repo_id')

        _date_latest = parse_datetime(latest_repo_cs_cache.get('date') or empty_date)

        latest_repo_cs_cache['updated_on'] = time.time()
        self.changeset_cache = latest_repo_cs_cache
        self.updated_on = _date_latest
        Session().add(self)
        Session().commit()

        log.debug('updated repo group `%s` with new commit cache %s, and last update_date: %s',
                  self.group_name, latest_repo_cs_cache, _date_latest)
|
3126 | 3180 | |
|
3127 | 3181 | def permissions(self, with_admins=True, with_owner=True, |
|
3128 | 3182 | expand_from_user_groups=False): |
|
3129 | 3183 | """ |
|
3130 | 3184 | Permissions for repository groups |
|
3131 | 3185 | """ |
|
3132 | 3186 | _admin_perm = 'group.admin' |
|
3133 | 3187 | |
|
3134 | 3188 | owner_row = [] |
|
3135 | 3189 | if with_owner: |
|
3136 | 3190 | usr = AttributeDict(self.user.get_dict()) |
|
3137 | 3191 | usr.owner_row = True |
|
3138 | 3192 | usr.permission = _admin_perm |
|
3139 | 3193 | owner_row.append(usr) |
|
3140 | 3194 | |
|
3141 | 3195 | super_admin_ids = [] |
|
3142 | 3196 | super_admin_rows = [] |
|
3143 | 3197 | if with_admins: |
|
3144 | 3198 | for usr in User.get_all_super_admins(): |
|
3145 | 3199 | super_admin_ids.append(usr.user_id) |
|
3146 | 3200 | # if this admin is also owner, don't double the record |
|
3147 | 3201 | if usr.user_id == owner_row[0].user_id: |
|
3148 | 3202 | owner_row[0].admin_row = True |
|
3149 | 3203 | else: |
|
3150 | 3204 | usr = AttributeDict(usr.get_dict()) |
|
3151 | 3205 | usr.admin_row = True |
|
3152 | 3206 | usr.permission = _admin_perm |
|
3153 | 3207 | super_admin_rows.append(usr) |
|
3154 | 3208 | |
|
3155 | 3209 | q = UserRepoGroupToPerm.query().filter(UserRepoGroupToPerm.group == self) |
|
3156 | 3210 | q = q.options(joinedload(UserRepoGroupToPerm.group), |
|
3157 | 3211 | joinedload(UserRepoGroupToPerm.user), |
|
3158 | 3212 | joinedload(UserRepoGroupToPerm.permission),) |
|
3159 | 3213 | |
|
3160 | 3214 | # get owners and admins and permissions. We do a trick of re-writing |
|
3161 | 3215 | # objects from sqlalchemy to named-tuples due to sqlalchemy session |
|
3162 | 3216 | # has a global reference and changing one object propagates to all |
|
3163 | 3217 | # others. This means if admin is also an owner admin_row that change |
|
3164 | 3218 | # would propagate to both objects |
|
3165 | 3219 | perm_rows = [] |
|
3166 | 3220 | for _usr in q.all(): |
|
3167 | 3221 | usr = AttributeDict(_usr.user.get_dict()) |
|
3168 | 3222 | # if this user is also owner/admin, mark as duplicate record |
|
3169 | 3223 | if usr.user_id == owner_row[0].user_id or usr.user_id in super_admin_ids: |
|
3170 | 3224 | usr.duplicate_perm = True |
|
3171 | 3225 | usr.permission = _usr.permission.permission_name |
|
3172 | 3226 | perm_rows.append(usr) |
|
3173 | 3227 | |
|
3174 | 3228 | # filter the perm rows by 'default' first and then sort them by |
|
3175 | 3229 | # admin,write,read,none permissions sorted again alphabetically in |
|
3176 | 3230 | # each group |
|
3177 | 3231 | perm_rows = sorted(perm_rows, key=display_user_sort) |
|
3178 | 3232 | |
|
3179 | 3233 | user_groups_rows = [] |
|
3180 | 3234 | if expand_from_user_groups: |
|
3181 | 3235 | for ug in self.permission_user_groups(with_members=True): |
|
3182 | 3236 | for user_data in ug.members: |
|
3183 | 3237 | user_groups_rows.append(user_data) |
|
3184 | 3238 | |
|
3185 | 3239 | return super_admin_rows + owner_row + perm_rows + user_groups_rows |
|
3186 | 3240 | |
|
3187 | 3241 | def permission_user_groups(self, with_members=False): |
|
3188 | 3242 | q = UserGroupRepoGroupToPerm.query()\ |
|
3189 | 3243 | .filter(UserGroupRepoGroupToPerm.group == self) |
|
3190 | 3244 | q = q.options(joinedload(UserGroupRepoGroupToPerm.group), |
|
3191 | 3245 | joinedload(UserGroupRepoGroupToPerm.users_group), |
|
3192 | 3246 | joinedload(UserGroupRepoGroupToPerm.permission),) |
|
3193 | 3247 | |
|
3194 | 3248 | perm_rows = [] |
|
3195 | 3249 | for _user_group in q.all(): |
|
3196 | 3250 | entry = AttributeDict(_user_group.users_group.get_dict()) |
|
3197 | 3251 | entry.permission = _user_group.permission.permission_name |
|
3198 | 3252 | if with_members: |
|
3199 | 3253 | entry.members = [x.user.get_dict() |
|
3200 | 3254 | for x in _user_group.users_group.members] |
|
3201 | 3255 | perm_rows.append(entry) |
|
3202 | 3256 | |
|
3203 | 3257 | perm_rows = sorted(perm_rows, key=display_user_group_sort) |
|
3204 | 3258 | return perm_rows |
|
3205 | 3259 | |
|
3206 | 3260 | def get_api_data(self): |
|
3207 | 3261 | """ |
|
3208 | 3262 | Common function for generating api data |
|
3209 | 3263 | |
|
3210 | 3264 | """ |
|
3211 | 3265 | group = self |
|
3212 | 3266 | data = { |
|
3213 | 3267 | 'group_id': group.group_id, |
|
3214 | 3268 | 'group_name': group.group_name, |
|
3215 | 3269 | 'group_description': group.description_safe, |
|
3216 | 3270 | 'parent_group': group.parent_group.group_name if group.parent_group else None, |
|
3217 | 3271 | 'repositories': [x.repo_name for x in group.repositories], |
|
3218 | 3272 | 'owner': group.user.username, |
|
3219 | 3273 | } |
|
3220 | 3274 | return data |
|
3221 | 3275 | |
|
3222 | 3276 | def get_dict(self): |
|
3223 | 3277 | # Since we transformed `group_name` to a hybrid property, we need to |
|
3224 | 3278 | # keep compatibility with the code which uses `group_name` field. |
|
3225 | 3279 | result = super(RepoGroup, self).get_dict() |
|
3226 | 3280 | result['group_name'] = result.pop('_group_name', None) |
|
3227 | 3281 | result.pop('_changeset_cache', '') |
|
3228 | 3282 | return result |
|
3229 | 3283 | |
|
3230 | 3284 | |
|
3231 | 3285 | class Permission(Base, BaseModel): |
|
3232 | 3286 | __tablename__ = 'permissions' |
|
3233 | 3287 | __table_args__ = ( |
|
3234 | 3288 | Index('p_perm_name_idx', 'permission_name'), |
|
3235 | 3289 | base_table_args, |
|
3236 | 3290 | ) |
|
3237 | 3291 | |
|
3238 | 3292 | PERMS = [ |
|
3239 | 3293 | ('hg.admin', _('RhodeCode Super Administrator')), |
|
3240 | 3294 | |
|
3241 | 3295 | ('repository.none', _('Repository no access')), |
|
3242 | 3296 | ('repository.read', _('Repository read access')), |
|
3243 | 3297 | ('repository.write', _('Repository write access')), |
|
3244 | 3298 | ('repository.admin', _('Repository admin access')), |
|
3245 | 3299 | |
|
3246 | 3300 | ('group.none', _('Repository group no access')), |
|
3247 | 3301 | ('group.read', _('Repository group read access')), |
|
3248 | 3302 | ('group.write', _('Repository group write access')), |
|
3249 | 3303 | ('group.admin', _('Repository group admin access')), |
|
3250 | 3304 | |
|
3251 | 3305 | ('usergroup.none', _('User group no access')), |
|
3252 | 3306 | ('usergroup.read', _('User group read access')), |
|
3253 | 3307 | ('usergroup.write', _('User group write access')), |
|
3254 | 3308 | ('usergroup.admin', _('User group admin access')), |
|
3255 | 3309 | |
|
3256 | 3310 | ('branch.none', _('Branch no permissions')), |
|
3257 | 3311 | ('branch.merge', _('Branch access by web merge')), |
|
3258 | 3312 | ('branch.push', _('Branch access by push')), |
|
3259 | 3313 | ('branch.push_force', _('Branch access by push with force')), |
|
3260 | 3314 | |
|
3261 | 3315 | ('hg.repogroup.create.false', _('Repository Group creation disabled')), |
|
3262 | 3316 | ('hg.repogroup.create.true', _('Repository Group creation enabled')), |
|
3263 | 3317 | |
|
3264 | 3318 | ('hg.usergroup.create.false', _('User Group creation disabled')), |
|
3265 | 3319 | ('hg.usergroup.create.true', _('User Group creation enabled')), |
|
3266 | 3320 | |
|
3267 | 3321 | ('hg.create.none', _('Repository creation disabled')), |
|
3268 | 3322 | ('hg.create.repository', _('Repository creation enabled')), |
|
3269 | 3323 | ('hg.create.write_on_repogroup.true', _('Repository creation enabled with write permission to a repository group')), |
|
3270 | 3324 | ('hg.create.write_on_repogroup.false', _('Repository creation disabled with write permission to a repository group')), |
|
3271 | 3325 | |
|
3272 | 3326 | ('hg.fork.none', _('Repository forking disabled')), |
|
3273 | 3327 | ('hg.fork.repository', _('Repository forking enabled')), |
|
3274 | 3328 | |
|
3275 | 3329 | ('hg.register.none', _('Registration disabled')), |
|
3276 | 3330 | ('hg.register.manual_activate', _('User Registration with manual account activation')), |
|
3277 | 3331 | ('hg.register.auto_activate', _('User Registration with automatic account activation')), |
|
3278 | 3332 | |
|
3279 | 3333 | ('hg.password_reset.enabled', _('Password reset enabled')), |
|
3280 | 3334 | ('hg.password_reset.hidden', _('Password reset hidden')), |
|
3281 | 3335 | ('hg.password_reset.disabled', _('Password reset disabled')), |
|
3282 | 3336 | |
|
3283 | 3337 | ('hg.extern_activate.manual', _('Manual activation of external account')), |
|
3284 | 3338 | ('hg.extern_activate.auto', _('Automatic activation of external account')), |
|
3285 | 3339 | |
|
3286 | 3340 | ('hg.inherit_default_perms.false', _('Inherit object permissions from default user disabled')), |
|
3287 | 3341 | ('hg.inherit_default_perms.true', _('Inherit object permissions from default user enabled')), |
|
3288 | 3342 | ] |
|
3289 | 3343 | |
|
    # Definition of system default permissions for the DEFAULT user,
    # created once on system setup.
    DEFAULT_USER_PERMISSIONS = [
        # object perms
        'repository.read',
        'group.read',
        'usergroup.read',
        # branch, for backward compat we need same value as before so forced pushed
        'branch.push_force',
        # global
        'hg.create.repository',
        'hg.repogroup.create.false',
        'hg.usergroup.create.false',
        'hg.create.write_on_repogroup.true',
        'hg.fork.repository',
        'hg.register.manual_activate',
        'hg.password_reset.enabled',
        'hg.extern_activate.auto',
        'hg.inherit_default_perms.true',
    ]

    # Weight defines which permissions are more important.
    # The higher the number, the more important the permission; used when
    # resolving conflicting grants for the same object.
    PERM_WEIGHTS = {
        'repository.none': 0,
        'repository.read': 1,
        'repository.write': 3,
        'repository.admin': 4,

        'group.none': 0,
        'group.read': 1,
        'group.write': 3,
        'group.admin': 4,

        'usergroup.none': 0,
        'usergroup.read': 1,
        'usergroup.write': 3,
        'usergroup.admin': 4,

        'branch.none': 0,
        'branch.merge': 1,
        'branch.push': 3,
        'branch.push_force': 4,

        'hg.repogroup.create.false': 0,
        'hg.repogroup.create.true': 1,

        'hg.usergroup.create.false': 0,
        'hg.usergroup.create.true': 1,

        'hg.fork.none': 0,
        'hg.fork.repository': 1,
        'hg.create.none': 0,
        'hg.create.repository': 1
    }

    permission_id = Column("permission_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    permission_name = Column("permission_name", String(255), nullable=True, unique=None, default=None)
    permission_longname = Column("permission_longname", String(255), nullable=True, unique=None, default=None)
|
3351 | 3405 | def __repr__(self): |
|
3352 | 3406 | return "<%s('%s:%s')>" % ( |
|
3353 | 3407 | self.cls_name, self.permission_id, self.permission_name |
|
3354 | 3408 | ) |
|
3355 | 3409 | |
|
3356 | 3410 | @classmethod |
|
3357 | 3411 | def get_by_key(cls, key): |
|
3358 | 3412 | return cls.query().filter(cls.permission_name == key).scalar() |
|
3359 | 3413 | |
|
3360 | 3414 | @classmethod |
|
3361 | 3415 | def get_default_repo_perms(cls, user_id, repo_id=None): |
|
3362 | 3416 | q = Session().query(UserRepoToPerm, Repository, Permission)\ |
|
3363 | 3417 | .join((Permission, UserRepoToPerm.permission_id == Permission.permission_id))\ |
|
3364 | 3418 | .join((Repository, UserRepoToPerm.repository_id == Repository.repo_id))\ |
|
3365 | 3419 | .filter(UserRepoToPerm.user_id == user_id) |
|
3366 | 3420 | if repo_id: |
|
3367 | 3421 | q = q.filter(UserRepoToPerm.repository_id == repo_id) |
|
3368 | 3422 | return q.all() |
|
3369 | 3423 | |
|
3370 | 3424 | @classmethod |
|
3371 | 3425 | def get_default_repo_branch_perms(cls, user_id, repo_id=None): |
|
3372 | 3426 | q = Session().query(UserToRepoBranchPermission, UserRepoToPerm, Permission) \ |
|
3373 | 3427 | .join( |
|
3374 | 3428 | Permission, |
|
3375 | 3429 | UserToRepoBranchPermission.permission_id == Permission.permission_id) \ |
|
3376 | 3430 | .join( |
|
3377 | 3431 | UserRepoToPerm, |
|
3378 | 3432 | UserToRepoBranchPermission.rule_to_perm_id == UserRepoToPerm.repo_to_perm_id) \ |
|
3379 | 3433 | .filter(UserRepoToPerm.user_id == user_id) |
|
3380 | 3434 | |
|
3381 | 3435 | if repo_id: |
|
3382 | 3436 | q = q.filter(UserToRepoBranchPermission.repository_id == repo_id) |
|
3383 | 3437 | return q.order_by(UserToRepoBranchPermission.rule_order).all() |
|
3384 | 3438 | |
|
3385 | 3439 | @classmethod |
|
3386 | 3440 | def get_default_repo_perms_from_user_group(cls, user_id, repo_id=None): |
|
3387 | 3441 | q = Session().query(UserGroupRepoToPerm, Repository, Permission)\ |
|
3388 | 3442 | .join( |
|
3389 | 3443 | Permission, |
|
3390 | 3444 | UserGroupRepoToPerm.permission_id == Permission.permission_id)\ |
|
3391 | 3445 | .join( |
|
3392 | 3446 | Repository, |
|
3393 | 3447 | UserGroupRepoToPerm.repository_id == Repository.repo_id)\ |
|
3394 | 3448 | .join( |
|
3395 | 3449 | UserGroup, |
|
3396 | 3450 | UserGroupRepoToPerm.users_group_id == |
|
3397 | 3451 | UserGroup.users_group_id)\ |
|
3398 | 3452 | .join( |
|
3399 | 3453 | UserGroupMember, |
|
3400 | 3454 | UserGroupRepoToPerm.users_group_id == |
|
3401 | 3455 | UserGroupMember.users_group_id)\ |
|
3402 | 3456 | .filter( |
|
3403 | 3457 | UserGroupMember.user_id == user_id, |
|
3404 | 3458 | UserGroup.users_group_active == true()) |
|
3405 | 3459 | if repo_id: |
|
3406 | 3460 | q = q.filter(UserGroupRepoToPerm.repository_id == repo_id) |
|
3407 | 3461 | return q.all() |
|
3408 | 3462 | |
|
3409 | 3463 | @classmethod |
|
3410 | 3464 | def get_default_repo_branch_perms_from_user_group(cls, user_id, repo_id=None): |
|
3411 | 3465 | q = Session().query(UserGroupToRepoBranchPermission, UserGroupRepoToPerm, Permission) \ |
|
3412 | 3466 | .join( |
|
3413 | 3467 | Permission, |
|
3414 | 3468 | UserGroupToRepoBranchPermission.permission_id == Permission.permission_id) \ |
|
3415 | 3469 | .join( |
|
3416 | 3470 | UserGroupRepoToPerm, |
|
3417 | 3471 | UserGroupToRepoBranchPermission.rule_to_perm_id == UserGroupRepoToPerm.users_group_to_perm_id) \ |
|
3418 | 3472 | .join( |
|
3419 | 3473 | UserGroup, |
|
3420 | 3474 | UserGroupRepoToPerm.users_group_id == UserGroup.users_group_id) \ |
|
3421 | 3475 | .join( |
|
3422 | 3476 | UserGroupMember, |
|
3423 | 3477 | UserGroupRepoToPerm.users_group_id == UserGroupMember.users_group_id) \ |
|
3424 | 3478 | .filter( |
|
3425 | 3479 | UserGroupMember.user_id == user_id, |
|
3426 | 3480 | UserGroup.users_group_active == true()) |
|
3427 | 3481 | |
|
3428 | 3482 | if repo_id: |
|
3429 | 3483 | q = q.filter(UserGroupToRepoBranchPermission.repository_id == repo_id) |
|
3430 | 3484 | return q.order_by(UserGroupToRepoBranchPermission.rule_order).all() |
|
3431 | 3485 | |
|
3432 | 3486 | @classmethod |
|
3433 | 3487 | def get_default_group_perms(cls, user_id, repo_group_id=None): |
|
3434 | 3488 | q = Session().query(UserRepoGroupToPerm, RepoGroup, Permission)\ |
|
3435 | 3489 | .join( |
|
3436 | 3490 | Permission, |
|
3437 | 3491 | UserRepoGroupToPerm.permission_id == Permission.permission_id)\ |
|
3438 | 3492 | .join( |
|
3439 | 3493 | RepoGroup, |
|
3440 | 3494 | UserRepoGroupToPerm.group_id == RepoGroup.group_id)\ |
|
3441 | 3495 | .filter(UserRepoGroupToPerm.user_id == user_id) |
|
3442 | 3496 | if repo_group_id: |
|
3443 | 3497 | q = q.filter(UserRepoGroupToPerm.group_id == repo_group_id) |
|
3444 | 3498 | return q.all() |
|
3445 | 3499 | |
|
3446 | 3500 | @classmethod |
|
3447 | 3501 | def get_default_group_perms_from_user_group( |
|
3448 | 3502 | cls, user_id, repo_group_id=None): |
|
3449 | 3503 | q = Session().query(UserGroupRepoGroupToPerm, RepoGroup, Permission)\ |
|
3450 | 3504 | .join( |
|
3451 | 3505 | Permission, |
|
3452 | 3506 | UserGroupRepoGroupToPerm.permission_id == |
|
3453 | 3507 | Permission.permission_id)\ |
|
3454 | 3508 | .join( |
|
3455 | 3509 | RepoGroup, |
|
3456 | 3510 | UserGroupRepoGroupToPerm.group_id == RepoGroup.group_id)\ |
|
3457 | 3511 | .join( |
|
3458 | 3512 | UserGroup, |
|
3459 | 3513 | UserGroupRepoGroupToPerm.users_group_id == |
|
3460 | 3514 | UserGroup.users_group_id)\ |
|
3461 | 3515 | .join( |
|
3462 | 3516 | UserGroupMember, |
|
3463 | 3517 | UserGroupRepoGroupToPerm.users_group_id == |
|
3464 | 3518 | UserGroupMember.users_group_id)\ |
|
3465 | 3519 | .filter( |
|
3466 | 3520 | UserGroupMember.user_id == user_id, |
|
3467 | 3521 | UserGroup.users_group_active == true()) |
|
3468 | 3522 | if repo_group_id: |
|
3469 | 3523 | q = q.filter(UserGroupRepoGroupToPerm.group_id == repo_group_id) |
|
3470 | 3524 | return q.all() |
|
3471 | 3525 | |
|
3472 | 3526 | @classmethod |
|
3473 | 3527 | def get_default_user_group_perms(cls, user_id, user_group_id=None): |
|
3474 | 3528 | q = Session().query(UserUserGroupToPerm, UserGroup, Permission)\ |
|
3475 | 3529 | .join((Permission, UserUserGroupToPerm.permission_id == Permission.permission_id))\ |
|
3476 | 3530 | .join((UserGroup, UserUserGroupToPerm.user_group_id == UserGroup.users_group_id))\ |
|
3477 | 3531 | .filter(UserUserGroupToPerm.user_id == user_id) |
|
3478 | 3532 | if user_group_id: |
|
3479 | 3533 | q = q.filter(UserUserGroupToPerm.user_group_id == user_group_id) |
|
3480 | 3534 | return q.all() |
|
3481 | 3535 | |
|
3482 | 3536 | @classmethod |
|
3483 | 3537 | def get_default_user_group_perms_from_user_group( |
|
3484 | 3538 | cls, user_id, user_group_id=None): |
|
3485 | 3539 | TargetUserGroup = aliased(UserGroup, name='target_user_group') |
|
3486 | 3540 | q = Session().query(UserGroupUserGroupToPerm, UserGroup, Permission)\ |
|
3487 | 3541 | .join( |
|
3488 | 3542 | Permission, |
|
3489 | 3543 | UserGroupUserGroupToPerm.permission_id == |
|
3490 | 3544 | Permission.permission_id)\ |
|
3491 | 3545 | .join( |
|
3492 | 3546 | TargetUserGroup, |
|
3493 | 3547 | UserGroupUserGroupToPerm.target_user_group_id == |
|
3494 | 3548 | TargetUserGroup.users_group_id)\ |
|
3495 | 3549 | .join( |
|
3496 | 3550 | UserGroup, |
|
3497 | 3551 | UserGroupUserGroupToPerm.user_group_id == |
|
3498 | 3552 | UserGroup.users_group_id)\ |
|
3499 | 3553 | .join( |
|
3500 | 3554 | UserGroupMember, |
|
3501 | 3555 | UserGroupUserGroupToPerm.user_group_id == |
|
3502 | 3556 | UserGroupMember.users_group_id)\ |
|
3503 | 3557 | .filter( |
|
3504 | 3558 | UserGroupMember.user_id == user_id, |
|
3505 | 3559 | UserGroup.users_group_active == true()) |
|
3506 | 3560 | if user_group_id: |
|
3507 | 3561 | q = q.filter( |
|
3508 | 3562 | UserGroupUserGroupToPerm.user_group_id == user_group_id) |
|
3509 | 3563 | |
|
3510 | 3564 | return q.all() |
|
3511 | 3565 | |
|
3512 | 3566 | |
|
class UserRepoToPerm(Base, BaseModel):
    """Permission granted directly to a user on a single repository."""
    __tablename__ = 'repo_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'repository_id', 'permission_id'),
        base_table_args
    )

    repo_to_perm_id = Column("repo_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates="repo_to_perm")
    repository = relationship('Repository', back_populates="repo_to_perm")
    permission = relationship('Permission')

    # branch rules hanging off this grant; deleted together with it
    branch_perm_entry = relationship('UserToRepoBranchPermission', cascade="all, delete-orphan", lazy='joined', back_populates='user_repo_to_perm')

    @classmethod
    def create(cls, user, repository, permission):
        """Build a new grant, add it to the session (no commit) and return it."""
        entry = cls()
        entry.user = user
        entry.repository = repository
        entry.permission = permission
        Session().add(entry)
        return entry

    def __repr__(self):
        return f'<{self.user} => {self.repository} >'
|
3542 | 3596 | |
|
3543 | 3597 | |
|
class UserUserGroupToPerm(Base, BaseModel):
    """Permission granted directly to a user on a user group."""
    __tablename__ = 'user_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'user_group_id', 'permission_id'),
        base_table_args
    )

    user_user_group_to_perm_id = Column("user_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='user_group_to_perm')
    user_group = relationship('UserGroup', back_populates='user_user_group_to_perm')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, user_group, permission):
        """Build a new grant, add it to the session (no commit) and return it."""
        entry = cls()
        entry.user = user
        entry.user_group = user_group
        entry.permission = permission
        Session().add(entry)
        return entry

    def __repr__(self):
        return f'<{self.user} => {self.user_group} >'
|
3571 | 3625 | |
|
3572 | 3626 | |
|
class UserToPerm(Base, BaseModel):
    """Global (application-wide) permission assigned directly to a user."""
    __tablename__ = 'user_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'permission_id'),
        base_table_args
    )

    user_to_perm_id = Column("user_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='user_perms')
    permission = relationship('Permission', lazy='joined')

    def __repr__(self):
        return f'<{self.user} => {self.permission} >'
|
3589 | 3643 | |
|
3590 | 3644 | |
|
class UserGroupRepoToPerm(Base, BaseModel):
    """Permission granted to a user group on a single repository."""
    __tablename__ = 'users_group_repo_to_perm'
    __table_args__ = (
        UniqueConstraint('repository_id', 'users_group_id', 'permission_id'),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_repo_to_perm')
    permission = relationship('Permission')
    repository = relationship('Repository', back_populates='users_group_to_perm')
    user_group_branch_perms = relationship('UserGroupToRepoBranchPermission', cascade='all', back_populates='user_group_repo_to_perm')

    @classmethod
    def create(cls, users_group, repository, permission):
        """Build a new grant, add it to the session (no commit) and return it."""
        entry = cls()
        entry.users_group = users_group
        entry.repository = repository
        entry.permission = permission
        Session().add(entry)
        return entry

    def __repr__(self):
        return f'<UserGroupRepoToPerm:{self.users_group} => {self.repository} >'
|
3619 | 3673 | |
|
3620 | 3674 | |
|
class UserGroupUserGroupToPerm(Base, BaseModel):
    """Permission one user group holds on another (target) user group."""
    __tablename__ = 'user_group_user_group_to_perm'
    __table_args__ = (
        UniqueConstraint('target_user_group_id', 'user_group_id', 'permission_id'),
        # a group must never hold a grant on itself
        CheckConstraint('target_user_group_id != user_group_id'),
        base_table_args
    )

    user_group_user_group_to_perm_id = Column("user_group_user_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    target_user_group_id = Column("target_user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    user_group_id = Column("user_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)

    # both FKs point at users_groups, so each side needs an explicit primaryjoin
    target_user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.target_user_group_id==UserGroup.users_group_id', back_populates='user_group_user_group_to_perm')
    user_group = relationship('UserGroup', primaryjoin='UserGroupUserGroupToPerm.user_group_id==UserGroup.users_group_id')
    permission = relationship('Permission')

    @classmethod
    def create(cls, target_user_group, user_group, permission):
        """Build a new grant, add it to the session (no commit) and return it."""
        entry = cls()
        entry.target_user_group = target_user_group
        entry.user_group = user_group
        entry.permission = permission
        Session().add(entry)
        return entry

    def __repr__(self):
        return f'<UserGroupUserGroup:{self.target_user_group} => {self.user_group} >'
|
3649 | 3703 | |
|
3650 | 3704 | |
|
class UserGroupToPerm(Base, BaseModel):
    """Global (application-wide) permission assigned to a user group."""
    __tablename__ = 'users_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'permission_id',),
        base_table_args
    )

    users_group_to_perm_id = Column("users_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_to_perm')
    permission = relationship('Permission')
|
3664 | 3718 | |
|
3665 | 3719 | |
|
class UserRepoGroupToPerm(Base, BaseModel):
    """Permission granted directly to a user on a repository group."""
    __tablename__ = 'user_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('user_id', 'group_id', 'permission_id'),
        base_table_args
    )

    group_to_perm_id = Column("group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    user = relationship('User', back_populates='repo_group_to_perm')
    group = relationship('RepoGroup', back_populates='repo_group_to_perm')
    permission = relationship('Permission')

    @classmethod
    def create(cls, user, repository_group, permission):
        """Build a new grant, add it to the session (no commit) and return it."""
        n = cls()
        n.user = user
        n.group = repository_group
        n.permission = permission
        Session().add(n)
        return n

    def __repr__(self):
        # added for consistency with the other *ToPerm classes, which all
        # provide a debug representation
        return f'<UserRepoGroupToPerm:{self.user} => {self.group} >'
|
3690 | 3744 | |
|
3691 | 3745 | |
|
class UserGroupRepoGroupToPerm(Base, BaseModel):
    """Permission granted to a user group on a repository group."""
    __tablename__ = 'users_group_repo_group_to_perm'
    __table_args__ = (
        UniqueConstraint('users_group_id', 'group_id'),
        base_table_args
    )

    users_group_repo_group_to_perm_id = Column("users_group_repo_group_to_perm_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False, unique=None, default=None)
    group_id = Column("group_id", Integer(), ForeignKey('groups.group_id'), nullable=False, unique=None, default=None)
    permission_id = Column("permission_id", Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)

    users_group = relationship('UserGroup', back_populates='users_group_repo_group_to_perm')
    permission = relationship('Permission')
    group = relationship('RepoGroup', back_populates='users_group_to_perm')

    @classmethod
    def create(cls, user_group, repository_group, permission):
        """Build a new grant, add it to the session (no commit) and return it."""
        entry = cls()
        entry.users_group = user_group
        entry.group = repository_group
        entry.permission = permission
        Session().add(entry)
        return entry

    def __repr__(self):
        # f-string form of the old %-format; output is identical
        return f'<UserGroupRepoGroupToPerm:{self.users_group} => {self.group} >'
|
3719 | 3773 | |
|
3720 | 3774 | |
|
class Statistics(Base, BaseModel):
    """Pre-computed commit/language statistics for one repository.

    The activity and language columns hold serialized JSON blobs.
    """
    __tablename__ = 'statistics'
    __table_args__ = (
        base_table_args
    )

    stat_id = Column("stat_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    repository_id = Column("repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=True, default=None)
    stat_on_revision = Column("stat_on_revision", Integer(), nullable=False)
    commit_activity = Column("commit_activity", LargeBinary(1000000), nullable=False)  # JSON data
    commit_activity_combined = Column("commit_activity_combined", LargeBinary(), nullable=False)  # JSON data
    languages = Column("languages", LargeBinary(1000000), nullable=False)  # JSON data

    repository = relationship('Repository', single_parent=True, viewonly=True)
|
3735 | 3789 | |
|
3736 | 3790 | |
|
class UserFollowing(Base, BaseModel):
    """A user following either another user or a repository (one of the two
    ``follows_*`` foreign keys is set per row)."""
    __tablename__ = 'user_followings'
    __table_args__ = (
        UniqueConstraint('user_id', 'follows_repository_id'),
        UniqueConstraint('user_id', 'follows_user_id'),
        base_table_args
    )

    user_following_id = Column("user_following_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    follows_repo_id = Column("follows_repository_id", Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    follows_user_id = Column("follows_user_id", Integer(), ForeignKey('users.user_id'), nullable=True, unique=None, default=None)
    follows_from = Column('follows_from', DateTime(timezone=False), nullable=True, unique=None, default=datetime.datetime.now)

    user = relationship('User', primaryjoin='User.user_id==UserFollowing.user_id', back_populates='followings')

    follows_user = relationship('User', primaryjoin='User.user_id==UserFollowing.follows_user_id')
    follows_repository = relationship('Repository', order_by='Repository.repo_name', back_populates='followers')

    @classmethod
    def get_repo_followers(cls, repo_id):
        """Return a query (not a list) over followings of the given repository."""
        return cls.query().filter(cls.follows_repo_id == repo_id)
|
3759 | 3813 | |
|
3760 | 3814 | |
|
class CacheKey(Base, BaseModel):
    """
    DB-backed registry of cache keys with an invalidation state UID.

    Rotating ``cache_state_uid`` (see :meth:`set_invalidate`) signals all
    workers that entries computed under the old UID are stale.
    """
    __tablename__ = 'cache_invalidation'
    __table_args__ = (
        UniqueConstraint('cache_key'),
        Index('key_idx', 'cache_key'),
        Index('cache_args_idx', 'cache_args'),
        base_table_args,
    )

    CACHE_TYPE_FEED = 'FEED'

    # namespaces used to register process/thread aware caches
    REPO_INVALIDATION_NAMESPACE = 'repo_cache.v1:{repo_id}'

    cache_id = Column("cache_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    cache_key = Column("cache_key", String(255), nullable=True, unique=None, default=None)
    cache_args = Column("cache_args", String(255), nullable=True, unique=None, default=None)
    cache_state_uid = Column("cache_state_uid", String(255), nullable=True, unique=None, default=None)
    cache_active = Column("cache_active", Boolean(), nullable=True, unique=None, default=False)

    def __init__(self, cache_key, cache_args='', cache_state_uid=None, cache_active=False):
        self.cache_key = cache_key
        self.cache_args = cache_args
        self.cache_active = cache_active
        # first key should be same for all entries, since all workers should share it
        self.cache_state_uid = cache_state_uid or self.generate_new_state_uid()

    def __repr__(self):
        return "<%s('%s:%s[%s]')>" % (
            self.cls_name,
            self.cache_id, self.cache_key, self.cache_active)

    def _cache_key_partition(self):
        # split cache_key around cache_args; yields (prefix, repo_name, suffix)
        prefix, repo_name, suffix = self.cache_key.partition(self.cache_args)
        return prefix, repo_name, suffix

    def get_prefix(self):
        """
        Try to extract prefix from existing cache key. The key could consist
        of prefix, repo_name, suffix
        """
        # this returns prefix, repo_name, suffix
        return self._cache_key_partition()[0]

    def get_suffix(self):
        """
        get suffix that might have been used in _get_cache_key to
        generate self.cache_key. Only used for informational purposes
        in repo_edit.mako.
        """
        # prefix, repo_name, suffix
        return self._cache_key_partition()[2]

    @classmethod
    def generate_new_state_uid(cls, based_on=None):
        """Return a new state UID: deterministic (uuid5) when *based_on* is
        given, otherwise random (uuid4)."""
        if based_on:
            return str(uuid.uuid5(uuid.NAMESPACE_URL, safe_str(based_on)))
        else:
            return str(uuid.uuid4())

    @classmethod
    def delete_all_cache(cls):
        """
        Delete all cache keys from database.
        Should only be run when all instances are down and all entries
        thus stale.
        """
        cls.query().delete()
        Session().commit()

    @classmethod
    def set_invalidate(cls, cache_uid, delete=False):
        """
        Mark all caches of a repo as invalid in the database.

        With ``delete=True`` the rows are removed; otherwise the state UID
        is rotated so cached entries computed under the old UID become stale.
        Commits on success, rolls back (and logs) on any failure.
        """
        try:
            qry = Session().query(cls).filter(cls.cache_key == cache_uid)
            if delete:
                qry.delete()
                log.debug('cache objects deleted for cache args %s',
                          safe_str(cache_uid))
            else:
                new_uid = cls.generate_new_state_uid()
                qry.update({"cache_state_uid": new_uid,
                            "cache_args": f"repo_state:{time.time()}"})
                log.debug('cache object %s set new UID %s',
                          safe_str(cache_uid), new_uid)

            Session().commit()
        except Exception:
            log.exception(
                'Cache key invalidation failed for cache args %s',
                safe_str(cache_uid))
            Session().rollback()

    @classmethod
    def get_active_cache(cls, cache_key):
        """Return the CacheKey row for *cache_key*, or None when absent."""
        inv_obj = cls.query().filter(cls.cache_key == cache_key).scalar()
        if inv_obj:
            return inv_obj
        return None

    @classmethod
    def get_namespace_map(cls, namespace):
        """Map cache_key -> CacheKey row for all rows in *namespace*."""
        return {
            x.cache_key: x
            for x in cls.query().filter(cls.cache_args == namespace)}
|
3868 | 3922 | |
|
3869 | 3923 | |
|
class ChangesetComment(Base, BaseModel):
    """Comment attached to a commit revision or a pull request (class
    continues below with its methods)."""
    __tablename__ = 'changeset_comments'
    __table_args__ = (
        Index('cc_revision_idx', 'revision'),
        base_table_args,
    )

    # display_state marker for comments left on now-changed diff lines
    COMMENT_OUTDATED = 'comment_outdated'
    COMMENT_TYPE_NOTE = 'note'
    COMMENT_TYPE_TODO = 'todo'
    COMMENT_TYPES = [COMMENT_TYPE_NOTE, COMMENT_TYPE_TODO]

    # immutable_state values: whether the comment may still be edited/deleted
    OP_IMMUTABLE = 'immutable'
    OP_CHANGEABLE = 'changeable'

    comment_id = Column('comment_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    # set for commit comments; NULL for pull-request comments
    revision = Column('revision', String(40), nullable=True)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)
    pull_request_version_id = Column("pull_request_version_id", Integer(), ForeignKey('pull_request_versions.pull_request_version_id'), nullable=True)
    # line/file anchors are NULL for general (non-inline) comments
    line_no = Column('line_no', Unicode(10), nullable=True)
    hl_lines = Column('hl_lines', Unicode(512), nullable=True)
    f_path = Column('f_path', Unicode(1000), nullable=True)
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    renderer = Column('renderer', Unicode(64), nullable=True)
    display_state = Column('display_state', Unicode(128), nullable=True)
    immutable_state = Column('immutable_state', Unicode(128), nullable=True, default=OP_CHANGEABLE)
    draft = Column('draft', Boolean(), nullable=True, default=False)

    comment_type = Column('comment_type', Unicode(128), nullable=True, default=COMMENT_TYPE_NOTE)
    # self-referential FK: a TODO comment resolved by another comment
    resolved_comment_id = Column('resolved_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=True)

    resolved_comment = relationship('ChangesetComment', remote_side=comment_id, back_populates='resolved_by')
    resolved_by = relationship('ChangesetComment', back_populates='resolved_comment')

    author = relationship('User', lazy='select', back_populates='user_comments')
    repo = relationship('Repository', back_populates='comments')
    status_change = relationship('ChangesetStatus', cascade="all, delete-orphan", lazy='select', back_populates='comment')
    pull_request = relationship('PullRequest', lazy='select', back_populates='comments')
    pull_request_version = relationship('PullRequestVersion', lazy='select')
    history = relationship('ChangesetCommentHistory', cascade='all, delete-orphan', lazy='select', order_by='ChangesetCommentHistory.version', back_populates="comment")
|
3914 | 3968 | |
|
3915 | 3969 | @classmethod |
|
3916 | 3970 | def get_users(cls, revision=None, pull_request_id=None): |
|
3917 | 3971 | """ |
|
3918 | 3972 | Returns user associated with this ChangesetComment. ie those |
|
3919 | 3973 | who actually commented |
|
3920 | 3974 | |
|
3921 | 3975 | :param cls: |
|
3922 | 3976 | :param revision: |
|
3923 | 3977 | """ |
|
3924 | 3978 | q = Session().query(User).join(ChangesetComment.author) |
|
3925 | 3979 | if revision: |
|
3926 | 3980 | q = q.filter(cls.revision == revision) |
|
3927 | 3981 | elif pull_request_id: |
|
3928 | 3982 | q = q.filter(cls.pull_request_id == pull_request_id) |
|
3929 | 3983 | return q.all() |
|
3930 | 3984 | |
|
3931 | 3985 | @classmethod |
|
3932 | 3986 | def get_index_from_version(cls, pr_version, versions=None, num_versions=None) -> int: |
|
3933 | 3987 | if pr_version is None: |
|
3934 | 3988 | return 0 |
|
3935 | 3989 | |
|
3936 | 3990 | if versions is not None: |
|
3937 | 3991 | num_versions = [x.pull_request_version_id for x in versions] |
|
3938 | 3992 | |
|
3939 | 3993 | num_versions = num_versions or [] |
|
3940 | 3994 | try: |
|
3941 | 3995 | return num_versions.index(pr_version) + 1 |
|
3942 | 3996 | except (IndexError, ValueError): |
|
3943 | 3997 | return 0 |
|
3944 | 3998 | |
|
    @property
    def outdated(self):
        # True when the comment was flagged outdated via display_state
        # (set when a newer PR version superseded the commented lines).
        return self.display_state == self.COMMENT_OUTDATED

    @property
    def outdated_js(self):
        # Same flag, serialized as a JSON literal for templates/JavaScript.
        return str_json(self.display_state == self.COMMENT_OUTDATED)

    @property
    def immutable(self):
        # True when the comment may no longer be edited or deleted.
        return self.immutable_state == self.OP_IMMUTABLE
|
3956 | 4010 | |
|
3957 | 4011 | def outdated_at_version(self, version: int) -> bool: |
|
3958 | 4012 | """ |
|
3959 | 4013 | Checks if comment is outdated for given pull request version |
|
3960 | 4014 | """ |
|
3961 | 4015 | |
|
3962 | 4016 | def version_check(): |
|
3963 | 4017 | return self.pull_request_version_id and self.pull_request_version_id != version |
|
3964 | 4018 | |
|
3965 | 4019 | if self.is_inline: |
|
3966 | 4020 | return self.outdated and version_check() |
|
3967 | 4021 | else: |
|
3968 | 4022 | # general comments don't have .outdated set, also latest don't have a version |
|
3969 | 4023 | return version_check() |
|
3970 | 4024 | |
|
    def outdated_at_version_js(self, version):
        """
        JSON-serialized variant of :meth:`outdated_at_version`, for direct
        embedding into templates/JavaScript.
        """
        return str_json(self.outdated_at_version(version))
|
3976 | 4030 | |
|
3977 | 4031 | def older_than_version(self, version: int) -> bool: |
|
3978 | 4032 | """ |
|
3979 | 4033 | Checks if comment is made from a previous version than given. |
|
3980 | 4034 | Assumes self.pull_request_version.pull_request_version_id is an integer if not None. |
|
3981 | 4035 | """ |
|
3982 | 4036 | |
|
3983 | 4037 | # If version is None, return False as the current version cannot be less than None |
|
3984 | 4038 | if version is None: |
|
3985 | 4039 | return False |
|
3986 | 4040 | |
|
3987 | 4041 | # Ensure that the version is an integer to prevent TypeError on comparison |
|
3988 | 4042 | if not isinstance(version, int): |
|
3989 | 4043 | raise ValueError("The provided version must be an integer.") |
|
3990 | 4044 | |
|
3991 | 4045 | # Initialize current version to 0 or pull_request_version_id if it's available |
|
3992 | 4046 | cur_ver = 0 |
|
3993 | 4047 | if self.pull_request_version and self.pull_request_version.pull_request_version_id is not None: |
|
3994 | 4048 | cur_ver = self.pull_request_version.pull_request_version_id |
|
3995 | 4049 | |
|
3996 | 4050 | # Return True if the current version is less than the given version |
|
3997 | 4051 | return cur_ver < version |
|
3998 | 4052 | |
|
    def older_than_version_js(self, version):
        """
        JSON-serialized variant of :meth:`older_than_version`, for direct
        embedding into templates/JavaScript.
        """
        return str_json(self.older_than_version(version))
|
4004 | 4058 | |
|
    @property
    def commit_id(self):
        """New style naming to stop using .revision"""
        return self.revision

    @property
    def resolved(self):
        # First comment from the resolved_by backref (the one that resolved
        # this TODO), or None when still unresolved.
        return self.resolved_by[0] if self.resolved_by else None

    @property
    def is_todo(self):
        # True for TODO-type comments (as opposed to plain notes).
        return self.comment_type == self.COMMENT_TYPE_TODO

    @property
    def is_inline(self):
        # Inline == anchored to a file line; requires both markers to be set.
        if self.line_no and self.f_path:
            return True
        return False
|
4023 | 4077 | |
|
4024 | 4078 | @property |
|
4025 | 4079 | def last_version(self): |
|
4026 | 4080 | version = 0 |
|
4027 | 4081 | if self.history: |
|
4028 | 4082 | version = self.history[-1].version |
|
4029 | 4083 | return version |
|
4030 | 4084 | |
|
    def get_index_version(self, versions):
        """1-based position of this comment's PR version within *versions*, 0 when unknown."""
        return self.get_index_from_version(
            self.pull_request_version_id, versions)
|
4034 | 4088 | |
|
    @property
    def review_status(self):
        # Status string of the attached status-change record, if this comment
        # carried one; implicitly None otherwise.
        if self.status_change:
            return self.status_change[0].status

    @property
    def review_status_lbl(self):
        # Human-readable label of the attached status change, if any.
        if self.status_change:
            return self.status_change[0].status_lbl
|
4044 | 4098 | |
|
    def __repr__(self):
        # Persisted comments show their db id; transient ones the object id.
        if self.comment_id:
            return f'<DB:Comment #{self.comment_id}>'
        else:
            return f'<DB:Comment at {id(self)!r}>'
|
4050 | 4104 | |
|
4051 | 4105 | def get_api_data(self): |
|
4052 | 4106 | comment = self |
|
4053 | 4107 | |
|
4054 | 4108 | data = { |
|
4055 | 4109 | 'comment_id': comment.comment_id, |
|
4056 | 4110 | 'comment_type': comment.comment_type, |
|
4057 | 4111 | 'comment_text': comment.text, |
|
4058 | 4112 | 'comment_status': comment.status_change, |
|
4059 | 4113 | 'comment_f_path': comment.f_path, |
|
4060 | 4114 | 'comment_lineno': comment.line_no, |
|
4061 | 4115 | 'comment_author': comment.author, |
|
4062 | 4116 | 'comment_created_on': comment.created_on, |
|
4063 | 4117 | 'comment_resolved_by': self.resolved, |
|
4064 | 4118 | 'comment_commit_id': comment.revision, |
|
4065 | 4119 | 'comment_pull_request_id': comment.pull_request_id, |
|
4066 | 4120 | 'comment_last_version': self.last_version |
|
4067 | 4121 | } |
|
4068 | 4122 | return data |
|
4069 | 4123 | |
|
    def __json__(self):
        """JSON serialization hook: exposes the same payload as :meth:`get_api_data`."""
        data = dict()
        data.update(self.get_api_data())
        return data
|
4074 | 4128 | |
|
4075 | 4129 | |
|
class ChangesetCommentHistory(Base, BaseModel):
    """
    Stores previous revisions of an edited :class:`ChangesetComment`,
    one row per saved edit, versioned by the ``version`` column.
    """
    __tablename__ = 'changeset_comments_history'
    __table_args__ = (
        Index('cch_comment_id_idx', 'comment_id'),
        base_table_args,
    )

    comment_history_id = Column('comment_history_id', Integer(), nullable=False, primary_key=True)
    comment_id = Column('comment_id', Integer(), ForeignKey('changeset_comments.comment_id'), nullable=False)
    version = Column("version", Integer(), nullable=False, default=0)
    created_by_user_id = Column('created_by_user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    text = Column('text', UnicodeText().with_variant(UnicodeText(25000), 'mysql'), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    deleted = Column('deleted', Boolean(), default=False)

    author = relationship('User', lazy='joined')
    comment = relationship('ChangesetComment', cascade="all, delete", back_populates="history")

    @classmethod
    def get_version(cls, comment_id):
        """
        Return the next free version number for *comment_id*.

        :param comment_id: id of the comment whose history is inspected
        :return: 1 when no history exists yet; otherwise one more than the
            larger of (history row count, highest stored version)
        """
        q = Session().query(ChangesetCommentHistory).filter(
            ChangesetCommentHistory.comment_id == comment_id).order_by(ChangesetCommentHistory.version.desc())
        # The previous implementation evaluated q.count() up to three times,
        # issuing an identical COUNT query each time; run it once.
        entry_count = q.count()
        if entry_count == 0:
            return 1
        # NOTE(review): when the row count and the highest stored version
        # disagree, the larger of the two (+1) wins, keeping versions unique.
        if entry_count >= q[0].version:
            return entry_count + 1
        return q[0].version + 1
|
4104 | 4158 | |
|
4105 | 4159 | |
|
class ChangesetStatus(Base, BaseModel):
    """
    Review status entry for a commit or a pull request, one row per
    (repo, revision, version); versions track repeated status changes.
    """
    __tablename__ = 'changeset_statuses'
    __table_args__ = (
        Index('cs_revision_idx', 'revision'),
        Index('cs_version_idx', 'version'),
        UniqueConstraint('repo_id', 'revision', 'version'),
        base_table_args
    )

    # possible .status values; DEFAULT is applied on the column
    STATUS_NOT_REVIEWED = DEFAULT = 'not_reviewed'
    STATUS_APPROVED = 'approved'
    STATUS_REJECTED = 'rejected'
    STATUS_UNDER_REVIEW = 'under_review'

    # (value, translated label) pairs; drives get_status_lbl()
    STATUSES = [
        (STATUS_NOT_REVIEWED, _("Not Reviewed")),  # (no icon) and default
        (STATUS_APPROVED, _("Approved")),
        (STATUS_REJECTED, _("Rejected")),
        (STATUS_UNDER_REVIEW, _("Under Review")),
    ]

    changeset_status_id = Column('changeset_status_id', Integer(), nullable=False, primary_key=True)
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None)
    revision = Column('revision', String(40), nullable=False)
    status = Column('status', String(128), nullable=False, default=DEFAULT)
    changeset_comment_id = Column('changeset_comment_id', Integer(), ForeignKey('changeset_comments.comment_id'))
    modified_at = Column('modified_at', DateTime(), nullable=False, default=datetime.datetime.now)
    version = Column('version', Integer(), nullable=False, default=0)
    pull_request_id = Column("pull_request_id", Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=True)

    author = relationship('User', lazy='select')
    repo = relationship('Repository', lazy='select')
    comment = relationship('ChangesetComment', lazy='select', back_populates='status_change')
    pull_request = relationship('PullRequest', lazy='select', back_populates='statuses')

    def __repr__(self):
        return f"<{self.cls_name}('{self.status}[v{self.version}]:{self.author}')>"

    @classmethod
    def get_status_lbl(cls, value):
        """Translated label for a raw status *value*, or None if unknown."""
        return dict(cls.STATUSES).get(value)

    @property
    def status_lbl(self):
        # Translated label of this row's status.
        return ChangesetStatus.get_status_lbl(self.status)

    def get_api_data(self):
        """Serialize this status entry into the plain-dict API shape."""
        status = self
        data = {
            'status_id': status.changeset_status_id,
            'status': status.status,
        }
        return data

    def __json__(self):
        # JSON serialization hook: same payload as get_api_data().
        data = dict()
        data.update(self.get_api_data())
        return data
|
4165 | 4219 | |
|
4166 | 4220 | |
|
class _SetState(object):
    """
    Context processor allowing changing state for sensitive operation such as
    pull request update or merge.

    On enter the pull request is moved to ``pr_state`` and committed; on a
    clean exit it is moved back to the original (or explicitly given) state.
    """

    def __init__(self, pull_request, pr_state, back_state=None):
        # pull_request: the PullRequest db object to transition
        # pr_state: transient state to hold while inside the context
        # back_state: state to restore on exit; defaults to the PR's current one
        self._pr = pull_request
        self._org_state = back_state or pull_request.pull_request_state
        self._pr_state = pr_state
        self._current_state = None

    def __enter__(self):
        log.debug('StateLock: entering set state context of pr %s, setting state to: `%s`',
                  self._pr, self._pr_state)
        self.set_pr_state(self._pr_state)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # NOTE(review): on exception the original state is NOT restored — the
        # PR stays in the transient state; confirm this is intentional.
        if exc_val is not None or exc_type is not None:
            log.error(traceback.format_tb(exc_tb))
            return None

        self.set_pr_state(self._org_state)
        log.debug('StateLock: exiting set state context of pr %s, setting state to: `%s`',
                  self._pr, self._org_state)

    @property
    def state(self):
        # Last state successfully committed by set_pr_state (None before enter).
        return self._current_state

    def set_pr_state(self, pr_state):
        """Persist *pr_state* on the pull request; re-raises on commit failure."""
        try:
            self._pr.pull_request_state = pr_state
            Session().add(self._pr)
            Session().commit()
            self._current_state = pr_state
        except Exception:
            log.exception('Failed to set PullRequest %s state to %s', self._pr, pr_state)
            raise
|
4207 | 4261 | |
|
4208 | 4262 | |
|
class _PullRequestBase(BaseModel):
    """
    Common attributes of pull request and version entries.

    Shared declarative mixin for :class:`PullRequest` and
    ``PullRequestVersion``; ``@declared_attr`` is used for the columns and
    relationships that must be generated per concrete subclass.
    """

    # .status values
    STATUS_NEW = 'new'
    STATUS_OPEN = 'open'
    STATUS_CLOSED = 'closed'

    # available states
    STATE_CREATING = 'creating'
    STATE_UPDATING = 'updating'
    STATE_MERGING = 'merging'
    STATE_CREATED = 'created'

    title = Column('title', Unicode(255), nullable=True)
    description = Column(
        'description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'),
        nullable=True)
    description_renderer = Column('description_renderer', Unicode(64), nullable=True)

    # new/open/closed status of pull request (not approve/reject/etc)
    status = Column('status', Unicode(255), nullable=False, default=STATUS_NEW)
    created_on = Column(
        'created_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)
    updated_on = Column(
        'updated_on', DateTime(timezone=False), nullable=False,
        default=datetime.datetime.now)

    pull_request_state = Column("pull_request_state", String(255), nullable=True)

    @declared_attr
    def user_id(cls):
        return Column(
            "user_id", Integer(), ForeignKey('users.user_id'), nullable=False,
            unique=None)

    # 500 revisions max
    _revisions = Column(
        'revisions', UnicodeText().with_variant(UnicodeText(20500), 'mysql'))

    common_ancestor_id = Column('common_ancestor_id', Unicode(255), nullable=True)

    @declared_attr
    def source_repo_id(cls):
        # TODO: dan: rename column to source_repo_id
        return Column(
            'org_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    @declared_attr
    def pr_source(cls):
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
            overlaps="pull_requests_source"
        )

    _source_ref = Column('org_ref', Unicode(255), nullable=False)

    @hybrid_property
    def source_ref(self):
        return self._source_ref

    @source_ref.setter
    def source_ref(self, val):
        # refs are stored as 'type:name:commit_id' triplets
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._source_ref = safe_str(val)

    _target_ref = Column('other_ref', Unicode(255), nullable=False)

    @hybrid_property
    def target_ref(self):
        return self._target_ref

    @target_ref.setter
    def target_ref(self, val):
        # same 'type:name:commit_id' triplet validation as source_ref
        parts = (val or '').split(':')
        if len(parts) != 3:
            raise ValueError(
                'Invalid reference format given: {}, expected X:Y:Z'.format(val))
        self._target_ref = safe_str(val)

    @declared_attr
    def target_repo_id(cls):
        # TODO: dan: rename column to target_repo_id
        return Column(
            'other_repo_id', Integer(), ForeignKey('repositories.repo_id'),
            nullable=False)

    @declared_attr
    def pr_target(cls):
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
            overlaps="pull_requests_target"
        )

    _shadow_merge_ref = Column('shadow_merge_ref', Unicode(255), nullable=True)

    # TODO: dan: rename column to last_merge_source_rev
    _last_merge_source_rev = Column(
        'last_merge_org_rev', String(40), nullable=True)
    # TODO: dan: rename column to last_merge_target_rev
    _last_merge_target_rev = Column(
        'last_merge_other_rev', String(40), nullable=True)
    _last_merge_status = Column('merge_status', Integer(), nullable=True)
    last_merge_metadata = Column(
        'last_merge_metadata', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    merge_rev = Column('merge_rev', String(40), nullable=True)

    reviewer_data = Column(
        'reviewer_data_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))

    @property
    def reviewer_data_json(self):
        # reviewer_data serialized as a JSON string for templates
        return str_json(self.reviewer_data)

    @property
    def last_merge_metadata_parsed(self):
        """
        De-coerce the mutable JSON merge metadata into plain values;
        'target_ref'/'source_ref' entries become Reference tuples.
        """
        metadata = {}
        if not self.last_merge_metadata:
            return metadata

        if hasattr(self.last_merge_metadata, 'de_coerce'):
            for k, v in self.last_merge_metadata.de_coerce().items():
                if k in ['target_ref', 'source_ref']:
                    metadata[k] = Reference(v['type'], v['name'], v['commit_id'])
                else:
                    if hasattr(v, 'de_coerce'):
                        metadata[k] = v.de_coerce()
                    else:
                        metadata[k] = v
        return metadata

    @property
    def work_in_progress(self):
        """checks if pull request is work in progress by checking the title"""
        title = self.title.upper()
        if re.match(r'^(\[WIP\]\s*|WIP:\s*|WIP\s+)', title):
            return True
        return False

    @property
    def title_safe(self):
        # Escape curly braces so the title survives later str.format-style
        # interpolation -- presumably for template usage; confirm with callers.
        return self.title\
            .replace('{', '{{')\
            .replace('}', '}}')

    @hybrid_property
    def description_safe(self):
        from rhodecode.lib import helpers as h
        return h.escape(self.description)

    @hybrid_property
    def revisions(self):
        # stored as a ':'-joined string; exposed as a list of commit ids
        return self._revisions.split(':') if self._revisions else []

    @revisions.setter
    def revisions(self, val):
        self._revisions = ':'.join(val)

    @hybrid_property
    def last_merge_status(self):
        return safe_int(self._last_merge_status)

    @last_merge_status.setter
    def last_merge_status(self, val):
        self._last_merge_status = val

    @declared_attr
    def author(cls):
        return relationship(
            'User', lazy='joined',
            #TODO, problem that is somehow :?
            #back_populates='user_pull_requests'
        )

    @declared_attr
    def source_repo(cls):
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.source_repo_id==Repository.repo_id',
            overlaps="pr_source"
        )

    @property
    def source_ref_parts(self):
        # source_ref parsed into a Reference(type, name, commit_id)
        return self.unicode_to_reference(self.source_ref)

    @declared_attr
    def target_repo(cls):
        return relationship(
            'Repository',
            primaryjoin=f'{cls.__name__}.target_repo_id==Repository.repo_id',
            overlaps="pr_target"
        )

    @property
    def target_ref_parts(self):
        # target_ref parsed into a Reference(type, name, commit_id)
        return self.unicode_to_reference(self.target_ref)

    @property
    def shadow_merge_ref(self):
        return self.unicode_to_reference(self._shadow_merge_ref)

    @shadow_merge_ref.setter
    def shadow_merge_ref(self, ref):
        self._shadow_merge_ref = self.reference_to_unicode(ref)

    @staticmethod
    def unicode_to_reference(raw):
        # thin wrappers kept for backward compatibility with callers that
        # reach these helpers through the class
        return unicode_to_reference(raw)

    @staticmethod
    def reference_to_unicode(ref):
        return reference_to_unicode(ref)

    def get_api_data(self, with_merge_state=True):
        """
        Serialize the pull request into the plain-dict API shape.

        :param with_merge_state: when False, skip the (expensive) merge-status
            check and report it as 'not_available'
        """
        from rhodecode.model.pull_request import PullRequestModel

        pull_request = self
        if with_merge_state:
            merge_response, merge_status, msg = \
                PullRequestModel().merge_status(pull_request)
            merge_state = {
                'status': merge_status,
                'message': safe_str(msg),
            }
        else:
            merge_state = {'status': 'not_available',
                           'message': 'not_available'}

        merge_data = {
            'clone_url': PullRequestModel().get_shadow_clone_url(pull_request),
            'reference': (
                pull_request.shadow_merge_ref.asdict()
                if pull_request.shadow_merge_ref else None),
        }

        data = {
            'pull_request_id': pull_request.pull_request_id,
            'url': PullRequestModel().get_url(pull_request),
            'title': pull_request.title,
            'description': pull_request.description,
            'status': pull_request.status,
            'state': pull_request.pull_request_state,
            'created_on': pull_request.created_on,
            'updated_on': pull_request.updated_on,
            'commit_ids': pull_request.revisions,
            'review_status': pull_request.calculated_review_status(),
            'mergeable': merge_state,
            'source': {
                'clone_url': pull_request.source_repo.clone_url(),
                'repository': pull_request.source_repo.repo_name,
                'reference': {
                    'name': pull_request.source_ref_parts.name,
                    'type': pull_request.source_ref_parts.type,
                    'commit_id': pull_request.source_ref_parts.commit_id,
                },
            },
            'target': {
                'clone_url': pull_request.target_repo.clone_url(),
                'repository': pull_request.target_repo.repo_name,
                'reference': {
                    'name': pull_request.target_ref_parts.name,
                    'type': pull_request.target_ref_parts.type,
                    'commit_id': pull_request.target_ref_parts.commit_id,
                },
            },
            'merge': merge_data,
            'author': pull_request.author.get_api_data(include_secrets=False,
                                                       details='basic'),
            'reviewers': [
                {
                    'user': reviewer.get_api_data(include_secrets=False,
                                                  details='basic'),
                    'reasons': reasons,
                    'review_status': st[0][1].status if st else 'not_reviewed',
                }
                for obj, reviewer, reasons, mandatory, st in
                pull_request.reviewers_statuses()
            ]
        }

        return data

    def set_state(self, pull_request_state, final_state=None):
        """
        # goes from initial state to updating to initial state.
        # initial state can be changed by specifying back_state=
        with pull_request_obj.set_state(PullRequest.STATE_UPDATING):
            pull_request.merge()

        :param pull_request_state: transient state held inside the context
        :param final_state: optional state to restore on exit instead of the
            current one
        """

        return _SetState(self, pull_request_state, back_state=final_state)
|
4517 | 4571 | |
|
4518 | 4572 | |
|
class PullRequest(Base, _PullRequestBase):
    """
    Concrete pull request row; the shared columns and serialization live in
    :class:`_PullRequestBase`.
    """
    __tablename__ = 'pull_requests'
    __table_args__ = (
        base_table_args,
    )
    # sentinel used where a version id is expected but "latest" is meant
    LATEST_VER = 'latest'

    pull_request_id = Column(
        'pull_request_id', Integer(), nullable=False, primary_key=True)
|
4528 | 4582 | |
|
    def __repr__(self):
        # Persisted PRs show their db id; transient ones the object id.
        if self.pull_request_id:
            return f'<DB:PullRequest #{self.pull_request_id}>'
        else:
            return f'<DB:PullRequest at {id(self)!r}>'
|
4534 | 4588 | |
|
    # child collections; delete-orphan cascades keep rows in sync with the PR
    reviewers = relationship('PullRequestReviewers', cascade="all, delete-orphan", back_populates='pull_request')
    statuses = relationship('ChangesetStatus', cascade="all, delete-orphan", back_populates='pull_request')
    comments = relationship('ChangesetComment', cascade="all, delete-orphan", back_populates='pull_request')
    # lazy='dynamic' so .versions can be further filtered/ordered as a query
    versions = relationship('PullRequestVersion', cascade="all, delete-orphan", lazy='dynamic', back_populates='pull_request')
|
4539 | 4593 | |
|
    @classmethod
    def get_pr_display_object(cls, pull_request_obj, org_pull_request_obj,
                              internal_methods=None):
        """
        Build a read-only display wrapper around *pull_request_obj*
        (typically a PullRequestVersion), borrowing some attributes from the
        original pull request *org_pull_request_obj*.

        :param internal_methods: attribute names resolved on the wrapper
            itself instead of the attrs dict; defaults to ['versions']
        """

        class PullRequestDisplay(object):
            """
            Special object wrapper for showing PullRequest data via Versions
            It mimics PR object as close as possible. This is read only object
            just for display
            """

            def __init__(self, attrs, internal=None):
                self.attrs = attrs
                # internal have priority over the given ones via attrs
                self.internal = internal or ['versions']

            def __getattr__(self, item):
                # Unknown attributes resolve from the attrs dict; names listed
                # in self.internal are re-dispatched through normal lookup.
                if item in self.internal:
                    return getattr(self, item)
                try:
                    return self.attrs[item]
                except KeyError:
                    raise AttributeError(
                        '%s object has no attribute %s' % (self, item))

            def __repr__(self):
                pr_id = self.attrs.get('pull_request_id')
                return f'<DB:PullRequestDisplay #{pr_id}>'

            def versions(self):
                # live query against the wrapped PR's versions relationship
                return pull_request_obj.versions.order_by(
                    PullRequestVersion.pull_request_version_id).all()

            def is_closed(self):
                return pull_request_obj.is_closed()

            def is_state_changing(self):
                return pull_request_obj.is_state_changing()

            @property
            def pull_request_version_id(self):
                # None when wrapping a plain PullRequest (not a version)
                return getattr(pull_request_obj, 'pull_request_version_id', None)

            @property
            def pull_request_last_version(self):
                return pull_request_obj.pull_request_last_version

        # merge state is skipped: the wrapper is for display only
        attrs = StrictAttributeDict(pull_request_obj.get_api_data(with_merge_state=False))

        attrs.author = StrictAttributeDict(
            pull_request_obj.author.get_api_data())
        if pull_request_obj.target_repo:
            attrs.target_repo = StrictAttributeDict(
                pull_request_obj.target_repo.get_api_data())
            attrs.target_repo.clone_url = pull_request_obj.target_repo.clone_url

        if pull_request_obj.source_repo:
            attrs.source_repo = StrictAttributeDict(
                pull_request_obj.source_repo.get_api_data())
            attrs.source_repo.clone_url = pull_request_obj.source_repo.clone_url

        attrs.source_ref_parts = pull_request_obj.source_ref_parts
        attrs.target_ref_parts = pull_request_obj.target_ref_parts
        attrs.revisions = pull_request_obj.revisions
        attrs.common_ancestor_id = pull_request_obj.common_ancestor_id
        # these three always come from the original PR, not the version
        attrs.shadow_merge_ref = org_pull_request_obj.shadow_merge_ref
        attrs.reviewer_data = org_pull_request_obj.reviewer_data
        attrs.reviewer_data_json = org_pull_request_obj.reviewer_data_json

        return PullRequestDisplay(attrs, internal=internal_methods)
|
4610 | 4664 | |
|
4611 | 4665 | def is_closed(self): |
|
4612 | 4666 | return self.status == self.STATUS_CLOSED |
|
4613 | 4667 | |
|
4614 | 4668 | def is_state_changing(self): |
|
4615 | 4669 | return self.pull_request_state != PullRequest.STATE_CREATED |
|
4616 | 4670 | |
|
4617 | 4671 | def __json__(self): |
|
4618 | 4672 | return { |
|
4619 | 4673 | 'revisions': self.revisions, |
|
4620 | 4674 | 'versions': self.versions_count |
|
4621 | 4675 | } |
|
4622 | 4676 | |
|
    def calculated_review_status(self):
        """
        Compute the aggregate review status of this pull request, delegating
        to ChangesetStatusModel.
        """
        # local import — presumably avoids an import cycle between the db
        # layer and the model layer; confirm before hoisting to module level
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().calculated_review_status(self)
|
4626 | 4680 | |
|
    def reviewers_statuses(self, user=None):
        """
        Return per-reviewer status information for this pull request,
        optionally scoped to a single ``user``; delegates to
        ChangesetStatusModel.
        """
        # local import — presumably avoids an import cycle; see sibling methods
        from rhodecode.model.changeset_status import ChangesetStatusModel
        return ChangesetStatusModel().reviewers_statuses(self, user=user)
|
4630 | 4684 | |
|
4631 | 4685 | def get_pull_request_reviewers(self, role=None): |
|
4632 | 4686 | qry = PullRequestReviewers.query()\ |
|
4633 | 4687 | .filter(PullRequestReviewers.pull_request_id == self.pull_request_id) |
|
4634 | 4688 | if role: |
|
4635 | 4689 | qry = qry.filter(PullRequestReviewers.role == role) |
|
4636 | 4690 | |
|
4637 | 4691 | return qry.all() |
|
4638 | 4692 | |
|
    @property
    def reviewers_count(self):
        """Number of reviewer-role entries attached to this pull request."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_REVIEWER)
        return qry.count()
|
4645 | 4699 | |
|
    @property
    def observers_count(self):
        """Number of observer-role entries attached to this pull request."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)
        return qry.count()
|
4652 | 4706 | |
|
    def observers(self):
        """
        Yield ``(PullRequestReviewers, User)`` pairs for every observer-role
        entry attached to this pull request.
        """
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == self.pull_request_id)\
            .filter(PullRequestReviewers.role == PullRequestReviewers.ROLE_OBSERVER)\
            .all()

        # generator: callers iterate lazily over (entry, user) tuples
        for entry in qry:
            yield entry, entry.user
|
4661 | 4715 | |
|
    @property
    def workspace_id(self):
        """Identifier of the shadow-merge workspace for this pull request."""
        # local import — presumably avoids an import cycle with the model layer
        from rhodecode.model.pull_request import PullRequestModel
        return PullRequestModel()._workspace_id(self)
|
4666 | 4720 | |
|
    def get_shadow_repo(self):
        """
        Return a VCS instance for this PR's shadow repository.

        Implicitly returns ``None`` when the shadow workspace directory does
        not exist on disk — callers must handle the missing-shadow case.
        """
        workspace_id = self.workspace_id
        shadow_repository_path = self.target_repo.get_shadow_repository_path(workspace_id)
        if os.path.isdir(shadow_repository_path):
            vcs_obj = self.target_repo.scm_instance()
            return vcs_obj.get_shadow_instance(shadow_repository_path)
|
4673 | 4727 | |
|
4674 | 4728 | @property |
|
4675 | 4729 | def versions_count(self): |
|
4676 | 4730 | """ |
|
4677 | 4731 | return number of versions this PR have, e.g a PR that once been |
|
4678 | 4732 | updated will have 2 versions |
|
4679 | 4733 | """ |
|
4680 | 4734 | return self.versions.count() + 1 |
|
4681 | 4735 | |
|
    @property
    def pull_request_last_version(self):
        # the latest version number equals the total count of versions
        return self.versions_count
|
4685 | 4739 | |
|
4686 | 4740 | |
|
class PullRequestVersion(Base, _PullRequestBase):
    """
    Immutable snapshot of a pull request taken each time the PR is updated.

    Shares all columns with PullRequest via ``_PullRequestBase``; status and
    review-related queries are delegated to the live parent pull request.
    """
    __tablename__ = 'pull_request_versions'
    __table_args__ = (
        base_table_args,
    )

    pull_request_version_id = Column('pull_request_version_id', Integer(), nullable=False, primary_key=True)
    pull_request_id = Column('pull_request_id', Integer(), ForeignKey('pull_requests.pull_request_id'), nullable=False)
    pull_request = relationship('PullRequest', back_populates='versions')

    def __repr__(self):
        # fall back to the object id for not-yet-persisted instances
        if self.pull_request_version_id:
            return f'<DB:PullRequestVersion #{self.pull_request_version_id}>'
        else:
            return f'<DB:PullRequestVersion at {id(self)!r}>'

    @property
    def reviewers(self):
        # versions have no reviewers of their own; use the parent PR's
        return self.pull_request.reviewers

    @property
    def versions(self):
        return self.pull_request.versions

    def is_closed(self):
        # calculate from original
        return self.pull_request.status == self.STATUS_CLOSED

    def is_state_changing(self):
        # state is tracked on the live pull request, not the snapshot
        return self.pull_request.pull_request_state != PullRequest.STATE_CREATED

    def calculated_review_status(self):
        return self.pull_request.calculated_review_status()

    def reviewers_statuses(self):
        return self.pull_request.reviewers_statuses()

    def observers(self):
        return self.pull_request.observers()
|
4726 | 4780 | |
|
4727 | 4781 | |
|
class PullRequestReviewers(Base, BaseModel):
    """
    Association row linking a user to a pull request in a given role
    (reviewer or observer), with the reasons and voting-rule metadata that
    applied when the user was added.
    """
    __tablename__ = 'pull_request_reviewers'
    __table_args__ = (
        base_table_args,
    )
    # role constants stored in the ``role`` column
    ROLE_REVIEWER = 'reviewer'
    ROLE_OBSERVER = 'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    @hybrid_property
    def reasons(self):
        # normalize NULL/empty storage to an empty list for callers
        if not self._reasons:
            return []
        return self._reasons

    @reasons.setter
    def reasons(self, val):
        val = val or []
        # NOTE(review): raises bare Exception rather than TypeError; callers
        # may rely on catching Exception — confirm before narrowing
        if any(not isinstance(x, str) for x in val):
            raise Exception('invalid reasons type, must be list of strings')
        self._reasons = val

    pull_requests_reviewers_id = Column(
        'pull_requests_reviewers_id', Integer(), nullable=False,
        primary_key=True)
    pull_request_id = Column(
        "pull_request_id", Integer(),
        ForeignKey('pull_requests.pull_request_id'), nullable=False)
    user_id = Column(
        "user_id", Integer(), ForeignKey('users.user_id'), nullable=True)
    # JSON list of human-readable reasons; exposed via the ``reasons`` hybrid
    _reasons = Column(
        'reason', MutationList.as_mutable(
            JsonType('list', dialect_map=dict(mysql=UnicodeText(16384)))))

    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)

    user = relationship('User')
    pull_request = relationship('PullRequest', back_populates='reviewers')

    # JSON blob describing the review rule that produced this entry
    rule_data = Column(
        'rule_data_json',
        JsonType(dialect_map=dict(mysql=UnicodeText(16384))))

    def rule_user_group_data(self):
        """
        Returns the voting user group rule data for this reviewer

        Implicitly returns ``None`` when no vote rule is present; returns an
        empty dict when a vote rule exists without a user-group entry id.
        """

        if self.rule_data and 'vote_rule' in self.rule_data:
            user_group_data = {}
            if 'rule_user_group_entry_id' in self.rule_data:
                # means a group with voting rules !
                user_group_data['id'] = self.rule_data['rule_user_group_entry_id']
                user_group_data['name'] = self.rule_data['rule_name']
                user_group_data['vote_rule'] = self.rule_data['vote_rule']

            return user_group_data

    @classmethod
    def get_pull_request_reviewers(cls, pull_request_id, role=None):
        """Return all entries for ``pull_request_id``, optionally filtered by role."""
        qry = PullRequestReviewers.query()\
            .filter(PullRequestReviewers.pull_request_id == pull_request_id)
        if role:
            qry = qry.filter(PullRequestReviewers.role == role)

        return qry.all()

    def __repr__(self):
        return f"<{self.cls_name}('id:{self.pull_requests_reviewers_id}')>"
|
4798 | 4852 | |
|
4799 | 4853 | |
|
class Notification(Base, BaseModel):
    """
    A notification message (comment, mention, registration, PR event, ...)
    fanned out to one or more recipients via UserNotification rows.
    """
    __tablename__ = 'notifications'
    __table_args__ = (
        Index('notification_type_idx', 'type'),
        base_table_args,
    )

    # values stored in the ``type`` column
    TYPE_CHANGESET_COMMENT = 'cs_comment'
    TYPE_MESSAGE = 'message'
    TYPE_MENTION = 'mention'
    TYPE_REGISTRATION = 'registration'
    TYPE_PULL_REQUEST = 'pull_request'
    TYPE_PULL_REQUEST_COMMENT = 'pull_request_comment'
    TYPE_PULL_REQUEST_UPDATE = 'pull_request_update'

    notification_id = Column('notification_id', Integer(), nullable=False, primary_key=True)
    subject = Column('subject', Unicode(512), nullable=True)
    body = Column('body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)
    created_by = Column("created_by", Integer(), ForeignKey('users.user_id'), nullable=True)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    type_ = Column('type', Unicode(255))

    created_by_user = relationship('User', back_populates='user_created_notifications')
    notifications_to_users = relationship('UserNotification', lazy='joined', cascade="all, delete-orphan", back_populates='notification')

    @property
    def recipients(self):
        """Users receiving this notification, ordered by ascending user id."""
        return [x.user for x in UserNotification.query()\
                .filter(UserNotification.notification == self)\
                .order_by(UserNotification.user_id.asc()).all()]

    @classmethod
    def create(cls, created_by, subject, body, recipients, type_=None):
        """
        Create a notification and link it to every user in ``recipients``.

        :param created_by: User instance authoring the notification
        :param type_: one of the TYPE_* constants; defaults to TYPE_MESSAGE
        :return: the new (session-added but uncommitted) Notification
        """
        if type_ is None:
            type_ = Notification.TYPE_MESSAGE

        notification = cls()
        notification.created_by_user = created_by
        notification.subject = subject
        notification.body = body
        notification.type_ = type_
        notification.created_on = datetime.datetime.now()

        # For each recipient link the created notification to his account
        for u in recipients:
            assoc = UserNotification()
            assoc.user_id = u.user_id
            assoc.notification = notification

            # if created_by is inside recipients mark his notification
            # as read
            if u.user_id == created_by.user_id:
                assoc.read = True
            Session().add(assoc)

        Session().add(notification)

        return notification
|
4858 | 4912 | |
|
4859 | 4913 | |
|
class UserNotification(Base, BaseModel):
    """
    Per-user delivery record for a Notification, carrying the read flag
    and optional sent timestamp; one row per (user, notification) pair.
    """
    __tablename__ = 'user_to_notification'
    __table_args__ = (
        UniqueConstraint('user_id', 'notification_id'),
        base_table_args
    )

    # composite primary key: a user sees a given notification at most once
    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    notification_id = Column("notification_id", Integer(), ForeignKey('notifications.notification_id'), primary_key=True)
    read = Column('read', Boolean, default=False)
    sent_on = Column('sent_on', DateTime(timezone=False), nullable=True, unique=None)

    user = relationship('User', lazy="joined", back_populates='notifications')
    notification = relationship('Notification', lazy="joined", order_by=lambda: Notification.created_on.desc(), back_populates='notifications_to_users')

    def mark_as_read(self):
        """Flag this delivery as read; adds to the session, does not commit."""
        self.read = True
        Session().add(self)
|
4878 | 4932 | |
|
4879 | 4933 | |
|
class UserNotice(Base, BaseModel):
    """
    A banner/notice message shown to a single user until marked as read,
    e.g. administrative warnings or informational messages.
    """
    __tablename__ = 'user_notices'
    __table_args__ = (
        base_table_args
    )

    NOTIFICATION_TYPE_MESSAGE = 'message'
    NOTIFICATION_TYPE_NOTICE = 'notice'

    NOTIFICATION_LEVEL_INFO = 'info'
    NOTIFICATION_LEVEL_WARNING = 'warning'
    NOTIFICATION_LEVEL_ERROR = 'error'

    # NOTE(review): the physical column is named 'gist_id' — looks like a
    # copy-paste from the Gist model; renaming requires a migration, so it
    # is documented here rather than changed
    user_notice_id = Column('gist_id', Integer(), primary_key=True)

    notice_subject = Column('notice_subject', Unicode(512), nullable=True)
    notice_body = Column('notice_body', UnicodeText().with_variant(UnicodeText(50000), 'mysql'), nullable=True)

    notice_read = Column('notice_read', Boolean, default=False)

    notification_level = Column('notification_level', String(1024), default=NOTIFICATION_LEVEL_INFO)
    notification_type = Column('notification_type', String(1024), default=NOTIFICATION_TYPE_NOTICE)

    notice_created_by = Column('notice_created_by', Integer(), ForeignKey('users.user_id'), nullable=True)
    notice_created_on = Column('notice_created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'))
    user = relationship('User', lazy="joined", primaryjoin='User.user_id==UserNotice.user_id')

    @classmethod
    def create_for_user(cls, user, subject, body, notice_level=NOTIFICATION_LEVEL_INFO, allow_duplicate=False):
        """
        Create and commit a notice for ``user``.

        Silently returns (no notice created) when ``notice_level`` is not a
        known level, or when ``allow_duplicate`` is False and an unread
        notice with the same body already exists for the user.
        """

        if notice_level not in [cls.NOTIFICATION_LEVEL_ERROR,
                                cls.NOTIFICATION_LEVEL_WARNING,
                                cls.NOTIFICATION_LEVEL_INFO]:
            return

        # resolve user from id/username/instance via the model layer
        from rhodecode.model.user import UserModel
        user = UserModel().get_user(user)

        new_notice = UserNotice()
        if not allow_duplicate:
            # duplicate = same user + same body + still unread
            existing_msg = UserNotice().query() \
                .filter(UserNotice.user == user) \
                .filter(UserNotice.notice_body == body) \
                .filter(UserNotice.notice_read == false()) \
                .scalar()
            if existing_msg:
                log.warning('Ignoring duplicate notice for user %s', user)
                return

        new_notice.user = user
        new_notice.notice_subject = subject
        new_notice.notice_body = body
        new_notice.notification_level = notice_level
        Session().add(new_notice)
        Session().commit()
|
4938 | 4992 | |
|
class Gist(Base, BaseModel):
    """
    A gist: a small snippet repository (public or private) owned by a user,
    stored on disk under the gist store and backed by a VCS instance.
    """
    __tablename__ = 'gists'
    __table_args__ = (
        Index('g_gist_access_id_idx', 'gist_access_id'),
        Index('g_created_on_idx', 'created_on'),
        base_table_args
    )

    GIST_PUBLIC = 'public'
    GIST_PRIVATE = 'private'
    DEFAULT_FILENAME = 'gistfile1.txt'

    ACL_LEVEL_PUBLIC = 'acl_public'
    ACL_LEVEL_PRIVATE = 'acl_private'

    gist_id = Column('gist_id', Integer(), primary_key=True)
    gist_access_id = Column('gist_access_id', Unicode(250))
    gist_description = Column('gist_description', UnicodeText().with_variant(UnicodeText(1024), 'mysql'))
    gist_owner = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=True)
    gist_expires = Column('gist_expires', Float(53), nullable=False)
    gist_type = Column('gist_type', Unicode(128), nullable=False)
    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    modified_at = Column('modified_at', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    acl_level = Column('acl_level', Unicode(128), nullable=True)

    owner = relationship('User', back_populates='user_gists')

    def __repr__(self):
        return f'<Gist:[{self.gist_type}]{self.gist_access_id}>'

    @hybrid_property
    def description_safe(self):
        """HTML-escaped description, safe for template rendering."""
        from rhodecode.lib import helpers as h
        return h.escape(self.gist_description)

    @classmethod
    def get_or_404(cls, id_):
        """Fetch by access id, raising HTTPNotFound when missing."""
        from pyramid.httpexceptions import HTTPNotFound

        res = cls.query().filter(cls.gist_access_id == id_).scalar()
        if not res:
            log.debug('WARN: No DB entry with id %s', id_)
            raise HTTPNotFound()
        return res

    @classmethod
    def get_by_access_id(cls, gist_access_id):
        # returns None when no gist matches
        return cls.query().filter(cls.gist_access_id == gist_access_id).scalar()

    def gist_url(self):
        """Full URL of this gist as produced by the model layer."""
        from rhodecode.model.gist import GistModel
        return GistModel().get_url(self)

    @classmethod
    def base_path(cls):
        """
        Returns base path when all gists are stored

        :param cls:
        """
        from rhodecode.model.gist import GIST_STORE_LOC
        from rhodecode.lib.utils import get_rhodecode_repo_store_path
        repo_store_path = get_rhodecode_repo_store_path()
        return os.path.join(repo_store_path, GIST_STORE_LOC)

    def get_api_data(self):
        """
        Common function for generating gist related data for API
        """
        gist = self
        data = {
            'gist_id': gist.gist_id,
            'type': gist.gist_type,
            'access_id': gist.gist_access_id,
            'description': gist.gist_description,
            'url': gist.gist_url(),
            'expires': gist.gist_expires,
            'created_on': gist.created_on,
            'modified_at': gist.modified_at,
            'content': None,  # content is loaded separately, not via API data
            'acl_level': gist.acl_level,
        }
        return data

    def __json__(self):
        data = dict()
        data.update(self.get_api_data())
        return data
    # SCM functions

    def scm_instance(self, **kwargs):
        """
        Get an instance of VCS Repository

        :param kwargs:
        """
        from rhodecode.model.gist import GistModel
        full_repo_path = os.path.join(self.base_path(), self.gist_access_id)
        return get_vcs_instance(
            repo_path=safe_str(full_repo_path), create=False,
            _vcs_alias=GistModel.vcs_backend)
|
5041 | 5094 | |
|
5042 | 5095 | |
|
class ExternalIdentity(Base, BaseModel):
    """
    Mapping between a local user account and an identity at an external
    auth provider (OAuth etc.), including the provider-issued tokens.
    """
    __tablename__ = 'external_identities'
    __table_args__ = (
        Index('local_user_id_idx', 'local_user_id'),
        Index('external_id_idx', 'external_id'),
        base_table_args
    )

    # composite primary key: (external_id, local_user_id, provider_name)
    external_id = Column('external_id', Unicode(255), default='', primary_key=True)
    external_username = Column('external_username', Unicode(1024), default='')
    local_user_id = Column('local_user_id', Integer(), ForeignKey('users.user_id'), primary_key=True)
    provider_name = Column('provider_name', Unicode(255), default='', primary_key=True)
    access_token = Column('access_token', String(1024), default='')
    alt_token = Column('alt_token', String(1024), default='')
    token_secret = Column('token_secret', String(1024), default='')

    @classmethod
    def by_external_id_and_provider(cls, external_id, provider_name, local_user_id=None):
        """
        Returns ExternalIdentity instance based on search params

        :param external_id:
        :param provider_name:
        :return: ExternalIdentity
        """
        query = cls.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        if local_user_id:
            query = query.filter(cls.local_user_id == local_user_id)
        return query.first()

    @classmethod
    def user_by_external_id_and_provider(cls, external_id, provider_name):
        """
        Returns User instance based on search params

        :param external_id:
        :param provider_name:
        :return: User
        """
        query = User.query()
        query = query.filter(cls.external_id == external_id)
        query = query.filter(cls.provider_name == provider_name)
        # implicit join of users against external_identities rows
        query = query.filter(User.user_id == cls.local_user_id)
        return query.first()

    @classmethod
    def by_local_user_id(cls, local_user_id):
        """
        Returns all tokens for user

        :param local_user_id:
        :return: ExternalIdentity
        """
        # NOTE: returns the query object, not materialized results
        query = cls.query()
        query = query.filter(cls.local_user_id == local_user_id)
        return query

    @classmethod
    def load_provider_plugin(cls, plugin_id):
        """Load the EE auth plugin instance registered under ``plugin_id``."""
        from rhodecode.authentication.base import loadplugin
        _plugin_id = 'egg:rhodecode-enterprise-ee#{}'.format(plugin_id)
        auth_plugin = loadplugin(_plugin_id)
        return auth_plugin
|
5108 | 5161 | |
|
5109 | 5162 | |
|
class Integration(Base, BaseModel):
    """
    An integration (webhook, Slack, Jenkins, ...) configured globally, for a
    single repository, or for a repository group; ``settings`` holds the
    type-specific JSON configuration.
    """
    __tablename__ = 'integrations'
    __table_args__ = (
        base_table_args
    )

    integration_id = Column('integration_id', Integer(), primary_key=True)
    integration_type = Column('integration_type', String(255))
    enabled = Column('enabled', Boolean(), nullable=False)
    name = Column('name', String(255), nullable=False)
    # for group scope: apply only to direct children vs. recursively
    child_repos_only = Column('child_repos_only', Boolean(), nullable=False, default=False)

    settings = Column(
        'settings_json', MutationObj.as_mutable(
            JsonType(dialect_map=dict(mysql=UnicodeText(16384)))))
    repo_id = Column('repo_id', Integer(), ForeignKey('repositories.repo_id'), nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined', back_populates='integrations')

    repo_group_id = Column('repo_group_id', Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined', back_populates='integrations')

    @property
    def scope(self):
        """
        Human-readable scope label: a repo, a repo group (child-only or
        recursive), 'root_repos', or 'global'.
        """
        if self.repo:
            return repr(self.repo)
        if self.repo_group:
            if self.child_repos_only:
                return repr(self.repo_group) + ' (child repos only)'
            else:
                return repr(self.repo_group) + ' (recursive)'
        # no repo/group bound: child_repos_only distinguishes root-level
        # repositories from a truly global integration
        if self.child_repos_only:
            return 'root_repos'
        return 'global'

    def __repr__(self):
        return '<Integration(%r, %r)>' % (self.integration_type, self.scope)
|
5146 | 5199 | |
|
5147 | 5200 | |
|
class RepoReviewRuleUser(Base, BaseModel):
    """
    A single user attached to a repository review rule, with a role and a
    mandatory flag.
    """
    __tablename__ = 'repo_review_rules_users'
    __table_args__ = (
        base_table_args
    )
    ROLE_REVIEWER = 'reviewer'
    ROLE_OBSERVER = 'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_user_id = Column('repo_review_rule_user_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    user = relationship('User', back_populates='user_review_rules')

    def rule_data(self):
        """Plain-dict summary of this rule entry (mandatory flag + role)."""
        return {
            'mandatory': self.mandatory,
            'role': self.role,
        }
|
5169 | 5222 | |
|
5170 | 5223 | |
|
class RepoReviewRuleUserGroup(Base, BaseModel):
    """
    A user group attached to a repository review rule, with a role, a
    mandatory flag, and a vote rule (minimum votes, or all members).
    """
    __tablename__ = 'repo_review_rules_users_groups'
    __table_args__ = (
        base_table_args
    )

    # sentinel meaning "every group member must vote"
    VOTE_RULE_ALL = -1
    ROLE_REVIEWER = 'reviewer'
    ROLE_OBSERVER = 'observer'
    ROLES = [ROLE_REVIEWER, ROLE_OBSERVER]

    repo_review_rule_users_group_id = Column('repo_review_rule_users_group_id', Integer(), primary_key=True)
    repo_review_rule_id = Column("repo_review_rule_id", Integer(), ForeignKey('repo_review_rules.repo_review_rule_id'))
    users_group_id = Column("users_group_id", Integer(), ForeignKey('users_groups.users_group_id'), nullable=False)
    mandatory = Column("mandatory", Boolean(), nullable=False, default=False)
    role = Column('role', Unicode(255), nullable=True, default=ROLE_REVIEWER)
    vote_rule = Column("vote_rule", Integer(), nullable=True, default=VOTE_RULE_ALL)
    users_group = relationship('UserGroup')

    def rule_data(self):
        """Plain-dict summary of this rule entry (mandatory, role, vote rule)."""
        return {
            'mandatory': self.mandatory,
            'role': self.role,
            'vote_rule': self.vote_rule
        }

    @property
    def vote_rule_label(self):
        """Display label for the vote rule; 0/None/VOTE_RULE_ALL mean all must vote."""
        if not self.vote_rule or self.vote_rule == self.VOTE_RULE_ALL:
            return 'all must vote'
        else:
            return 'min. vote {}'.format(self.vote_rule)
|
5203 | 5256 | |
|
5204 | 5257 | |
|
class RepoReviewRule(Base, BaseModel):
    """
    A per-repository review rule: glob patterns selecting source/target
    branches and files, plus the users/groups that must (or may) review
    matching pull requests.
    """
    __tablename__ = 'repo_review_rules'
    __table_args__ = (
        base_table_args
    )

    repo_review_rule_id = Column(
        'repo_review_rule_id', Integer(), primary_key=True)
    repo_id = Column(
        "repo_id", Integer(), ForeignKey('repositories.repo_id'))
    repo = relationship('Repository', back_populates='review_rules')

    review_rule_name = Column('review_rule_name', String(255))
    # underscore-prefixed columns are exposed via hybrid properties that
    # default empty values to the match-all glob '*'
    _branch_pattern = Column("branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*')  # glob
    _target_branch_pattern = Column("target_branch_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*')  # glob
    _file_pattern = Column("file_pattern", UnicodeText().with_variant(UnicodeText(255), 'mysql'), default='*')  # glob

    use_authors_for_review = Column("use_authors_for_review", Boolean(), nullable=False, default=False)

    # Legacy fields, just for backward compat
    _forbid_author_to_review = Column("forbid_author_to_review", Boolean(), nullable=False, default=False)
    _forbid_commit_author_to_review = Column("forbid_commit_author_to_review", Boolean(), nullable=False, default=False)

    # mode strings, e.g. 'forbid_pr_author' / 'include_pr_author'
    pr_author = Column("pr_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)
    commit_author = Column("commit_author", UnicodeText().with_variant(UnicodeText(255), 'mysql'), nullable=True)

    forbid_adding_reviewers = Column("forbid_adding_reviewers", Boolean(), nullable=False, default=False)

    rule_users = relationship('RepoReviewRuleUser')
    rule_user_groups = relationship('RepoReviewRuleUserGroup')
|
5235 | 5288 | |
|
5236 | 5289 | def _validate_pattern(self, value): |
|
5237 | 5290 | re.compile('^' + glob2re(value) + '$') |
|
5238 | 5291 | |
|
5239 | 5292 | @hybrid_property |
|
5240 | 5293 | def source_branch_pattern(self): |
|
5241 | 5294 | return self._branch_pattern or '*' |
|
5242 | 5295 | |
|
5243 | 5296 | @source_branch_pattern.setter |
|
5244 | 5297 | def source_branch_pattern(self, value): |
|
5245 | 5298 | self._validate_pattern(value) |
|
5246 | 5299 | self._branch_pattern = value or '*' |
|
5247 | 5300 | |
|
5248 | 5301 | @hybrid_property |
|
5249 | 5302 | def target_branch_pattern(self): |
|
5250 | 5303 | return self._target_branch_pattern or '*' |
|
5251 | 5304 | |
|
5252 | 5305 | @target_branch_pattern.setter |
|
5253 | 5306 | def target_branch_pattern(self, value): |
|
5254 | 5307 | self._validate_pattern(value) |
|
5255 | 5308 | self._target_branch_pattern = value or '*' |
|
5256 | 5309 | |
|
5257 | 5310 | @hybrid_property |
|
5258 | 5311 | def file_pattern(self): |
|
5259 | 5312 | return self._file_pattern or '*' |
|
5260 | 5313 | |
|
5261 | 5314 | @file_pattern.setter |
|
5262 | 5315 | def file_pattern(self, value): |
|
5263 | 5316 | self._validate_pattern(value) |
|
5264 | 5317 | self._file_pattern = value or '*' |
|
5265 | 5318 | |
|
5266 | 5319 | @hybrid_property |
|
5267 | 5320 | def forbid_pr_author_to_review(self): |
|
5268 | 5321 | return self.pr_author == 'forbid_pr_author' |
|
5269 | 5322 | |
|
5270 | 5323 | @hybrid_property |
|
5271 | 5324 | def include_pr_author_to_review(self): |
|
5272 | 5325 | return self.pr_author == 'include_pr_author' |
|
5273 | 5326 | |
|
5274 | 5327 | @hybrid_property |
|
5275 | 5328 | def forbid_commit_author_to_review(self): |
|
5276 | 5329 | return self.commit_author == 'forbid_commit_author' |
|
5277 | 5330 | |
|
5278 | 5331 | @hybrid_property |
|
5279 | 5332 | def include_commit_author_to_review(self): |
|
5280 | 5333 | return self.commit_author == 'include_commit_author' |
|
5281 | 5334 | |
|
5282 | 5335 | def matches(self, source_branch, target_branch, files_changed): |
|
5283 | 5336 | """ |
|
5284 | 5337 | Check if this review rule matches a branch/files in a pull request |
|
5285 | 5338 | |
|
5286 | 5339 | :param source_branch: source branch name for the commit |
|
5287 | 5340 | :param target_branch: target branch name for the commit |
|
5288 | 5341 | :param files_changed: list of file paths changed in the pull request |
|
5289 | 5342 | """ |
|
5290 | 5343 | |
|
5291 | 5344 | source_branch = source_branch or '' |
|
5292 | 5345 | target_branch = target_branch or '' |
|
5293 | 5346 | files_changed = files_changed or [] |
|
5294 | 5347 | |
|
5295 | 5348 | branch_matches = True |
|
5296 | 5349 | if source_branch or target_branch: |
|
5297 | 5350 | if self.source_branch_pattern == '*': |
|
5298 | 5351 | source_branch_match = True |
|
5299 | 5352 | else: |
|
5300 | 5353 | if self.source_branch_pattern.startswith('re:'): |
|
5301 | 5354 | source_pattern = self.source_branch_pattern[3:] |
|
5302 | 5355 | else: |
|
5303 | 5356 | source_pattern = '^' + glob2re(self.source_branch_pattern) + '$' |
|
5304 | 5357 | source_branch_regex = re.compile(source_pattern) |
|
5305 | 5358 | source_branch_match = bool(source_branch_regex.search(source_branch)) |
|
5306 | 5359 | if self.target_branch_pattern == '*': |
|
5307 | 5360 | target_branch_match = True |
|
5308 | 5361 | else: |
|
5309 | 5362 | if self.target_branch_pattern.startswith('re:'): |
|
5310 | 5363 | target_pattern = self.target_branch_pattern[3:] |
|
5311 | 5364 | else: |
|
5312 | 5365 | target_pattern = '^' + glob2re(self.target_branch_pattern) + '$' |
|
5313 | 5366 | target_branch_regex = re.compile(target_pattern) |
|
5314 | 5367 | target_branch_match = bool(target_branch_regex.search(target_branch)) |
|
5315 | 5368 | |
|
5316 | 5369 | branch_matches = source_branch_match and target_branch_match |
|
5317 | 5370 | |
|
5318 | 5371 | files_matches = True |
|
5319 | 5372 | if self.file_pattern != '*': |
|
5320 | 5373 | files_matches = False |
|
5321 | 5374 | if self.file_pattern.startswith('re:'): |
|
5322 | 5375 | file_pattern = self.file_pattern[3:] |
|
5323 | 5376 | else: |
|
5324 | 5377 | file_pattern = glob2re(self.file_pattern) |
|
5325 | 5378 | file_regex = re.compile(file_pattern) |
|
5326 | 5379 | for file_data in files_changed: |
|
5327 | 5380 | filename = file_data.get('filename') |
|
5328 | 5381 | |
|
5329 | 5382 | if file_regex.search(filename): |
|
5330 | 5383 | files_matches = True |
|
5331 | 5384 | break |
|
5332 | 5385 | |
|
5333 | 5386 | return branch_matches and files_matches |
|
5334 | 5387 | |
|
5335 | 5388 | @property |
|
5336 | 5389 | def review_users(self): |
|
5337 | 5390 | """ Returns the users which this rule applies to """ |
|
5338 | 5391 | |
|
5339 | 5392 | users = collections.OrderedDict() |
|
5340 | 5393 | |
|
5341 | 5394 | for rule_user in self.rule_users: |
|
5342 | 5395 | if rule_user.user.active: |
|
5343 | 5396 | if rule_user.user not in users: |
|
5344 | 5397 | users[rule_user.user.username] = { |
|
5345 | 5398 | 'user': rule_user.user, |
|
5346 | 5399 | 'source': 'user', |
|
5347 | 5400 | 'source_data': {}, |
|
5348 | 5401 | 'data': rule_user.rule_data() |
|
5349 | 5402 | } |
|
5350 | 5403 | |
|
5351 | 5404 | for rule_user_group in self.rule_user_groups: |
|
5352 | 5405 | source_data = { |
|
5353 | 5406 | 'user_group_id': rule_user_group.users_group.users_group_id, |
|
5354 | 5407 | 'name': rule_user_group.users_group.users_group_name, |
|
5355 | 5408 | 'members': len(rule_user_group.users_group.members) |
|
5356 | 5409 | } |
|
5357 | 5410 | for member in rule_user_group.users_group.members: |
|
5358 | 5411 | if member.user.active: |
|
5359 | 5412 | key = member.user.username |
|
5360 | 5413 | if key in users: |
|
5361 | 5414 | # skip this member as we have him already |
|
5362 | 5415 | # this prevents from override the "first" matched |
|
5363 | 5416 | # users with duplicates in multiple groups |
|
5364 | 5417 | continue |
|
5365 | 5418 | |
|
5366 | 5419 | users[key] = { |
|
5367 | 5420 | 'user': member.user, |
|
5368 | 5421 | 'source': 'user_group', |
|
5369 | 5422 | 'source_data': source_data, |
|
5370 | 5423 | 'data': rule_user_group.rule_data() |
|
5371 | 5424 | } |
|
5372 | 5425 | |
|
5373 | 5426 | return users |
|
5374 | 5427 | |
|
5375 | 5428 | def user_group_vote_rule(self, user_id): |
|
5376 | 5429 | |
|
5377 | 5430 | rules = [] |
|
5378 | 5431 | if not self.rule_user_groups: |
|
5379 | 5432 | return rules |
|
5380 | 5433 | |
|
5381 | 5434 | for user_group in self.rule_user_groups: |
|
5382 | 5435 | user_group_members = [x.user_id for x in user_group.users_group.members] |
|
5383 | 5436 | if user_id in user_group_members: |
|
5384 | 5437 | rules.append(user_group) |
|
5385 | 5438 | return rules |
|
5386 | 5439 | |
|
5387 | 5440 | def __repr__(self): |
|
5388 | 5441 | return f'<RepoReviewerRule(id={self.repo_review_rule_id}, repo={self.repo!r})>' |
|
5389 | 5442 | |
|
5390 | 5443 | |
|
class ScheduleEntry(Base, BaseModel):
    """
    Persisted definition of a scheduled (celery) task: its schedule
    (crontab or integer interval), the task's dotted name, and its
    JSON-serialized args/kwargs. ``task_uid`` is a content hash kept in
    sync by the before_insert/before_update listeners below.
    """
    __tablename__ = 'schedule_entries'
    __table_args__ = (
        UniqueConstraint('schedule_name', name='s_schedule_name_idx'),
        UniqueConstraint('task_uid', name='s_task_uid_idx'),
        base_table_args,
    )
    SCHEDULE_TYPE_INTEGER = "integer"
    SCHEDULE_TYPE_CRONTAB = "crontab"

    schedule_types = [SCHEDULE_TYPE_CRONTAB, SCHEDULE_TYPE_INTEGER]
    schedule_entry_id = Column('schedule_entry_id', Integer(), primary_key=True)

    schedule_name = Column("schedule_name", String(255), nullable=False, unique=None, default=None)
    schedule_description = Column("schedule_description", String(10000), nullable=True, unique=None, default=None)
    schedule_enabled = Column("schedule_enabled", Boolean(), nullable=False, unique=None, default=True)

    _schedule_type = Column("schedule_type", String(255), nullable=False, unique=None, default=None)
    schedule_definition = Column('schedule_definition_json', MutationObj.as_mutable(JsonType(default=lambda: "", dialect_map=dict(mysql=LONGTEXT()))))

    schedule_last_run = Column('schedule_last_run', DateTime(timezone=False), nullable=True, unique=None, default=None)
    schedule_total_run_count = Column('schedule_total_run_count', Integer(), nullable=True, unique=None, default=0)

    # task
    task_uid = Column("task_uid", String(255), nullable=False, unique=None, default=None)
    task_dot_notation = Column("task_dot_notation", String(4096), nullable=False, unique=None, default=None)
    task_args = Column('task_args_json', MutationObj.as_mutable(JsonType(default=list, dialect_map=dict(mysql=LONGTEXT()))))
    task_kwargs = Column('task_kwargs_json', MutationObj.as_mutable(JsonType(default=dict, dialect_map=dict(mysql=LONGTEXT()))))

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    updated_on = Column('updated_on', DateTime(timezone=False), nullable=True, unique=None, default=None)

    @hybrid_property
    def schedule_type(self):
        return self._schedule_type

    @schedule_type.setter
    def schedule_type(self, val):
        if val not in self.schedule_types:
            # bug fix: message previously read "must be on of" and had the
            # format arguments swapped (it printed the rejected value where
            # the allowed set belongs, and the old stored value where the
            # rejected value belongs)
            raise ValueError('Value must be one of `{}` and got `{}`'.format(
                self.schedule_types, val))

        self._schedule_type = val

    @classmethod
    def get_uid(cls, obj):
        """Compute a stable sha1 uid from task name + args + kwargs."""
        args = obj.task_args
        kwargs = obj.task_kwargs
        # raw JSON blobs may be stored un-coerced; decode them defensively
        if isinstance(args, JsonRaw):
            try:
                args = json.loads(args)
            except ValueError:
                args = tuple()

        if isinstance(kwargs, JsonRaw):
            try:
                kwargs = json.loads(kwargs)
            except ValueError:
                kwargs = dict()

        dot_notation = obj.task_dot_notation
        val = '.'.join(map(safe_str, [
            sorted(dot_notation), args, sorted(kwargs.items())]))
        return sha1(safe_bytes(val))

    @classmethod
    def get_by_schedule_name(cls, schedule_name):
        return cls.query().filter(cls.schedule_name == schedule_name).scalar()

    @classmethod
    def get_by_schedule_id(cls, schedule_id):
        return cls.query().filter(cls.schedule_entry_id == schedule_id).scalar()

    @property
    def task(self):
        return self.task_dot_notation

    @property
    def schedule(self):
        from rhodecode.lib.celerylib.utils import raw_2_schedule
        schedule = raw_2_schedule(self.schedule_definition, self.schedule_type)
        return schedule

    @property
    def args(self):
        try:
            return list(self.task_args or [])
        except ValueError:
            return list()

    @property
    def kwargs(self):
        try:
            return dict(self.task_kwargs or {})
        except ValueError:
            return dict()

    def _as_raw(self, val, indent=False):
        """Render a (possibly mutation-tracked) JSON value back to a string."""
        if hasattr(val, 'de_coerce'):
            val = val.de_coerce()
        if val:
            if indent:
                val = ext_json.formatted_str_json(val)
            else:
                val = ext_json.str_json(val)

        return val

    @property
    def schedule_definition_raw(self):
        return self._as_raw(self.schedule_definition)

    def args_raw(self, indent=False):
        return self._as_raw(self.task_args, indent)

    def kwargs_raw(self, indent=False):
        return self._as_raw(self.task_kwargs, indent)

    def __repr__(self):
        return f'<DB:ScheduleEntry({self.schedule_entry_id}:{self.schedule_name})>'
|
5511 | 5564 | |
|
5512 | 5565 | |
|
@event.listens_for(ScheduleEntry, 'before_update')
def update_task_uid(mapper, connection, target):
    # keep task_uid in sync with task name/args/kwargs on every update
    target.task_uid = ScheduleEntry.get_uid(target)
|
5516 | 5569 | |
|
5517 | 5570 | |
|
@event.listens_for(ScheduleEntry, 'before_insert')
def set_task_uid(mapper, connection, target):
    # compute the initial task_uid when the row is first inserted
    target.task_uid = ScheduleEntry.get_uid(target)
|
5521 | 5574 | |
|
5522 | 5575 | |
|
class _BaseBranchPerms(BaseModel):
    """
    Mixin shared by the branch-permission models: exposes a glob
    ``branch_pattern`` (kept in sync with a hash column) and a
    ``matches`` check for branch names.
    """

    @classmethod
    def compute_hash(cls, value):
        # hash stored alongside the pattern (see branch_pattern setter)
        return sha1_safe(value)

    @hybrid_property
    def branch_pattern(self):
        # unset pattern falls back to the match-everything glob
        return self._branch_pattern or '*'

    @hybrid_property
    def branch_hash(self):
        return self._branch_hash

    def _validate_glob(self, value):
        # raises re.error when the glob does not translate to a valid regex
        re.compile('^' + glob2re(value) + '$')

    @branch_pattern.setter
    def branch_pattern(self, value):
        self._validate_glob(value)
        self._branch_pattern = value or '*'
        # set the Hash when setting the branch pattern
        self._branch_hash = self.compute_hash(self._branch_pattern)

    def matches(self, branch):
        """
        Check if this the branch matches entry

        :param branch: branch name for the commit
        """
        name = branch or ''
        if not name:
            # an empty branch name always matches
            return True
        rule_regex = re.compile('^' + glob2re(self.branch_pattern) + '$')
        return bool(rule_regex.search(name))
|
5561 | 5614 | |
|
5562 | 5615 | |
|
class UserToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Per-user branch permission rule attached to a user's repository
    permission entry. Glob matching comes from _BaseBranchPerms.
    """
    __tablename__ = 'user_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', back_populates='user_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    # ties this branch rule to the user's repo-level permission row
    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('repo_to_perm.repo_to_perm_id'), nullable=False, unique=None, default=None)
    user_repo_to_perm = relationship('UserRepoToPerm', back_populates='branch_perm_entry')

    # rule_order controls evaluation ordering; _branch_pattern/_branch_hash
    # back the hybrid properties defined on _BaseBranchPerms
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __repr__(self):
        return f'<UserBranchPermission({self.user_repo_to_perm} => {self.branch_pattern!r})>'
|
5586 | 5639 | |
|
5587 | 5640 | |
|
class UserGroupToRepoBranchPermission(Base, _BaseBranchPerms):
    """
    Per-user-group branch permission rule attached to a user group's
    repository permission entry. Glob matching comes from _BaseBranchPerms.
    """
    __tablename__ = 'user_group_to_repo_branch_permissions'
    __table_args__ = (
        base_table_args
    )

    branch_rule_id = Column('branch_rule_id', Integer(), primary_key=True)

    repository_id = Column('repository_id', Integer(), ForeignKey('repositories.repo_id'), nullable=False, unique=None, default=None)
    repo = relationship('Repository', back_populates='user_group_branch_perms')

    permission_id = Column('permission_id', Integer(), ForeignKey('permissions.permission_id'), nullable=False, unique=None, default=None)
    permission = relationship('Permission')

    # ties this branch rule to the user group's repo-level permission row
    rule_to_perm_id = Column('rule_to_perm_id', Integer(), ForeignKey('users_group_repo_to_perm.users_group_to_perm_id'), nullable=False, unique=None, default=None)
    user_group_repo_to_perm = relationship('UserGroupRepoToPerm', back_populates='user_group_branch_perms')

    # rule_order controls evaluation ordering; _branch_pattern/_branch_hash
    # back the hybrid properties defined on _BaseBranchPerms
    rule_order = Column('rule_order', Integer(), nullable=False)
    _branch_pattern = Column('branch_pattern', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), default='*')  # glob
    _branch_hash = Column('branch_hash', UnicodeText().with_variant(UnicodeText(2048), 'mysql'))

    def __repr__(self):
        # bug fix: repr previously said "UserBranchPermission" (copy-paste
        # from UserToRepoBranchPermission), misidentifying this object
        return f'<UserGroupBranchPermission({self.user_group_repo_to_perm} => {self.branch_pattern!r})>'
|
5611 | 5664 | |
|
5612 | 5665 | |
|
class UserBookmark(Base, BaseModel):
    """
    A user's quick-access bookmark, pointing at either a repository or a
    repository group (the two FK columns are mutually exclusive per the
    unique constraints), displayed at a fixed per-user position.
    """
    __tablename__ = 'user_bookmarks'
    __table_args__ = (
        UniqueConstraint('user_id', 'bookmark_repo_id'),
        UniqueConstraint('user_id', 'bookmark_repo_group_id'),
        UniqueConstraint('user_id', 'bookmark_position'),
        base_table_args
    )

    user_bookmark_id = Column("user_bookmark_id", Integer(), nullable=False, unique=True, default=None, primary_key=True)
    user_id = Column("user_id", Integer(), ForeignKey('users.user_id'), nullable=False, unique=None, default=None)
    position = Column("bookmark_position", Integer(), nullable=False)
    title = Column("bookmark_title", String(255), nullable=True, unique=None, default=None)
    redirect_url = Column("bookmark_redirect_url", String(10240), nullable=True, unique=None, default=None)
    created_on = Column("created_on", DateTime(timezone=False), nullable=False, default=datetime.datetime.now)

    # exactly one of these two targets is expected to be set per bookmark
    bookmark_repo_id = Column("bookmark_repo_id", Integer(), ForeignKey("repositories.repo_id"), nullable=True, unique=None, default=None)
    bookmark_repo_group_id = Column("bookmark_repo_group_id", Integer(), ForeignKey("groups.group_id"), nullable=True, unique=None, default=None)

    user = relationship("User")

    repository = relationship("Repository")
    repository_group = relationship("RepoGroup")

    @classmethod
    def get_by_position_for_user(cls, position, user_id):
        """Return the single bookmark at `position` for `user_id`, or None."""
        return cls.query() \
            .filter(UserBookmark.user_id == user_id) \
            .filter(UserBookmark.position == position).scalar()

    @classmethod
    def get_bookmarks_for_user(cls, user_id, cache=True):
        """
        Return all bookmarks of `user_id` ordered by position, as rows of
        (title, position, repo columns, repo-group columns); outer joins
        leave the non-applicable target's columns as NULL.

        :param cache: when True, serve from the short SQL cache region
        """
        bookmarks = select(
            UserBookmark.title,
            UserBookmark.position,
        ) \
            .add_columns(Repository.repo_id, Repository.repo_type, Repository.repo_name) \
            .add_columns(RepoGroup.group_id, RepoGroup.group_name) \
            .where(UserBookmark.user_id == user_id) \
            .outerjoin(Repository, Repository.repo_id == UserBookmark.bookmark_repo_id) \
            .outerjoin(RepoGroup, RepoGroup.group_id == UserBookmark.bookmark_repo_group_id) \
            .order_by(UserBookmark.position.asc())

        if cache:
            bookmarks = bookmarks.options(
                FromCache("sql_cache_short", f"get_user_{user_id}_bookmarks")
            )

        return Session().execute(bookmarks).all()

    def __repr__(self):
        return f'<UserBookmark({self.position} @ {self.redirect_url!r})>'
|
5665 | 5718 | |
|
5666 | 5719 | |
|
class FileStore(Base, BaseModel):
    """
    An uploaded artifact/attachment. Besides file identity (uid, names,
    sha256 hash, size) it tracks access statistics and optional scope
    references (user / user group / repo / repo group) used for ACL checks.
    """
    __tablename__ = 'file_store'
    __table_args__ = (
        base_table_args
    )

    file_store_id = Column('file_store_id', Integer(), primary_key=True)
    file_uid = Column('file_uid', String(1024), nullable=False)
    file_display_name = Column('file_display_name', UnicodeText().with_variant(UnicodeText(2048), 'mysql'), nullable=True)
    file_description = Column('file_description', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=True)
    file_org_name = Column('file_org_name', UnicodeText().with_variant(UnicodeText(10240), 'mysql'), nullable=False)

    # sha256 hash
    file_hash = Column('file_hash', String(512), nullable=False)
    file_size = Column('file_size', BigInteger(), nullable=False)

    created_on = Column('created_on', DateTime(timezone=False), nullable=False, default=datetime.datetime.now)
    accessed_on = Column('accessed_on', DateTime(timezone=False), nullable=True)
    accessed_count = Column('accessed_count', Integer(), default=0)

    enabled = Column('enabled', Boolean(), nullable=False, default=True)

    # if repo/repo_group reference is set, check for permissions
    check_acl = Column('check_acl', Boolean(), nullable=False, default=True)

    # hidden defines an attachment that should be hidden from showing in artifact listing
    hidden = Column('hidden', Boolean(), nullable=False, default=False)

    user_id = Column('user_id', Integer(), ForeignKey('users.user_id'), nullable=False)
    upload_user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.user_id', back_populates='artifacts')

    file_metadata = relationship('FileStoreMetadata', lazy='joined')

    # scope limited to user, which requester have access to
    scope_user_id = Column(
        'scope_user_id', Integer(), ForeignKey('users.user_id'),
        nullable=True, unique=None, default=None)
    user = relationship('User', lazy='joined', primaryjoin='User.user_id==FileStore.scope_user_id', back_populates='scope_artifacts')

    # scope limited to user group, which requester have access to
    scope_user_group_id = Column(
        'scope_user_group_id', Integer(), ForeignKey('users_groups.users_group_id'),
        nullable=True, unique=None, default=None)
    user_group = relationship('UserGroup', lazy='joined')

    # scope limited to repo, which requester have access to
    scope_repo_id = Column(
        'scope_repo_id', Integer(), ForeignKey('repositories.repo_id'),
        nullable=True, unique=None, default=None)
    repo = relationship('Repository', lazy='joined')

    # scope limited to repo group, which requester have access to
    scope_repo_group_id = Column(
        'scope_repo_group_id', Integer(), ForeignKey('groups.group_id'),
        nullable=True, unique=None, default=None)
    repo_group = relationship('RepoGroup', lazy='joined')

    @classmethod
    def get_scope(cls, scope_type, scope_id):
        """Format a `<type>:<id>` scope string; unknown types pass through."""
        if scope_type == 'repo':
            return f'repo:{scope_id}'
        elif scope_type == 'repo-group':
            return f'repo-group:{scope_id}'
        elif scope_type == 'user':
            return f'user:{scope_id}'
        elif scope_type == 'user-group':
            return f'user-group:{scope_id}'
        else:
            return scope_type

    @classmethod
    def get_by_store_uid(cls, file_store_uid, safe=False):
        # safe=True uses .first() (never raises on multiple rows); the
        # default .scalar() raises if more than one row matches
        if safe:
            return FileStore.query().filter(FileStore.file_uid == file_store_uid).first()
        else:
            return FileStore.query().filter(FileStore.file_uid == file_store_uid).scalar()

    @classmethod
    def create(cls, file_uid, filename, file_hash, file_size, file_display_name='',
               file_description='', enabled=True, hidden=False, check_acl=True,
               user_id=None, scope_user_id=None, scope_repo_id=None, scope_repo_group_id=None):
        """Build (but do not persist) a new FileStore entry."""
        store_entry = FileStore()
        store_entry.file_uid = file_uid
        store_entry.file_display_name = file_display_name
        store_entry.file_org_name = filename
        store_entry.file_size = file_size
        store_entry.file_hash = file_hash
        store_entry.file_description = file_description

        store_entry.check_acl = check_acl
        store_entry.enabled = enabled
        store_entry.hidden = hidden

        store_entry.user_id = user_id
        store_entry.scope_user_id = scope_user_id
        store_entry.scope_repo_id = scope_repo_id
        store_entry.scope_repo_group_id = scope_repo_group_id

        # NOTE: caller is responsible for Session().add() / commit
        return store_entry

    @classmethod
    def store_metadata(cls, file_store_id, args, commit=True):
        """
        Attach (section, key, value, value_type) metadata tuples to a file.

        :raises ArtifactMetadataDuplicate: when a section/key already exists
        :raises ArtifactMetadataBadValueType: on unsupported value_type
        """
        file_store = FileStore.get(file_store_id)
        if file_store is None:
            return

        for section, key, value, value_type in args:
            has_key = FileStoreMetadata().query() \
                .filter(FileStoreMetadata.file_store_id == file_store.file_store_id) \
                .filter(FileStoreMetadata.file_store_meta_section == section) \
                .filter(FileStoreMetadata.file_store_meta_key == key) \
                .scalar()
            if has_key:
                msg = 'key `{}` already defined under section `{}` for this file.'\
                    .format(key, section)
                raise ArtifactMetadataDuplicate(msg, err_section=section, err_key=key)

            # NOTE(marcink): raises ArtifactMetadataBadValueType
            FileStoreMetadata.valid_value_type(value_type)

            meta_entry = FileStoreMetadata()
            meta_entry.file_store = file_store
            meta_entry.file_store_meta_section = section
            meta_entry.file_store_meta_key = key
            meta_entry.file_store_meta_value_type = value_type
            meta_entry.file_store_meta_value = value

            Session().add(meta_entry)

        try:
            if commit:
                Session().commit()
        except IntegrityError:
            # DB-level unique constraint caught a racing duplicate
            Session().rollback()
            raise ArtifactMetadataDuplicate('Duplicate section/key found for this file.')

    @classmethod
    def bump_access_counter(cls, file_uid, commit=True):
        """Increment the download counter and stamp the access time."""
        FileStore().query()\
            .filter(FileStore.file_uid == file_uid)\
            .update({FileStore.accessed_count: (FileStore.accessed_count + 1),
                     FileStore.accessed_on: datetime.datetime.now()})
        if commit:
            Session().commit()

    def __json__(self):
        # API/JSON representation of this artifact
        data = {
            'filename': self.file_display_name,
            'filename_org': self.file_org_name,
            'file_uid': self.file_uid,
            'description': self.file_description,
            'hidden': self.hidden,
            'size': self.file_size,
            'created_on': self.created_on,
            'uploaded_by': self.upload_user.get_api_data(details='basic'),
            'downloaded_times': self.accessed_count,
            'sha256': self.file_hash,
            'metadata': self.file_metadata,
        }

        return data

    def __repr__(self):
        return f'<FileStore({self.file_store_id})>'
|
5832 | 5885 | |
|
5833 | 5886 | |
|
class FileStoreMetadata(Base, BaseModel):
    """
    Arbitrary section/key/value metadata attached to a FileStore artifact.

    Values are stored as text together with a type tag (``_file_store_meta_value_type``)
    and converted back through ``SETTINGS_TYPES`` on read; a ``.encrypted`` suffix on
    the type triggers transparent encryption/decryption via ``EncryptedTextValue``.
    """
    __tablename__ = 'file_store_metadata'
    __table_args__ = (
        # uniqueness is enforced on the *hashed* section/key columns so that
        # arbitrarily long section/key text can still participate in the constraint
        UniqueConstraint('file_store_id', 'file_store_meta_section_hash', 'file_store_meta_key_hash'),
        Index('file_store_meta_section_idx', 'file_store_meta_section', mysql_length=255),
        Index('file_store_meta_key_idx', 'file_store_meta_key', mysql_length=255),
        base_table_args
    )
    # converters applied to the raw stored text when a value is read back
    SETTINGS_TYPES = {
        'str': safe_str,
        'int': safe_int,
        'unicode': safe_str,
        'bool': str2bool,
        'list': functools.partial(aslist, sep=',')
    }

    file_store_meta_id = Column(
        "file_store_meta_id", Integer(), nullable=False, unique=True, default=None,
        primary_key=True)
    _file_store_meta_section = Column(
        "file_store_meta_section", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    # hashed copy of the section, kept in sync by the property setter below
    _file_store_meta_section_hash = Column(
        "file_store_meta_section_hash", String(255),
        nullable=True, unique=None, default=None)
    _file_store_meta_key = Column(
        "file_store_meta_key", UnicodeText().with_variant(UnicodeText(1024), 'mysql'),
        nullable=True, unique=None, default=None)
    # hashed copy of the key, kept in sync by the property setter below
    _file_store_meta_key_hash = Column(
        "file_store_meta_key_hash", String(255), nullable=True, unique=None, default=None)
    _file_store_meta_value = Column(
        "file_store_meta_value", UnicodeText().with_variant(UnicodeText(20480), 'mysql'),
        nullable=True, unique=None, default=None)
    _file_store_meta_value_type = Column(
        "file_store_meta_value_type", String(255), nullable=True, unique=None,
        default='unicode')

    file_store_id = Column(
        'file_store_id', Integer(), ForeignKey('file_store.file_store_id'),
        nullable=True, unique=None, default=None)

    file_store = relationship('FileStore', lazy='joined', viewonly=True)

    @classmethod
    def valid_value_type(cls, value):
        """Raise ArtifactMetadataBadValueType unless the type tag's base is known."""
        if value.split('.')[0] not in cls.SETTINGS_TYPES:
            raise ArtifactMetadataBadValueType(
                'value_type must be one of %s got %s' % (cls.SETTINGS_TYPES.keys(), value))

    @hybrid_property
    def file_store_meta_section(self):
        return self._file_store_meta_section

    @file_store_meta_section.setter
    def file_store_meta_section(self, value):
        # store the raw text and its hash together so the unique constraint holds
        self._file_store_meta_section = value
        self._file_store_meta_section_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_key(self):
        return self._file_store_meta_key

    @file_store_meta_key.setter
    def file_store_meta_key(self, value):
        # store the raw text and its hash together so the unique constraint holds
        self._file_store_meta_key = value
        self._file_store_meta_key_hash = _hash_key(value)

    @hybrid_property
    def file_store_meta_value(self):
        val = self._file_store_meta_value

        if self._file_store_meta_value_type:
            # e.g unicode.encrypted == unicode
            _type = self._file_store_meta_value_type.split('.')[0]
            # decode the encrypted value if it's encrypted field type
            if '.encrypted' in self._file_store_meta_value_type:
                cipher = EncryptedTextValue()
                val = safe_str(cipher.process_result_value(val, None))
            # do final type conversion
            converter = self.SETTINGS_TYPES.get(_type) or self.SETTINGS_TYPES['unicode']
            val = converter(val)

        return val

    @file_store_meta_value.setter
    def file_store_meta_value(self, val):
        # NOTE(review): this reads file_store_meta_value_type, so the type must be
        # assigned *before* the value, otherwise `'.encrypted' in None` raises TypeError.
        val = safe_str(val)
        # encode the encrypted value
        if '.encrypted' in self.file_store_meta_value_type:
            cipher = EncryptedTextValue()
            val = safe_str(cipher.process_bind_param(val, None))
        self._file_store_meta_value = val

    @hybrid_property
    def file_store_meta_value_type(self):
        return self._file_store_meta_value_type

    @file_store_meta_value_type.setter
    def file_store_meta_value_type(self, val):
        # e.g unicode.encrypted — validate the base type before storing the tag
        self.valid_value_type(val)
        self._file_store_meta_value_type = val

    def __json__(self):
        """API/JSON representation of a single metadata entry."""
        data = {
            'artifact': self.file_store.file_uid,
            'section': self.file_store_meta_section,
            'key': self.file_store_meta_key,
            'value': self.file_store_meta_value,
        }

        return data

    def __repr__(self):
        return '<%s[%s]%s=>%s]>' % (self.cls_name, self.file_store_meta_section,
                                    self.file_store_meta_key, self.file_store_meta_value)
|
5950 | 6003 | |
|
5951 | 6004 | |
|
class DbMigrateVersion(Base, BaseModel):
    """
    Tracks the currently applied database schema migration version.
    """
    __tablename__ = 'db_migrate_version'
    __table_args__ = (
        base_table_args,
    )

    # "repository" here refers to the migration-script repository,
    # not a VCS repository managed by the application
    repository_id = Column('repository_id', String(250), primary_key=True)
    repository_path = Column('repository_path', Text)
    version = Column('version', Integer)

    @classmethod
    def set_version(cls, version):
        """
        Helper for forcing a different version, usually for debugging purposes via ishell.
        """
        # NOTE(review): assumes at least one row exists; .first() returning None
        # would raise AttributeError — acceptable for a debug-only helper.
        ver = DbMigrateVersion.query().first()
        ver.version = version
        Session().commit()
|
5970 | 6023 | |
|
5971 | 6024 | |
|
class DbSession(Base, BaseModel):
    """
    Database-backed HTTP session storage.

    NOTE(review): presumably populated by the session library's database backend
    (namespace-keyed, pickled payload) — confirm against the session configuration.
    """
    __tablename__ = 'db_session'
    __table_args__ = (
        base_table_args,
    )

    def __repr__(self):
        return f'<DB:DbSession({self.id})>'

    id = Column('id', Integer())
    # session identifier; primary key for lookups
    namespace = Column('namespace', String(255), primary_key=True)
    accessed = Column('accessed', DateTime, nullable=False)
    created = Column('created', DateTime, nullable=False)
    # pickled session payload
    data = Column('data', PickleType, nullable=False)
@@ -1,652 +1,655 b'' | |||
|
1 | 1 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | """ |
|
20 | 20 | these are the form validation classes
|
21 | 21 | http://formencode.org/module-formencode.validators.html |
|
22 | 22 | for a list of all available validators
|
23 | 23 | |
|
24 | 24 | we can create our own validators |
|
25 | 25 | |
|
26 | 26 | The table below outlines the options which can be used in a schema in addition to the validators themselves |
|
27 | 27 | pre_validators [] These validators will be applied before the schema |
|
28 | 28 | chained_validators [] These validators will be applied after the schema |
|
29 | 29 | allow_extra_fields False If True, then it is not an error when keys that aren't associated with a validator are present |
|
30 | 30 | filter_extra_fields False If True, then keys that aren't associated with a validator are removed |
|
31 | 31 | if_key_missing NoDefault If this is given, then any keys that aren't available but are expected will be replaced with this value (and then validated). This does not override a present .if_missing attribute on validators. NoDefault is a special FormEncode class to mean that no default values has been specified and therefore missing keys shouldn't take a default value. |
|
32 | 32 | ignore_key_missing False If True, then missing keys will be missing in the result, if the validator doesn't have .if_missing on it already |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | <name> = formencode.validators.<name of validator> |
|
36 | 36 | <name> must equal form name |
|
37 | 37 | list=[1,2,3,4,5] |
|
38 | 38 | for SELECT use formencode.All(OneOf(list), Int()) |
|
39 | 39 | |
|
40 | 40 | """ |
|
41 | 41 | |
|
42 | 42 | import deform |
|
43 | 43 | import logging |
|
44 | 44 | import formencode |
|
45 | 45 | |
|
46 | 46 | from pkg_resources import resource_filename |
|
47 | 47 | from formencode import All, Pipe |
|
48 | 48 | |
|
49 | 49 | from pyramid.threadlocal import get_current_request |
|
50 | 50 | |
|
51 | 51 | from rhodecode import BACKENDS |
|
52 | 52 | from rhodecode.lib import helpers |
|
53 | 53 | from rhodecode.model import validators as v |
|
54 | 54 | |
|
55 | 55 | log = logging.getLogger(__name__) |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | deform_templates = resource_filename('deform', 'templates') |
|
59 | 59 | rhodecode_templates = resource_filename('rhodecode', 'templates/forms') |
|
60 | 60 | search_path = (rhodecode_templates, deform_templates) |
|
61 | 61 | |
|
62 | 62 | |
|
63 | 63 | class RhodecodeFormZPTRendererFactory(deform.ZPTRendererFactory): |
|
64 | 64 | """ Subclass of ZPTRendererFactory to add rhodecode context variables """ |
|
65 | 65 | def __call__(self, template_name, **kw): |
|
66 | 66 | kw['h'] = helpers |
|
67 | 67 | kw['request'] = get_current_request() |
|
68 | 68 | return self.load(template_name)(**kw) |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | form_renderer = RhodecodeFormZPTRendererFactory(search_path) |
|
72 | 72 | deform.Form.set_default_renderer(form_renderer) |
|
73 | 73 | |
|
74 | 74 | |
|
def LoginForm(localizer):
    """Build the login form schema, with messages translated via *localizer*."""
    _ = localizer

    class _LoginForm(formencode.Schema):
        # accept and silently drop any unexpected POST keys
        allow_extra_fields = True
        filter_extra_fields = True
        username = v.UnicodeString(
            strip=True,
            min=1,
            not_empty=True,
            messages={
                'empty': _('Please enter a login'),
                'tooShort': _('Enter a value %(min)i characters long or more')
            }
        )

        password = v.UnicodeString(
            strip=False,
            min=3,
            max=72,
            not_empty=True,
            messages={
                'empty': _('Please enter a password'),
                'tooShort': _('Enter %(min)i characters or more')}
        )

        remember = v.StringBoolean(if_missing=False)

        # credential verification runs after the per-field validators
        chained_validators = [v.ValidAuth(localizer)]
    return _LoginForm
|
105 | 105 | |
|
106 | 106 | |
|
def TOTPForm(localizer, user, allow_recovery_code_use=False):
    """
    Build the two-factor authentication code form for *user*.

    :param allow_recovery_code_use: also accept a 2FA recovery code, not only a TOTP code.
    """
    _ = localizer

    class _TOTPForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        # 6 digits (TOTP) or 32 uppercase alphanumerics — presumably a recovery code
        totp = v.Regex(r'^(?:\d{6}|[A-Z0-9]{32})$')
        secret_totp = v.String()

        def to_python(self, value, state=None):
            validation_checks = [user.is_totp_valid]
            if allow_recovery_code_use:
                validation_checks.append(user.is_2fa_recovery_code_valid)
            form_data = super().to_python(value, state)
            received_code = form_data['totp']
            secret = form_data.get('secret_totp')

            # the code is accepted if any configured check validates it
            if not any(map(lambda func: func(received_code, secret), validation_checks)):
                error_msg = _('Code is invalid. Try again!')
                raise formencode.Invalid(error_msg, v, state, error_dict={'totp': error_msg})
            return form_data

    return _TOTPForm
|
127 | 130 | |
|
128 | 131 | |
|
def UserForm(localizer, edit=False, available_languages=None, old_data=None):
    """
    Build the admin user create/edit form schema.

    :param edit: when True, password fields are optional ("new_password") and the
        admin flag is editable; when False a password is required.
    :param old_data: previous values, used by uniqueness validators.
    """
    old_data = old_data or {}
    available_languages = available_languages or []
    _ = localizer

    class _UserForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        username = All(v.UnicodeString(strip=True, min=1, not_empty=True),
                       v.ValidUsername(localizer, edit, old_data))
        if edit:
            # editing: password change is optional
            new_password = All(
                v.ValidPassword(localizer),
                v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
            )
            password_confirmation = All(
                v.ValidPassword(localizer),
                v.UnicodeString(strip=False, min=6, max=72, not_empty=False),
            )
            admin = v.StringBoolean(if_missing=False)
        else:
            # creating: password is mandatory
            password = All(
                v.ValidPassword(localizer),
                v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
            )
            password_confirmation = All(
                v.ValidPassword(localizer),
                v.UnicodeString(strip=False, min=6, max=72, not_empty=False)
            )

        password_change = v.StringBoolean(if_missing=False)
        create_repo_group = v.StringBoolean(if_missing=False)

        active = v.StringBoolean(if_missing=False)
        firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
        lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
        email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))
        description = v.UnicodeString(strip=True, min=1, max=250, not_empty=False,
                                      if_missing='')
        extern_name = v.UnicodeString(strip=True)
        extern_type = v.UnicodeString(strip=True)
        language = v.OneOf(available_languages, hideList=False,
                           testValueList=True, if_missing=None)
        # runs after field validation to ensure both password fields match
        chained_validators = [v.ValidPasswordsMatch(localizer)]
    return _UserForm
|
174 | 177 | |
|
175 | 178 | |
|
def UserGroupForm(localizer, edit=False, old_data=None, allow_disabled=False):
    """
    Build the user group create/edit form schema.

    :param edit: when True an owner ("user") field is validated as well.
    :param allow_disabled: allow a disabled user as the group owner.
    """
    old_data = old_data or {}
    _ = localizer

    class _UserGroupForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True

        users_group_name = All(
            v.UnicodeString(strip=True, min=1, not_empty=True),
            v.ValidUserGroup(localizer, edit, old_data)
        )
        user_group_description = v.UnicodeString(strip=True, min=1,
                                                 not_empty=False)

        users_group_active = v.StringBoolean(if_missing=False)

        if edit:
            # this is user group owner
            user = All(
                v.UnicodeString(not_empty=True),
                v.ValidRepoUser(localizer, allow_disabled))
    return _UserGroupForm
|
199 | 202 | |
|
200 | 203 | |
|
def RepoGroupForm(localizer, edit=False, old_data=None, available_groups=None,
                  can_create_in_root=False, allow_disabled=False):
    """
    Build the repository group create/edit form schema.

    :param available_groups: valid choices for the parent group field.
    :param can_create_in_root: permit creating the group at the top level.
    """
    _ = localizer
    old_data = old_data or {}
    available_groups = available_groups or []

    class _RepoGroupForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False

        # name is slugified to a URL-safe form after basic validation
        group_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
                         v.SlugifyName(localizer),)
        group_description = v.UnicodeString(strip=True, min=1,
                                            not_empty=False)
        group_copy_permissions = v.StringBoolean(if_missing=False)

        group_parent_id = v.OneOf(available_groups, hideList=False,
                                  testValueList=True, not_empty=True)
        enable_locking = v.StringBoolean(if_missing=False)
        chained_validators = [
            v.ValidRepoGroup(localizer, edit, old_data, can_create_in_root)]

        if edit:
            # this is repo group owner
            user = All(
                v.UnicodeString(not_empty=True),
                v.ValidRepoUser(localizer, allow_disabled))
    return _RepoGroupForm
|
229 | 232 | |
|
230 | 233 | |
|
def RegisterForm(localizer, edit=False, old_data=None):
    """Build the self-registration form schema (username, password, email)."""
    _ = localizer
    old_data = old_data or {}

    class _RegisterForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        username = All(
            v.ValidUsername(localizer, edit, old_data),
            v.UnicodeString(strip=True, min=1, not_empty=True)
        )
        password = All(
            v.ValidPassword(localizer),
            v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
        )
        password_confirmation = All(
            v.ValidPassword(localizer),
            v.UnicodeString(strip=False, min=6, max=72, not_empty=True)
        )
        active = v.StringBoolean(if_missing=False)
        firstname = v.UnicodeString(strip=True, min=1, not_empty=False)
        lastname = v.UnicodeString(strip=True, min=1, not_empty=False)
        email = All(v.UniqSystemEmail(localizer, old_data), v.Email(not_empty=True))

        # cross-field check that both password fields agree
        chained_validators = [v.ValidPasswordsMatch(localizer)]
    return _RegisterForm
|
257 | 260 | |
|
258 | 261 | |
|
def PasswordResetForm(localizer):
    """Build the password-reset request form: a single validated email field."""
    _ = localizer

    class _PasswordResetForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        # address must be syntactically valid and known to the system
        email = All(v.ValidSystemEmail(localizer), v.Email(not_empty=True))
    return _PasswordResetForm
|
267 | 270 | |
|
268 | 271 | |
|
def RepoForm(localizer, edit=False, old_data=None, repo_groups=None, allow_disabled=False):
    """
    Build the repository create/edit form schema.

    :param repo_groups: valid choices for the target repository group.
    :param allow_disabled: allow a disabled user as repository owner (edit only).
    """
    _ = localizer
    old_data = old_data or {}
    repo_groups = repo_groups or []
    supported_backends = BACKENDS.keys()

    class _RepoForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
                        v.SlugifyName(localizer), v.CannotHaveGitSuffix(localizer))
        repo_group = All(v.CanWriteGroup(localizer, old_data),
                         v.OneOf(repo_groups, hideList=True))
        # repo type cannot change implicitly on edit; default to the previous value
        repo_type = v.OneOf(supported_backends, required=False,
                            if_missing=old_data.get('repo_type'))
        repo_description = v.UnicodeString(strip=True, min=1, not_empty=False)
        repo_private = v.StringBoolean(if_missing=False)
        repo_copy_permissions = v.StringBoolean(if_missing=False)
        clone_uri = All(v.UnicodeString(strip=True, min=1, not_empty=False))

        repo_enable_statistics = v.StringBoolean(if_missing=False)
        repo_enable_downloads = v.StringBoolean(if_missing=False)
        repo_enable_locking = v.StringBoolean(if_missing=False)

        if edit:
            # this is repo owner
            user = All(
                v.UnicodeString(not_empty=True),
                v.ValidRepoUser(localizer, allow_disabled))
            clone_uri_change = v.UnicodeString(
                not_empty=False, if_missing=v.Missing)

        chained_validators = [v.ValidCloneUri(localizer),
                              v.ValidRepoName(localizer, edit, old_data)]
    return _RepoForm
|
304 | 307 | |
|
305 | 308 | |
|
def RepoPermsForm(localizer):
    """Build the schema validating repository permission updates."""
    _ = localizer

    class _RepoPermsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        # all validation is done in the chained permissions validator
        chained_validators = [v.ValidPerms(localizer, type_='repo')]
    return _RepoPermsForm
|
314 | 317 | |
|
315 | 318 | |
|
def RepoGroupPermsForm(localizer, valid_recursive_choices):
    """
    Build the schema validating repository group permission updates.

    :param valid_recursive_choices: allowed values for the "recursive" apply mode.
    """
    _ = localizer

    class _RepoGroupPermsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        recursive = v.OneOf(valid_recursive_choices)
        chained_validators = [v.ValidPerms(localizer, type_='repo_group')]
    return _RepoGroupPermsForm
|
325 | 328 | |
|
326 | 329 | |
|
def UserGroupPermsForm(localizer):
    """Build the schema validating user group permission updates."""
    _ = localizer

    class _UserPermsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        # all validation is done in the chained permissions validator
        chained_validators = [v.ValidPerms(localizer, type_='user_group')]
    return _UserPermsForm
|
335 | 338 | |
|
336 | 339 | |
|
def RepoFieldForm(localizer):
    """Build the schema for adding a custom repository field (key, value, type, label)."""
    _ = localizer

    class _RepoFieldForm(formencode.Schema):
        filter_extra_fields = True
        allow_extra_fields = True

        new_field_key = All(v.FieldKey(localizer),
                            v.UnicodeString(strip=True, min=3, not_empty=True))
        new_field_value = v.UnicodeString(not_empty=False, if_missing='')
        new_field_type = v.OneOf(['str', 'unicode', 'list', 'tuple'],
                                 if_missing='str')
        new_field_label = v.UnicodeString(not_empty=False)
        new_field_desc = v.UnicodeString(not_empty=False)
    return _RepoFieldForm
|
352 | 355 | |
|
353 | 356 | |
|
def RepoForkForm(localizer, edit=False, old_data=None,
                 supported_backends=None, repo_groups=None):
    """
    Build the repository fork form schema.

    :param old_data: previous values, used by the fork-type/name validators.
    :param supported_backends: allowed repo types; defaults to the currently
        enabled ``BACKENDS`` at call time.
    :param repo_groups: valid choices for the target repository group.
    """
    _ = localizer
    old_data = old_data or {}
    repo_groups = repo_groups or []
    # FIX: previously the default was ``BACKENDS.keys()`` evaluated at import
    # time (a mutable-default antipattern); resolve it lazily so the form always
    # reflects the current BACKENDS registry. Passing an explicit value behaves
    # exactly as before.
    if supported_backends is None:
        supported_backends = BACKENDS.keys()

    class _RepoForkForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        repo_name = All(v.UnicodeString(strip=True, min=1, not_empty=True),
                        v.SlugifyName(localizer))
        repo_group = All(v.CanWriteGroup(localizer, ),
                         v.OneOf(repo_groups, hideList=True))
        repo_type = All(v.ValidForkType(localizer, old_data), v.OneOf(supported_backends))
        description = v.UnicodeString(strip=True, min=1, not_empty=True)
        private = v.StringBoolean(if_missing=False)
        copy_permissions = v.StringBoolean(if_missing=False)
        fork_parent_id = v.UnicodeString()
        chained_validators = [v.ValidForkName(localizer, edit, old_data)]
    return _RepoForkForm
|
374 | 377 | |
|
375 | 378 | |
|
def ApplicationSettingsForm(localizer):
    """Build the global application settings form (title, realm, captcha, etc.)."""
    _ = localizer

    class _ApplicationSettingsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        rhodecode_title = v.UnicodeString(strip=True, max=40, not_empty=False)
        rhodecode_realm = v.UnicodeString(strip=True, min=1, not_empty=True)
        # raw HTML snippets injected before/after page content
        rhodecode_pre_code = v.UnicodeString(strip=True, min=1, not_empty=False)
        rhodecode_post_code = v.UnicodeString(strip=True, min=1, not_empty=False)
        rhodecode_captcha_public_key = v.UnicodeString(strip=True, min=1, not_empty=False)
        rhodecode_captcha_private_key = v.UnicodeString(strip=True, min=1, not_empty=False)
        rhodecode_create_personal_repo_group = v.StringBoolean(if_missing=False)
        rhodecode_personal_repo_group_pattern = v.UnicodeString(strip=True, min=1, not_empty=False)
    return _ApplicationSettingsForm
|
391 | 394 | |
|
392 | 395 | |
|
def ApplicationVisualisationForm(localizer):
    """Build the UI/visualisation settings form (icons, gravatar, clone URI templates)."""
    # local import to avoid a module-level import cycle with the db model
    from rhodecode.model.db import Repository
    _ = localizer

    class _ApplicationVisualisationForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = False
        rhodecode_show_public_icon = v.StringBoolean(if_missing=False)
        rhodecode_show_private_icon = v.StringBoolean(if_missing=False)
        rhodecode_stylify_metatags = v.StringBoolean(if_missing=False)

        rhodecode_repository_fields = v.StringBoolean(if_missing=False)
        rhodecode_lightweight_journal = v.StringBoolean(if_missing=False)
        rhodecode_dashboard_items = v.Int(min=5, not_empty=True)
        rhodecode_admin_grid_items = v.Int(min=5, not_empty=True)
        rhodecode_show_version = v.StringBoolean(if_missing=False)
        rhodecode_use_gravatar = v.StringBoolean(if_missing=False)
        rhodecode_markup_renderer = v.OneOf(['markdown', 'rst'])
        rhodecode_gravatar_url = v.UnicodeString(min=3)
        # empty submissions fall back to the repository's default URI templates
        rhodecode_clone_uri_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI)
        rhodecode_clone_uri_id_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_ID)
        rhodecode_clone_uri_ssh_tmpl = v.UnicodeString(not_empty=False, if_empty=Repository.DEFAULT_CLONE_URI_SSH)
        rhodecode_support_url = v.UnicodeString()
        rhodecode_show_revision_number = v.StringBoolean(if_missing=False)
        rhodecode_show_sha_length = v.Int(min=4, not_empty=True)
    return _ApplicationVisualisationForm
|
419 | 422 | |
|
420 | 423 | |
|
class _BaseVcsSettingsForm(formencode.Schema):
    """Shared VCS settings fields, reused by the global and per-repo settings forms."""

    allow_extra_fields = True
    filter_extra_fields = False
    hooks_changegroup_repo_size = v.StringBoolean(if_missing=False)
    hooks_changegroup_push_logger = v.StringBoolean(if_missing=False)
    hooks_outgoing_pull_logger = v.StringBoolean(if_missing=False)

    # PR/Code-review
    rhodecode_pr_merge_enabled = v.StringBoolean(if_missing=False)
    rhodecode_use_outdated_comments = v.StringBoolean(if_missing=False)

    # hg
    extensions_largefiles = v.StringBoolean(if_missing=False)
    extensions_evolve = v.StringBoolean(if_missing=False)
    phases_publish = v.StringBoolean(if_missing=False)

    rhodecode_hg_use_rebase_for_merging = v.StringBoolean(if_missing=False)
    rhodecode_hg_close_branch_before_merging = v.StringBoolean(if_missing=False)

    # git
    vcs_git_lfs_enabled = v.StringBoolean(if_missing=False)
    rhodecode_git_use_rebase_for_merging = v.StringBoolean(if_missing=False)
    rhodecode_git_close_branch_before_merging = v.StringBoolean(if_missing=False)

    # cache
    rhodecode_diff_cache = v.StringBoolean(if_missing=False)
|
448 | 451 | |
|
449 | 452 | |
|
def ApplicationUiSettingsForm(localizer):
    """Build the global VCS/UI settings form, extending the shared VCS base schema."""
    _ = localizer

    class _ApplicationUiSettingsForm(_BaseVcsSettingsForm):
        web_push_ssl = v.StringBoolean(if_missing=False)
        # filesystem paths must exist and be valid
        largefiles_usercache = All(
            v.ValidPath(localizer),
            v.UnicodeString(strip=True, min=2, not_empty=True))
        vcs_git_lfs_store_location = All(
            v.ValidPath(localizer),
            v.UnicodeString(strip=True, min=2, not_empty=True))
        extensions_hggit = v.StringBoolean(if_missing=False)
        new_svn_branch = v.ValidSvnPattern(localizer, section='vcs_svn_branch')
        new_svn_tag = v.ValidSvnPattern(localizer, section='vcs_svn_tag')
    return _ApplicationUiSettingsForm
|
465 | 468 | |
|
466 | 469 | |
|
def RepoVcsSettingsForm(localizer, repo_name):
    """Build the per-repository VCS settings form, scoped to *repo_name*."""
    _ = localizer

    class _RepoVcsSettingsForm(_BaseVcsSettingsForm):
        # when set, the repo uses the global settings instead of its own
        inherit_global_settings = v.StringBoolean(if_missing=False)
        new_svn_branch = v.ValidSvnPattern(localizer,
                                           section='vcs_svn_branch', repo_name=repo_name)
        new_svn_tag = v.ValidSvnPattern(localizer,
                                        section='vcs_svn_tag', repo_name=repo_name)
    return _RepoVcsSettingsForm
|
477 | 480 | |
|
478 | 481 | |
|
def LabsSettingsForm(localizer):
    """Build the (currently empty) experimental "labs" settings form."""
    _ = localizer

    class _LabSettingsForm(formencode.Schema):
        # no declared fields: everything submitted is passed through unvalidated
        allow_extra_fields = True
        filter_extra_fields = False
    return _LabSettingsForm
|
486 | 489 | |
|
487 | 490 | |
|
def ApplicationPermissionsForm(
        localizer, register_choices, password_reset_choices,
        extern_activate_choices):
    """
    Build the application-wide permission form (registration, password reset,
    external-account activation), constrained to the passed choice lists.
    """
    _ = localizer

    class _DefaultPermissionsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True

        anonymous = v.StringBoolean(if_missing=False)
        default_register = v.OneOf(register_choices)
        default_register_message = v.UnicodeString()
        default_password_reset = v.OneOf(password_reset_choices)
        default_extern_activate = v.OneOf(extern_activate_choices)
    return _DefaultPermissionsForm
|
503 | 506 | |
|
504 | 507 | |
|
def ObjectPermissionsForm(localizer, repo_perms_choices, group_perms_choices,
                          user_group_perms_choices):
    """
    Build the default object-permission form for repos, repo groups and user
    groups, constrained to the passed per-object choice lists.
    """
    _ = localizer

    class _ObjectPermissionsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        # when set, existing per-object permissions are overwritten with the default
        overwrite_default_repo = v.StringBoolean(if_missing=False)
        overwrite_default_group = v.StringBoolean(if_missing=False)
        overwrite_default_user_group = v.StringBoolean(if_missing=False)

        default_repo_perm = v.OneOf(repo_perms_choices)
        default_group_perm = v.OneOf(group_perms_choices)
        default_user_group_perm = v.OneOf(user_group_perms_choices)

        return _ObjectPermissionsForm
|
521 | 524 | |
|
522 | 525 | |
|
def BranchPermissionsForm(localizer, branch_perms_choices):
    """Build the default branch-permission form, constrained to *branch_perms_choices*."""
    _ = localizer

    class _BranchPermissionsForm(formencode.Schema):
        allow_extra_fields = True
        filter_extra_fields = True
        # when set, existing branch permissions are overwritten with the default
        overwrite_default_branch = v.StringBoolean(if_missing=False)
        default_branch_perm = v.OneOf(branch_perms_choices)

    return _BranchPermissionsForm
|
533 | 536 | |
|
534 | 537 | |
|
535 | 538 | def UserPermissionsForm(localizer, create_choices, create_on_write_choices, |
|
536 | 539 | repo_group_create_choices, user_group_create_choices, |
|
537 | 540 | fork_choices, inherit_default_permissions_choices): |
|
538 | 541 | _ = localizer |
|
539 | 542 | |
|
540 | 543 | class _DefaultPermissionsForm(formencode.Schema): |
|
541 | 544 | allow_extra_fields = True |
|
542 | 545 | filter_extra_fields = True |
|
543 | 546 | |
|
544 | 547 | anonymous = v.StringBoolean(if_missing=False) |
|
545 | 548 | |
|
546 | 549 | default_repo_create = v.OneOf(create_choices) |
|
547 | 550 | default_repo_create_on_write = v.OneOf(create_on_write_choices) |
|
548 | 551 | default_user_group_create = v.OneOf(user_group_create_choices) |
|
549 | 552 | default_repo_group_create = v.OneOf(repo_group_create_choices) |
|
550 | 553 | default_fork_create = v.OneOf(fork_choices) |
|
551 | 554 | default_inherit_default_permissions = v.OneOf(inherit_default_permissions_choices) |
|
552 | 555 | return _DefaultPermissionsForm |
|
553 | 556 | |
|
554 | 557 | |
|
555 | 558 | def UserIndividualPermissionsForm(localizer): |
|
556 | 559 | _ = localizer |
|
557 | 560 | |
|
558 | 561 | class _DefaultPermissionsForm(formencode.Schema): |
|
559 | 562 | allow_extra_fields = True |
|
560 | 563 | filter_extra_fields = True |
|
561 | 564 | |
|
562 | 565 | inherit_default_permissions = v.StringBoolean(if_missing=False) |
|
563 | 566 | return _DefaultPermissionsForm |
|
564 | 567 | |
|
565 | 568 | |
|
566 | 569 | def DefaultsForm(localizer, edit=False, old_data=None, supported_backends=BACKENDS.keys()): |
|
567 | 570 | _ = localizer |
|
568 | 571 | old_data = old_data or {} |
|
569 | 572 | |
|
570 | 573 | class _DefaultsForm(formencode.Schema): |
|
571 | 574 | allow_extra_fields = True |
|
572 | 575 | filter_extra_fields = True |
|
573 | 576 | default_repo_type = v.OneOf(supported_backends) |
|
574 | 577 | default_repo_private = v.StringBoolean(if_missing=False) |
|
575 | 578 | default_repo_enable_statistics = v.StringBoolean(if_missing=False) |
|
576 | 579 | default_repo_enable_downloads = v.StringBoolean(if_missing=False) |
|
577 | 580 | default_repo_enable_locking = v.StringBoolean(if_missing=False) |
|
578 | 581 | return _DefaultsForm |
|
579 | 582 | |
|
580 | 583 | |
|
581 | 584 | def AuthSettingsForm(localizer): |
|
582 | 585 | _ = localizer |
|
583 | 586 | |
|
584 | 587 | class _AuthSettingsForm(formencode.Schema): |
|
585 | 588 | allow_extra_fields = True |
|
586 | 589 | filter_extra_fields = True |
|
587 | 590 | auth_plugins = All(v.ValidAuthPlugins(localizer), |
|
588 | 591 | v.UniqueListFromString(localizer)(not_empty=True)) |
|
589 | 592 | return _AuthSettingsForm |
|
590 | 593 | |
|
591 | 594 | |
|
592 | 595 | def UserExtraEmailForm(localizer): |
|
593 | 596 | _ = localizer |
|
594 | 597 | |
|
595 | 598 | class _UserExtraEmailForm(formencode.Schema): |
|
596 | 599 | email = All(v.UniqSystemEmail(localizer), v.Email(not_empty=True)) |
|
597 | 600 | return _UserExtraEmailForm |
|
598 | 601 | |
|
599 | 602 | |
|
600 | 603 | def UserExtraIpForm(localizer): |
|
601 | 604 | _ = localizer |
|
602 | 605 | |
|
603 | 606 | class _UserExtraIpForm(formencode.Schema): |
|
604 | 607 | ip = v.ValidIp(localizer)(not_empty=True) |
|
605 | 608 | return _UserExtraIpForm |
|
606 | 609 | |
|
607 | 610 | |
|
608 | 611 | def PullRequestForm(localizer, repo_id): |
|
609 | 612 | _ = localizer |
|
610 | 613 | |
|
611 | 614 | class ReviewerForm(formencode.Schema): |
|
612 | 615 | user_id = v.Int(not_empty=True) |
|
613 | 616 | reasons = All() |
|
614 | 617 | rules = All(v.UniqueList(localizer, convert=int)()) |
|
615 | 618 | mandatory = v.StringBoolean() |
|
616 | 619 | role = v.String(if_missing='reviewer') |
|
617 | 620 | |
|
618 | 621 | class ObserverForm(formencode.Schema): |
|
619 | 622 | user_id = v.Int(not_empty=True) |
|
620 | 623 | reasons = All() |
|
621 | 624 | rules = All(v.UniqueList(localizer, convert=int)()) |
|
622 | 625 | mandatory = v.StringBoolean() |
|
623 | 626 | role = v.String(if_missing='observer') |
|
624 | 627 | |
|
625 | 628 | class _PullRequestForm(formencode.Schema): |
|
626 | 629 | allow_extra_fields = True |
|
627 | 630 | filter_extra_fields = True |
|
628 | 631 | |
|
629 | 632 | common_ancestor = v.UnicodeString(strip=True, required=True) |
|
630 | 633 | source_repo = v.UnicodeString(strip=True, required=True) |
|
631 | 634 | source_ref = v.UnicodeString(strip=True, required=True) |
|
632 | 635 | target_repo = v.UnicodeString(strip=True, required=True) |
|
633 | 636 | target_ref = v.UnicodeString(strip=True, required=True) |
|
634 | 637 | revisions = All(#v.NotReviewedRevisions(localizer, repo_id)(), |
|
635 | 638 | v.UniqueList(localizer)(not_empty=True)) |
|
636 | 639 | review_members = formencode.ForEach(ReviewerForm()) |
|
637 | 640 | observer_members = formencode.ForEach(ObserverForm()) |
|
638 | 641 | pullrequest_title = v.UnicodeString(strip=True, required=True, min=1, max=255) |
|
639 | 642 | pullrequest_desc = v.UnicodeString(strip=True, required=False) |
|
640 | 643 | description_renderer = v.UnicodeString(strip=True, required=False) |
|
641 | 644 | |
|
642 | 645 | return _PullRequestForm |
|
643 | 646 | |
|
644 | 647 | |
|
645 | 648 | def IssueTrackerPatternsForm(localizer): |
|
646 | 649 | _ = localizer |
|
647 | 650 | |
|
648 | 651 | class _IssueTrackerPatternsForm(formencode.Schema): |
|
649 | 652 | allow_extra_fields = True |
|
650 | 653 | filter_extra_fields = False |
|
651 | 654 | chained_validators = [v.ValidPattern(localizer)] |
|
652 | 655 | return _IssueTrackerPatternsForm |
@@ -1,416 +1,419 b'' | |||
|
1 | 1 | |
|
2 | 2 | /****************************************************************************** |
|
3 | 3 | * * |
|
4 | 4 | * DO NOT CHANGE THIS FILE MANUALLY * |
|
5 | 5 | * * |
|
6 | 6 | * * |
|
7 | 7 | * This file is automatically generated when the app starts up with * |
|
8 | 8 | * generate_js_files = true * |
|
9 | 9 | * * |
|
10 | 10 | * To add a route here pass jsroute=True to the route definition in the app * |
|
11 | 11 | * * |
|
12 | 12 | ******************************************************************************/ |
|
13 | 13 | function registerRCRoutes() { |
|
14 | 14 | // routes registration |
|
15 | 15 | pyroutes.register('admin_artifacts', '/_admin/artifacts', []); |
|
16 | 16 | pyroutes.register('admin_artifacts_data', '/_admin/artifacts-data', []); |
|
17 | 17 | pyroutes.register('admin_artifacts_delete', '/_admin/artifacts/%(uid)s/delete', ['uid']); |
|
18 | 18 | pyroutes.register('admin_artifacts_show_all', '/_admin/artifacts', []); |
|
19 | 19 | pyroutes.register('admin_artifacts_show_info', '/_admin/artifacts/%(uid)s', ['uid']); |
|
20 | 20 | pyroutes.register('admin_artifacts_update', '/_admin/artifacts/%(uid)s/update', ['uid']); |
|
21 | 21 | pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']); |
|
22 | 22 | pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); |
|
23 | 23 | pyroutes.register('admin_automation', '/_admin/automation', []); |
|
24 | 24 | pyroutes.register('admin_automation_update', '/_admin/automation/%(entry_id)s/update', ['entry_id']); |
|
25 | 25 | pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []); |
|
26 | 26 | pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []); |
|
27 | 27 | pyroutes.register('admin_home', '/_admin', []); |
|
28 | 28 | pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []); |
|
29 | 29 | pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []); |
|
30 | 30 | pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []); |
|
31 | 31 | pyroutes.register('admin_permissions_branch', '/_admin/permissions/branch', []); |
|
32 | 32 | pyroutes.register('admin_permissions_branch_update', '/_admin/permissions/branch/update', []); |
|
33 | 33 | pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []); |
|
34 | 34 | pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []); |
|
35 | 35 | pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []); |
|
36 | 36 | pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []); |
|
37 | 37 | pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []); |
|
38 | 38 | pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []); |
|
39 | 39 | pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []); |
|
40 | 40 | pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []); |
|
41 | 41 | pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []); |
|
42 | 42 | pyroutes.register('admin_scheduler', '/_admin/scheduler', []); |
|
43 | 43 | pyroutes.register('admin_scheduler_show_tasks', '/_admin/scheduler/_tasks', []); |
|
44 | 44 | pyroutes.register('admin_settings', '/_admin/settings', []); |
|
45 | 45 | pyroutes.register('admin_settings_email', '/_admin/settings/email', []); |
|
46 | 46 | pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []); |
|
47 | 47 | pyroutes.register('admin_settings_exception_tracker', '/_admin/settings/exceptions', []); |
|
48 | 48 | pyroutes.register('admin_settings_exception_tracker_delete', '/_admin/settings/exceptions/%(exception_id)s/delete', ['exception_id']); |
|
49 | 49 | pyroutes.register('admin_settings_exception_tracker_delete_all', '/_admin/settings/exceptions_delete_all', []); |
|
50 | 50 | pyroutes.register('admin_settings_exception_tracker_show', '/_admin/settings/exceptions/%(exception_id)s', ['exception_id']); |
|
51 | 51 | pyroutes.register('admin_settings_global', '/_admin/settings/global', []); |
|
52 | 52 | pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []); |
|
53 | 53 | pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []); |
|
54 | 54 | pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []); |
|
55 | 55 | pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []); |
|
56 | 56 | pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []); |
|
57 | 57 | pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []); |
|
58 | 58 | pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []); |
|
59 | 59 | pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []); |
|
60 | 60 | pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []); |
|
61 | 61 | pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []); |
|
62 | 62 | pyroutes.register('admin_settings_license', '/_admin/settings/license', []); |
|
63 | 63 | pyroutes.register('admin_settings_license_unlock', '/_admin/settings/license_unlock', []); |
|
64 | 64 | pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []); |
|
65 | 65 | pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []); |
|
66 | 66 | pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); |
|
67 | 67 | pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []); |
|
68 | 68 | pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []); |
|
69 | 69 | pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []); |
|
70 | 70 | pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); |
|
71 | 71 | pyroutes.register('admin_settings_scheduler_create', '/_admin/scheduler/create', []); |
|
72 | 72 | pyroutes.register('admin_settings_scheduler_delete', '/_admin/scheduler/%(schedule_id)s/delete', ['schedule_id']); |
|
73 | 73 | pyroutes.register('admin_settings_scheduler_edit', '/_admin/scheduler/%(schedule_id)s', ['schedule_id']); |
|
74 | 74 | pyroutes.register('admin_settings_scheduler_execute', '/_admin/scheduler/%(schedule_id)s/execute', ['schedule_id']); |
|
75 | 75 | pyroutes.register('admin_settings_scheduler_new', '/_admin/scheduler/new', []); |
|
76 | 76 | pyroutes.register('admin_settings_scheduler_update', '/_admin/scheduler/%(schedule_id)s/update', ['schedule_id']); |
|
77 | 77 | pyroutes.register('admin_settings_search', '/_admin/settings/search', []); |
|
78 | 78 | pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); |
|
79 | 79 | pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); |
|
80 | 80 | pyroutes.register('admin_settings_system', '/_admin/settings/system', []); |
|
81 | 81 | pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []); |
|
82 | 82 | pyroutes.register('admin_settings_update', '/_admin/settings/update', []); |
|
83 | 83 | pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []); |
|
84 | 84 | pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []); |
|
85 | 85 | pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []); |
|
86 | 86 | pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []); |
|
87 | 87 | pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []); |
|
88 | 88 | pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []); |
|
89 | 89 | pyroutes.register('apiv2', '/_admin/api', []); |
|
90 | 90 | pyroutes.register('atom_feed_home', '/%(repo_name)s/feed-atom', ['repo_name']); |
|
91 | 91 | pyroutes.register('atom_feed_home_old', '/%(repo_name)s/feed/atom', ['repo_name']); |
|
92 | 92 | pyroutes.register('auth_home', '/_admin/auth*traverse', []); |
|
93 | 93 | pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']); |
|
94 | 94 | pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']); |
|
95 | 95 | pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []); |
|
96 | 96 | pyroutes.register('channelstream_proxy', '/_channelstream', []); |
|
97 | 97 | pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []); |
|
98 | pyroutes.register('check_2fa', '/_admin/check_2fa', []); | |
|
98 | 99 | pyroutes.register('commit_draft_comments_submit', '/%(repo_name)s/changeset/%(commit_id)s/draft_comments_submit', ['repo_name', 'commit_id']); |
|
99 | 100 | pyroutes.register('debug_style_email', '/_admin/debug_style/email/%(email_id)s', ['email_id']); |
|
100 | 101 | pyroutes.register('debug_style_email_plain_rendered', '/_admin/debug_style/email-rendered/%(email_id)s', ['email_id']); |
|
101 | 102 | pyroutes.register('debug_style_home', '/_admin/debug_style', []); |
|
102 | 103 | pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']); |
|
103 | 104 | pyroutes.register('download_file', '/_file_store/download/%(fid)s', ['fid']); |
|
104 | 105 | pyroutes.register('download_file_by_token', '/_file_store/token-download/%(_auth_token)s/%(fid)s', ['_auth_token', 'fid']); |
|
105 | 106 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); |
|
106 | 107 | pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']); |
|
107 | 108 | pyroutes.register('edit_repo_advanced_archive', '/%(repo_name)s/settings/advanced/archive', ['repo_name']); |
|
108 | 109 | pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']); |
|
109 | 110 | pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']); |
|
110 | 111 | pyroutes.register('edit_repo_advanced_hooks', '/%(repo_name)s/settings/advanced/hooks', ['repo_name']); |
|
111 | 112 | pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']); |
|
112 | 113 | pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']); |
|
113 | 114 | pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']); |
|
114 | 115 | pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']); |
|
115 | 116 | pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']); |
|
116 | 117 | pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']); |
|
117 | 118 | pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']); |
|
118 | 119 | pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']); |
|
119 | 120 | pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']); |
|
120 | 121 | pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']); |
|
121 | 122 | pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']); |
|
122 | 123 | pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']); |
|
123 | 124 | pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']); |
|
124 | 125 | pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']); |
|
125 | 126 | pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']); |
|
126 | 127 | pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']); |
|
127 | 128 | pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']); |
|
128 | 129 | pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']); |
|
129 | 130 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); |
|
130 | 131 | pyroutes.register('edit_repo_perms_branch', '/%(repo_name)s/settings/branch_permissions', ['repo_name']); |
|
131 | 132 | pyroutes.register('edit_repo_perms_branch_delete', '/%(repo_name)s/settings/branch_permissions/%(rule_id)s/delete', ['repo_name', 'rule_id']); |
|
132 | 133 | pyroutes.register('edit_repo_perms_set_private', '/%(repo_name)s/settings/permissions/set_private', ['repo_name']); |
|
133 | 134 | pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']); |
|
134 | 135 | pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']); |
|
135 | 136 | pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']); |
|
136 | 137 | pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']); |
|
137 | 138 | pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']); |
|
138 | 139 | pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']); |
|
139 | 140 | pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']); |
|
140 | 141 | pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']); |
|
141 | 142 | pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']); |
|
142 | 143 | pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']); |
|
143 | 144 | pyroutes.register('edit_user_audit_logs_download', '/_admin/users/%(user_id)s/edit/audit/download', ['user_id']); |
|
144 | 145 | pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']); |
|
145 | 146 | pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']); |
|
146 | 147 | pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']); |
|
147 | 148 | pyroutes.register('edit_user_auth_tokens_view', '/_admin/users/%(user_id)s/edit/auth_tokens/view', ['user_id']); |
|
148 | 149 | pyroutes.register('edit_user_caches', '/_admin/users/%(user_id)s/edit/caches', ['user_id']); |
|
149 | 150 | pyroutes.register('edit_user_caches_update', '/_admin/users/%(user_id)s/edit/caches/update', ['user_id']); |
|
150 | 151 | pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']); |
|
151 | 152 | pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']); |
|
152 | 153 | pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']); |
|
153 | 154 | pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']); |
|
154 | 155 | pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']); |
|
155 | 156 | pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']); |
|
156 | 157 | pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']); |
|
157 | 158 | pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']); |
|
158 | 159 | pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']); |
|
159 | 160 | pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']); |
|
160 | 161 | pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']); |
|
161 | 162 | pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']); |
|
162 | 163 | pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']); |
|
163 | 164 | pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']); |
|
164 | 165 | pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']); |
|
165 | 166 | pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']); |
|
166 | 167 | pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']); |
|
167 | 168 | pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']); |
|
168 | 169 | pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']); |
|
169 | 170 | pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']); |
|
170 | 171 | pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']); |
|
171 | 172 | pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']); |
|
172 | 173 | pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']); |
|
173 | 174 | pyroutes.register('favicon', '/favicon.ico', []); |
|
174 | 175 | pyroutes.register('file_preview', '/_file_preview', []); |
|
175 | 176 | pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']); |
|
176 | 177 | pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']); |
|
177 | 178 | pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']); |
|
178 | 179 | pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']); |
|
179 | 180 | pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']); |
|
180 | 181 | pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/rev/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']); |
|
181 | 182 | pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/rev/%(revision)s', ['gist_id', 'revision']); |
|
182 | 183 | pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']); |
|
183 | 184 | pyroutes.register('gists_create', '/_admin/gists/create', []); |
|
184 | 185 | pyroutes.register('gists_new', '/_admin/gists/new', []); |
|
185 | 186 | pyroutes.register('gists_show', '/_admin/gists', []); |
|
186 | 187 | pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']); |
|
187 | 188 | pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']); |
|
188 | 189 | pyroutes.register('global_integrations_home', '/_admin/integrations', []); |
|
189 | 190 | pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); |
|
190 | 191 | pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); |
|
191 | 192 | pyroutes.register('goto_switcher_data', '/_goto_data', []); |
|
192 | 193 | pyroutes.register('home', '/', []); |
|
193 | 194 | pyroutes.register('hovercard_pull_request', '/_hovercard/pull_request/%(pull_request_id)s', ['pull_request_id']); |
|
194 | 195 | pyroutes.register('hovercard_repo_commit', '/_hovercard/commit/%(repo_name)s/%(commit_id)s', ['repo_name', 'commit_id']); |
|
195 | 196 | pyroutes.register('hovercard_user', '/_hovercard/user/%(user_id)s', ['user_id']); |
|
196 | 197 | pyroutes.register('hovercard_user_group', '/_hovercard/user_group/%(user_group_id)s', ['user_group_id']); |
|
197 | 198 | pyroutes.register('hovercard_username', '/_hovercard/username/%(username)s', ['username']); |
|
198 | 199 | pyroutes.register('journal', '/_admin/journal', []); |
|
199 | 200 | pyroutes.register('journal_atom', '/_admin/journal/atom', []); |
|
200 | 201 | pyroutes.register('journal_public', '/_admin/public_journal', []); |
|
201 | 202 | pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []); |
|
202 | 203 | pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []); |
|
203 | 204 | pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []); |
|
204 | 205 | pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []); |
|
205 | 206 | pyroutes.register('journal_rss', '/_admin/journal/rss', []); |
|
206 | 207 | pyroutes.register('login', '/_admin/login', []); |
|
207 | 208 | pyroutes.register('logout', '/_admin/logout', []); |
|
208 | 209 | pyroutes.register('main_page_repo_groups_data', '/_home_repo_groups', []); |
|
209 | 210 | pyroutes.register('main_page_repos_data', '/_home_repos', []); |
|
210 | 211 | pyroutes.register('markup_preview', '/_markup_preview', []); |
|
211 | 212 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); |
|
212 | 213 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); |
|
213 | 214 | pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []); |
|
214 | 215 | pyroutes.register('my_account_auth_tokens_view', '/_admin/my_account/auth_tokens/view', []); |
|
215 | 216 | pyroutes.register('my_account_bookmarks', '/_admin/my_account/bookmarks', []); |
|
216 | 217 | pyroutes.register('my_account_bookmarks_update', '/_admin/my_account/bookmarks/update', []); |
|
217 | 218 | pyroutes.register('my_account_edit', '/_admin/my_account/edit', []); |
|
218 | 219 | pyroutes.register('my_account_emails', '/_admin/my_account/emails', []); |
|
219 | 220 | pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []); |
|
220 | 221 | pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []); |
|
222 | pyroutes.register('my_account_enable_2fa', '/_admin/my_account/enable_2fa', []); | |
|
223 | pyroutes.register('my_account_enable_2fa_save', '/_admin/my_account/enable_2fa_save', []); | |
|
221 | 224 | pyroutes.register('my_account_external_identity', '/_admin/my_account/external-identity', []); |
|
222 | 225 | pyroutes.register('my_account_external_identity_delete', '/_admin/my_account/external-identity/delete', []); |
|
223 | 226 | pyroutes.register('my_account_goto_bookmark', '/_admin/my_account/bookmark/%(bookmark_id)s', ['bookmark_id']); |
|
224 | 227 | pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []); |
|
225 | 228 | pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []); |
|
226 | 229 | pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []); |
|
227 | pyroutes.register('check_2fa', '/_admin/check_2fa', []); | |
|
228 | pyroutes.register('my_account_configure_2fa', '/_admin/my_account/configure_2fa', []); | |
|
229 | pyroutes.register('my_account_regenerate_2fa_recovery_codes', '/_admin/my_account/regenerate_recovery_codes', []); | |
|
230 | 230 | pyroutes.register('my_account_password', '/_admin/my_account/password', []); |
|
231 | 231 | pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []); |
|
232 | 232 | pyroutes.register('my_account_perms', '/_admin/my_account/perms', []); |
|
233 | 233 | pyroutes.register('my_account_profile', '/_admin/my_account/profile', []); |
|
234 | 234 | pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []); |
|
235 | 235 | pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []); |
|
236 | pyroutes.register('my_account_regenerate_2fa_recovery_codes', '/_admin/my_account/regenerate_recovery_codes', []); | |
|
236 | 237 | pyroutes.register('my_account_repos', '/_admin/my_account/repos', []); |
|
238 | pyroutes.register('my_account_show_2fa_recovery_codes', '/_admin/my_account/recovery_codes', []); | |
|
237 | 239 | pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []); |
|
238 | 240 | pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []); |
|
239 | 241 | pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []); |
|
240 | 242 | pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []); |
|
241 | 243 | pyroutes.register('my_account_update', '/_admin/my_account/update', []); |
|
242 | 244 | pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []); |
|
243 | 245 | pyroutes.register('my_account_watched', '/_admin/my_account/watched', []); |
|
244 | 246 | pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']); |
|
245 | 247 | pyroutes.register('notifications_mark_all_read', '/_admin/notifications_mark_all_read', []); |
|
246 | 248 | pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']); |
|
247 | 249 | pyroutes.register('notifications_show_all', '/_admin/notifications', []); |
|
248 | 250 | pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']); |
|
249 | 251 | pyroutes.register('ops_error_test', '/_admin/ops/error', []); |
|
250 | 252 | pyroutes.register('ops_healthcheck', '/_admin/ops/status', []); |
|
251 | 253 | pyroutes.register('ops_ping', '/_admin/ops/ping', []); |
|
252 | 254 | pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); |
|
253 | 255 | pyroutes.register('plugin_admin_chat', '/_admin/plugin_admin_chat/%(action)s', ['action']); |
|
254 | 256 | pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); |
|
255 | 257 | pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); |
|
256 | 258 | pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); |
|
257 | 259 | pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']); |
|
258 | 260 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']); |
|
259 | 261 | pyroutes.register('pullrequest_comment_edit', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/edit', ['repo_name', 'pull_request_id', 'comment_id']); |
|
260 | 262 | pyroutes.register('pullrequest_comments', '/%(repo_name)s/pull-request/%(pull_request_id)s/comments', ['repo_name', 'pull_request_id']); |
|
261 | 263 | pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']); |
|
262 | 264 | pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']); |
|
263 | 265 | pyroutes.register('pullrequest_draft_comments_submit', '/%(repo_name)s/pull-request/%(pull_request_id)s/draft_comments_submit', ['repo_name', 'pull_request_id']); |
|
264 | 266 | pyroutes.register('pullrequest_drafts', '/%(repo_name)s/pull-request/%(pull_request_id)s/drafts', ['repo_name', 'pull_request_id']); |
|
265 | 267 | pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']); |
|
266 | 268 | pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
267 | 269 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); |
|
268 | 270 | pyroutes.register('pullrequest_repo_targets', '/%(repo_name)s/pull-request/repo-targets', ['repo_name']); |
|
269 | 271 | pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
270 | 272 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); |
|
271 | 273 | pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']); |
|
272 | 274 | pyroutes.register('pullrequest_todos', '/%(repo_name)s/pull-request/%(pull_request_id)s/todos', ['repo_name', 'pull_request_id']); |
|
273 | 275 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']); |
|
274 | 276 | pyroutes.register('register', '/_admin/register', []); |
|
275 | 277 | pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); |
|
276 | 278 | pyroutes.register('repo_artifacts_data', '/%(repo_name)s/artifacts_data', ['repo_name']); |
|
277 | 279 | pyroutes.register('repo_artifacts_delete', '/%(repo_name)s/artifacts/delete/%(uid)s', ['repo_name', 'uid']); |
|
278 | 280 | pyroutes.register('repo_artifacts_get', '/%(repo_name)s/artifacts/download/%(uid)s', ['repo_name', 'uid']); |
|
279 | 281 | pyroutes.register('repo_artifacts_info', '/%(repo_name)s/artifacts/info/%(uid)s', ['repo_name', 'uid']); |
|
280 | 282 | pyroutes.register('repo_artifacts_list', '/%(repo_name)s/artifacts', ['repo_name']); |
|
281 | 283 | pyroutes.register('repo_artifacts_new', '/%(repo_name)s/artifacts/new', ['repo_name']); |
|
282 | 284 | pyroutes.register('repo_artifacts_store', '/%(repo_name)s/artifacts/store', ['repo_name']); |
|
283 | 285 | pyroutes.register('repo_artifacts_stream_script', '/_file_store/stream-upload-script', []); |
|
284 | 286 | pyroutes.register('repo_artifacts_stream_store', '/_file_store/stream-upload', []); |
|
285 | 287 | pyroutes.register('repo_artifacts_update', '/%(repo_name)s/artifacts/update/%(uid)s', ['repo_name', 'uid']); |
|
286 | 288 | pyroutes.register('repo_automation', '/%(repo_name)s/settings/automation', ['repo_name']); |
|
287 | 289 | pyroutes.register('repo_automation_update', '/%(repo_name)s/settings/automation/%(entry_id)s/update', ['repo_name', 'entry_id']); |
|
288 | 290 | pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']); |
|
289 | 291 | pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
290 | 292 | pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
291 | 293 | pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']); |
|
292 | 294 | pyroutes.register('repo_commit_comment_attachment_upload', '/%(repo_name)s/changeset/%(commit_id)s/comment/attachment_upload', ['repo_name', 'commit_id']); |
|
293 | 295 | pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']); |
|
294 | 296 | pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']); |
|
295 | 297 | pyroutes.register('repo_commit_comment_edit', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/edit', ['repo_name', 'commit_id', 'comment_id']); |
|
296 | 298 | pyroutes.register('repo_commit_comment_history_view', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/history_view/%(comment_history_id)s', ['repo_name', 'commit_id', 'comment_id', 'comment_history_id']); |
|
297 | 299 | pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']); |
|
298 | 300 | pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']); |
|
299 | 301 | pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']); |
|
300 | 302 | pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']); |
|
301 | 303 | pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']); |
|
302 | 304 | pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']); |
|
303 | 305 | pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
304 | 306 | pyroutes.register('repo_commits', '/%(repo_name)s/commits', ['repo_name']); |
|
305 | 307 | pyroutes.register('repo_commits_elements', '/%(repo_name)s/commits_elements', ['repo_name']); |
|
306 | 308 | pyroutes.register('repo_commits_elements_file', '/%(repo_name)s/commits_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
307 | 309 | pyroutes.register('repo_commits_file', '/%(repo_name)s/commits/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
308 | 310 | pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); |
|
309 | 311 | pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']); |
|
310 | 312 | pyroutes.register('repo_create', '/_admin/repos/create', []); |
|
311 | 313 | pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']); |
|
312 | 314 | pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']); |
|
313 | 315 | pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']); |
|
314 | 316 | pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
315 | 317 | pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
316 | 318 | pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
317 | 319 | pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
318 | 320 | pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
319 | 321 | pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
320 | 322 | pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
321 | 323 | pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
322 | 324 | pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']); |
|
323 | 325 | pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
324 | 326 | pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
325 | 327 | pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
326 | 328 | pyroutes.register('repo_files_check_head', '/%(repo_name)s/check_head/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
327 | 329 | pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
328 | 330 | pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
329 | 331 | pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']); |
|
330 | 332 | pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']); |
|
331 | 333 | pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
332 | 334 | pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
333 | 335 | pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
334 | 336 | pyroutes.register('repo_files_replace_binary', '/%(repo_name)s/replace_binary/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
335 | 337 | pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
336 | 338 | pyroutes.register('repo_files_upload_file', '/%(repo_name)s/upload_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
337 | 339 | pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']); |
|
338 | 340 | pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']); |
|
339 | 341 | pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']); |
|
340 | 342 | pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']); |
|
341 | 343 | pyroutes.register('repo_group_create', '/_admin/repo_group/create', []); |
|
342 | 344 | pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']); |
|
343 | 345 | pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']); |
|
344 | 346 | pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']); |
|
345 | 347 | pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']); |
|
346 | 348 | pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']); |
|
347 | 349 | pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']); |
|
348 | 350 | pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']); |
|
349 | 351 | pyroutes.register('repo_group_list_data', '/_repo_groups', []); |
|
350 | 352 | pyroutes.register('repo_group_new', '/_admin/repo_group/new', []); |
|
351 | 353 | pyroutes.register('repo_groups', '/_admin/repo_groups', []); |
|
352 | 354 | pyroutes.register('repo_groups_data', '/_admin/repo_groups_data', []); |
|
353 | 355 | pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); |
|
354 | 356 | pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); |
|
355 | 357 | pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']); |
|
356 | 358 | pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); |
|
357 | 359 | pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']); |
|
358 | 360 | pyroutes.register('repo_list_data', '/_repos', []); |
|
359 | 361 | pyroutes.register('repo_new', '/_admin/repos/new', []); |
|
360 | 362 | pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
361 | 363 | pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
362 | 364 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); |
|
363 | 365 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); |
|
364 | 366 | pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']); |
|
365 | 367 | pyroutes.register('repo_reviewers_review_rule_delete', '/%(repo_name)s/settings/review/rules/%(rule_id)s/delete', ['repo_name', 'rule_id']); |
|
366 | 368 | pyroutes.register('repo_reviewers_review_rule_edit', '/%(repo_name)s/settings/review/rules/%(rule_id)s', ['repo_name', 'rule_id']); |
|
367 | 369 | pyroutes.register('repo_reviewers_review_rule_new', '/%(repo_name)s/settings/review/rules/new', ['repo_name']); |
|
368 | 370 | pyroutes.register('repo_settings_quick_actions', '/%(repo_name)s/settings/quick-action', ['repo_name']); |
|
369 | 371 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); |
|
370 | 372 | pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']); |
|
371 | 373 | pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']); |
|
372 | 374 | pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']); |
|
373 | 375 | pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']); |
|
374 | 376 | pyroutes.register('repos', '/_admin/repos', []); |
|
375 | 377 | pyroutes.register('repos_data', '/_admin/repos_data', []); |
|
376 | 378 | pyroutes.register('reset_password', '/_admin/password_reset', []); |
|
377 | 379 | pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []); |
|
378 | 380 | pyroutes.register('robots', '/robots.txt', []); |
|
379 | 381 | pyroutes.register('rss_feed_home', '/%(repo_name)s/feed-rss', ['repo_name']); |
|
380 | 382 | pyroutes.register('rss_feed_home_old', '/%(repo_name)s/feed/rss', ['repo_name']); |
|
381 | 383 | pyroutes.register('search', '/_admin/search', []); |
|
382 | 384 | pyroutes.register('search_repo', '/%(repo_name)s/_search', ['repo_name']); |
|
383 | 385 | pyroutes.register('search_repo_alt', '/%(repo_name)s/search', ['repo_name']); |
|
384 | 386 | pyroutes.register('search_repo_group', '/%(repo_group_name)s/_search', ['repo_group_name']); |
|
387 | pyroutes.register('setup_2fa', '/_admin/setup_2fa', []); | |
|
385 | 388 | pyroutes.register('store_user_session_value', '/_store_session_attr', []); |
|
386 | 389 | pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']); |
|
387 | 390 | pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']); |
|
388 | 391 | pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']); |
|
389 | 392 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); |
|
390 | 393 | pyroutes.register('upload_file', '/_file_store/upload', []); |
|
391 | 394 | pyroutes.register('user_autocomplete_data', '/_users', []); |
|
392 | 395 | pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']); |
|
393 | 396 | pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']); |
|
394 | 397 | pyroutes.register('user_disable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_disable', ['user_id']); |
|
395 | 398 | pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']); |
|
396 | 399 | pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']); |
|
397 | 400 | pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']); |
|
398 | 401 | pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']); |
|
399 | 402 | pyroutes.register('user_enable_force_password_reset', '/_admin/users/%(user_id)s/password_reset_enable', ['user_id']); |
|
400 | 403 | pyroutes.register('user_group_autocomplete_data', '/_user_groups', []); |
|
401 | 404 | pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']); |
|
402 | 405 | pyroutes.register('user_group_profile', '/_profile_user_group/%(user_group_name)s', ['user_group_name']); |
|
403 | 406 | pyroutes.register('user_groups', '/_admin/user_groups', []); |
|
404 | 407 | pyroutes.register('user_groups_create', '/_admin/user_groups/create', []); |
|
405 | 408 | pyroutes.register('user_groups_data', '/_admin/user_groups_data', []); |
|
406 | 409 | pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']); |
|
407 | 410 | pyroutes.register('user_groups_new', '/_admin/user_groups/new', []); |
|
408 | 411 | pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']); |
|
409 | 412 | pyroutes.register('user_notice_dismiss', '/_admin/users/%(user_id)s/notice_dismiss', ['user_id']); |
|
410 | 413 | pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']); |
|
411 | 414 | pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']); |
|
412 | 415 | pyroutes.register('users', '/_admin/users', []); |
|
413 | 416 | pyroutes.register('users_create', '/_admin/users/create', []); |
|
414 | 417 | pyroutes.register('users_data', '/_admin/users_data', []); |
|
415 | 418 | pyroutes.register('users_new', '/_admin/users/new', []); |
|
416 | 419 | } |
@@ -1,123 +1,134 b'' | |||
|
1 | 1 | <%namespace name="base" file="/base/base.mako"/> |
|
2 | 2 | |
|
3 | 3 | <div class="panel panel-default"> |
|
4 | 4 | <div class="panel-heading"> |
|
5 | 5 | <h3 class="panel-title">${_('Enable/Disable 2FA for your account')}</h3> |
|
6 | 6 | </div> |
|
7 | ${h.secure_form(h.route_path('my_account_enable_2fa_save'), request=request)} | |
|
7 | 8 | <div class="panel-body"> |
|
8 | 9 | <div class="form"> |
|
9 | 10 | <div class="fields"> |
|
10 | 11 | <div class="field"> |
|
11 | 12 | <div class="label"> |
|
12 | 13 | <label>${_('2FA status')}:</label> |
|
13 | 14 | </div> |
|
14 | 15 | <div class="checkboxes"> |
|
15 | ||
|
16 | <div class="form-check"> | |
|
17 | <label class="form-check-label"> | |
|
18 | <input type="radio" id="2faEnabled" value="1" ${'checked' if c.state_of_2fa else ''}> | |
|
19 | ${_('Enabled')} | |
|
20 | </label> | |
|
21 | <label class="form-check-label"> | |
|
22 | <input type="radio" id="2faDisabled" value="0" ${'checked' if not c.state_of_2fa else ''}> | |
|
23 | ${_('Disabled')} | |
|
24 | </label> | |
|
25 | </div> | |
|
26 | 16 | % if c.locked_2fa: |
|
27 | 17 | <span class="help-block">${_('2FA settings cannot be changed here, because 2FA was forced enabled by RhodeCode Administrator.')}</span> |
|
18 | ||
|
19 | % else: | |
|
20 | <div class="form-check"> | |
|
21 | <input type="radio" id="2faEnabled" name="2fa_status" value="1" ${'checked=1' if c.state_of_2fa else ''}/> | |
|
22 | <label for="2faEnabled">${_('Enable 2FA')}</label> | |
|
23 | ||
|
24 | <input type="radio" id="2faDisabled" name="2fa_status" value="0" ${'checked=1' if not c.state_of_2fa else ''} /> | |
|
25 | <label for="2faDisabled">${_('Disable 2FA')}</label> | |
|
26 | </div> | |
|
28 | 27 | % endif |
|
28 | ||
|
29 | 29 | </div> |
|
30 | 30 | </div> |
|
31 | 31 | </div> |
|
32 | 32 | <button id="saveBtn" class="btn btn-primary" ${'disabled' if c.locked_2fa else ''}>${_('Save')}</button> |
|
33 | 33 | </div> |
|
34 | 34 | </div> |
|
35 | ${h.end_form()} | |
|
35 | 36 | </div> |
|
36 | 37 | |
|
37 | 38 | % if c.state_of_2fa: |
|
39 | ||
|
40 | ||
|
41 | % if not c.user_seen_2fa_recovery_codes: | |
|
42 | ||
|
43 | <div class="panel panel-warning"> | |
|
44 | <div class="panel-heading" id="advanced-archive"> | |
|
45 | <h3 class="panel-title">${_('2FA Recovery codes')} <a class="permalink" href="#advanced-archive"> ΒΆ</a></h3> | |
|
46 | </div> | |
|
47 | <div class="panel-body"> | |
|
48 | <p> | |
|
49 | ${_('You have not seen your 2FA recovery codes yet.')} | |
|
50 | ${_('Please save them in a safe place, or you will lose access to your account in case of lost access to authenticator app.')} | |
|
51 | </p> | |
|
52 | <br/> | |
|
53 | <a href="${request.route_path('my_account_enable_2fa', _query={'show-recovery-codes': 1})}" class="btn btn-primary">${_('Show recovery codes')}</a> | |
|
54 | </div> | |
|
55 | </div> | |
|
56 | % endif | |
|
57 | ||
|
58 | ||
|
59 | ${h.secure_form(h.route_path('my_account_regenerate_2fa_recovery_codes'), request=request)} | |
|
38 | 60 | <div class="panel panel-default"> |
|
39 | 61 | <div class="panel-heading"> |
|
40 | 62 | <h3 class="panel-title">${_('Regenerate 2FA recovery codes for your account')}</h3> |
|
41 | 63 | </div> |
|
42 | 64 | <div class="panel-body"> |
|
43 | 65 | <form id="2faForm"> |
|
44 | <input type="text" name="totp" placeholder="${_('Verify the code from the app')}" pattern="\d{6}" | |
|
45 | style="width: 20%"> | |
|
46 | <button type="button" class="btn btn-primary" onclick="submitForm()">Verify</button> | |
|
66 | <input type="text" name="totp" placeholder="${_('Verify the code from the app')}" pattern="\d{6}" style="width: 20%"> | |
|
67 | <button type="submit" class="btn btn-primary">${_('Verify and generate new codes')}</button> | |
|
47 | 68 | </form> |
|
48 | <div id="result"></div> | |
|
49 | 69 | </div> |
|
50 | 70 | |
|
51 | 71 | </div> |
|
72 | ${h.end_form()} | |
|
73 | % endif | |
|
52 | 74 |
|
|
53 | % endif | |
|
54 | 75 | |
|
55 | 76 | <script> |
|
56 | function submitForm() { | |
|
57 | let formData = new FormData(document.getElementById("2faForm")); | |
|
58 | let xhr = new XMLHttpRequest(); | |
|
59 | 77 | |
|
60 | let success = function (response) { | |
|
61 | let recovery_codes = response.recovery_codes; | |
|
62 | showRecoveryCodesPopup(recovery_codes); | |
|
63 | } | |
|
64 | ||
|
65 | xhr.onreadystatechange = function () { | |
|
66 | if (xhr.readyState == 4 && xhr.status == 200) { | |
|
67 | let responseDoc = new DOMParser().parseFromString(xhr.responseText, "text/html"); | |
|
68 | let contentToDisplay = responseDoc.querySelector('#formErrors'); | |
|
69 | if (contentToDisplay) { | |
|
70 | document.getElementById("result").innerHTML = contentToDisplay.innerHTML; | |
|
71 | } else { | |
|
72 | let regenerate_url = pyroutes.url('my_account_regenerate_2fa_recovery_codes'); | |
|
73 | ajaxPOST(regenerate_url, {'csrf_token': CSRF_TOKEN}, success); | |
|
74 | } | |
|
75 | } | |
|
76 | }; | |
|
77 | let url = pyroutes.url('check_2fa'); | |
|
78 | xhr.open("POST", url, true); | |
|
79 | xhr.send(formData); | |
|
80 | } | |
|
81 | ||
|
82 | document.getElementById('2faEnabled').addEventListener('click', function () { | |
|
83 | document.getElementById('2faDisabled').checked = false; | |
|
84 | }); | |
|
85 | document.getElementById('2faDisabled').addEventListener('click', function () { | |
|
86 | document.getElementById('2faEnabled').checked = false; | |
|
87 | }); | |
|
88 | ||
|
89 | function getStateValue() { | |
|
90 | if (document.getElementById('2faEnabled').checked) { | |
|
91 | return '1'; | |
|
92 | } else { | |
|
93 | return '0'; | |
|
94 | } | |
|
95 | }; | |
|
96 | ||
|
97 | function saveChanges(state) { | |
|
98 | ||
|
99 | let post_data = {'state': state, 'csrf_token': CSRF_TOKEN}; | |
|
100 | let url = pyroutes.url('my_account_configure_2fa'); | |
|
101 | ||
|
102 | ajaxPOST(url, post_data, function(){}, function(){}) | |
|
103 | } | |
|
104 | ||
|
105 | document.getElementById('saveBtn').addEventListener('click', function () { | |
|
106 | var state = getStateValue(); | |
|
107 | saveChanges(state); | |
|
108 | }); | |
|
109 | ||
|
110 | function showRecoveryCodesPopup(recoveryCodes) { | |
|
111 | let funcData = {'recoveryCodes': recoveryCodes} | |
|
112 | let recoveryCodesHtml = renderTemplate('recoveryCodes', funcData) | |
|
78 | function showRecoveryCodesPopup() { | |
|
113 | 79 | |
|
114 | 80 | SwalNoAnimation.fire({ |
|
115 | allowOutsideClick: false, | |
|
116 | confirmButtonText: _gettext('I Copied the codes'), | |
|
117 | title: _gettext('2FA Recovery Codes'), | |
|
118 | html: recoveryCodesHtml | |
|
81 | title: _gettext('2FA recovery codes'), | |
|
82 | html: '<span>Should you ever lose your phone or access to your one time password secret, each of these recovery codes can be used one time each to regain access to your account. Please save them in a safe place, or you will lose access to your account.</span>', | |
|
83 | showCancelButton: false, | |
|
84 | showConfirmButton: true, | |
|
85 | showLoaderOnConfirm: true, | |
|
86 | confirmButtonText: _gettext('Show now'), | |
|
87 | allowOutsideClick: function () { | |
|
88 | !Swal.isLoading() | |
|
89 | }, | |
|
90 | ||
|
91 | preConfirm: function () { | |
|
92 | ||
|
93 | var postData = { | |
|
94 | 'csrf_token': CSRF_TOKEN | |
|
95 | }; | |
|
96 | return new Promise(function (resolve, reject) { | |
|
97 | $.ajax({ | |
|
98 | type: 'POST', | |
|
99 | data: postData, | |
|
100 | url: pyroutes.url('my_account_show_2fa_recovery_codes'), | |
|
101 | headers: {'X-PARTIAL-XHR': true} | |
|
102 | }) | |
|
103 | .done(function (data) { | |
|
104 | resolve(data); | |
|
105 | }) | |
|
106 | .fail(function (jqXHR, textStatus, errorThrown) { | |
|
107 | var message = formatErrorMessage(jqXHR, textStatus, errorThrown); | |
|
108 | ajaxErrorSwal(message); | |
|
109 | }); | |
|
110 | }) | |
|
111 | } | |
|
112 | ||
|
119 | 113 | }) |
|
120 | ||
|
114 | .then(function (result) { | |
|
115 | if (result.value) { | |
|
116 | let funcData = {'recoveryCodes': result.value.recovery_codes} | |
|
117 | let recoveryCodesHtml = renderTemplate('recoveryCodes', funcData); | |
|
118 | SwalNoAnimation.fire({ | |
|
119 | allowOutsideClick: false, | |
|
120 | confirmButtonText: _gettext('I Copied the codes'), | |
|
121 | title: _gettext('2FA Recovery Codes'), | |
|
122 | html: recoveryCodesHtml | |
|
123 | }).then(function (result) { | |
|
124 | if (result.isConfirmed) { | |
|
125 | window.location.reload() | |
|
126 | } | |
|
127 | }) | |
|
128 | } | |
|
129 | }) | |
|
121 | 130 | } |
|
122 | ||
|
131 | % if request.GET.get('show-recovery-codes') == '1' and not c.user_seen_2fa_recovery_codes: | |
|
132 | showRecoveryCodesPopup(); | |
|
133 | % endif | |
|
123 | 134 | </script> |
@@ -1,153 +1,87 b'' | |||
|
1 | 1 | <%inherit file="base/root.mako"/> |
|
2 | 2 | |
|
3 | 3 | <%def name="title()"> |
|
4 |
${_('Setup |
|
|
4 | ${_('Setup 2FA')} | |
|
5 | 5 | %if c.rhodecode_name: |
|
6 | 6 | · ${h.branding(c.rhodecode_name)} |
|
7 | 7 | %endif |
|
8 | 8 | </%def> |
|
9 | 9 | <style>body{background-color:#eeeeee;}</style> |
|
10 | 10 | |
|
11 | 11 | <div class="loginbox"> |
|
12 | 12 | <div class="header-account"> |
|
13 | 13 | <div id="header-inner" class="title"> |
|
14 | 14 | <div id="logo"> |
|
15 | 15 | % if c.rhodecode_name: |
|
16 | 16 | <div class="branding"> |
|
17 | 17 | <a href="${h.route_path('home')}">${h.branding(c.rhodecode_name)}</a> |
|
18 | 18 | </div> |
|
19 | 19 | % endif |
|
20 | 20 | </div> |
|
21 | 21 | </div> |
|
22 | 22 | </div> |
|
23 | 23 | |
|
24 | 24 | <div class="loginwrapper"> |
|
25 | <h1>Setup the authenticator app</h1> | |
|
25 | <h1>${_('Setup the authenticator app')}</h1> | |
|
26 | ||
|
26 | 27 | <p>Authenticator apps like <a href='https://play.google.com/store/apps/details?id=com.google.android.apps.authenticator2' target="_blank" rel="noopener noreferrer">Google Authenticator</a>, etc. generate one-time passwords that are used as a second factor to verify you identity.</p> |
|
27 | 28 | <rhodecode-toast id="notifications"></rhodecode-toast> |
|
28 | 29 | |
|
29 | 30 | <div id="setup_2fa"> |
|
31 | ${h.secure_form(h.route_path('setup_2fa'), request=request, id='totp_form')} | |
|
30 | 32 | <div class="sign-in-title"> |
|
31 | <h1>${_('Scan the QR code')}</h1> | |
|
33 | <h1>${_('Scan the QR code')}: "${totp_name}"</h1> | |
|
32 | 34 | </div> |
|
33 | <p>Use an authenticator app to scan.</p> | |
|
34 |
<img src="data:image/png;base64, |
|
|
35 | <p>${_('Use an authenticator app to scan.')}</p> | |
|
36 | <img alt="qr-code" src="data:image/png;base64, ${qr}"/> | |
|
37 | ||
|
35 | 38 | <p>${_('Unable to scan?')} <a id="toggleLink">${_('Click here')}</a></p> |
|
36 | 39 | <div id="secretDiv" class="hidden"> |
|
37 | <p>${_('Copy and use this code to manually setup an authenticator app')}</p> | |
|
38 | <input type="text" id="secretField" value=${key}> | |
|
39 |
|
|
|
40 | <p>${_('Copy and use this code to manually set up an authenticator app')}</p> | |
|
41 | <input type="text" class="input-monospace" value="${key}" id="secret_totp" name="secret_totp" style="width: 400px"/> | |
|
42 | <i class="tooltip icon-clipboard clipboard-action" data-clipboard-text="${key}" title="${_('Copy the secret key')}"></i> | |
|
40 | 43 | </div> |
|
41 | <div id="codesPopup" class="modal"> | |
|
42 | <div class="modal-content"> | |
|
43 | <ul id="recoveryCodesList"></ul> | |
|
44 | <button id="copyAllBtn" class="btn btn-primary">Copy All</button> | |
|
45 | </div> | |
|
46 | </div> | |
|
47 | <br><br> | |
|
44 | ||
|
48 | 45 | <div id="verify_2fa"> |
|
49 | ${h.secure_form(h.route_path('setup_2fa'), request=request, id='totp_form')} | |
|
46 | ||
|
50 | 47 | <div class="form mt-4"> |
|
51 | 48 | <div class="field"> |
|
52 | 49 | <p> |
|
53 | 50 | <div class="label"> |
|
54 | 51 | <label for="totp" class="form-label text-dark font-weight-bold" style="text-align: left;">${_('Verify the code from the app')}:</label> |
|
55 | 52 | </div> |
|
56 | 53 | </p> |
|
57 | 54 | <p> |
|
58 | 55 | <div> |
|
59 | 56 | <div class="input-group"> |
|
60 | 57 | ${h.text('totp', class_='form-control', style='width: 40%;')} |
|
61 | 58 | <div id="formErrors"> |
|
62 | %if 'totp' in errors: | |
|
59 | % if 'totp' in errors: | |
|
63 | 60 | <span class="error-message">${errors.get('totp')}</span> |
|
64 | 61 | <br /> |
|
65 | %endif | |
|
62 | % endif | |
|
66 | 63 | </div> |
|
67 | 64 | <div class="input-group-append"> |
|
68 |
${h.submit(' |
|
|
65 | ${h.submit('verify_2fa',_('Verify'),class_="btn btn-primary", style='width: 40%;')} | |
|
69 | 66 | </div> |
|
70 | 67 | </div> |
|
71 | 68 | </div> |
|
72 | 69 | </p> |
|
73 | 70 | </div> |
|
74 | 71 | </div> |
|
75 | 72 | </div> |
|
73 | ${h.end_form()} | |
|
76 | 74 | </div> |
|
77 | 75 | </div> |
|
78 | 76 | </div> |
|
79 | <script> | |
|
80 | document.addEventListener('DOMContentLoaded', function() { | |
|
81 | let clipboardIcons = document.querySelectorAll('.clipboard-action'); | |
|
82 | 77 | |
|
83 | clipboardIcons.forEach(function(icon) { | |
|
84 | icon.addEventListener('click', function() { | |
|
85 | var inputField = document.getElementById('secretField'); | |
|
86 | inputField.select(); | |
|
87 | document.execCommand('copy'); | |
|
88 | ||
|
89 | }); | |
|
90 | }); | |
|
91 | }); | |
|
92 | </script> | |
|
93 | 78 | <script> |
|
94 | document.getElementById('toggleLink').addEventListener('click', function() { | |
|
95 | let hiddenField = document.getElementById('secretDiv'); | |
|
96 | if (hiddenField.classList.contains('hidden')) { | |
|
97 | hiddenField.classList.remove('hidden'); | |
|
98 | } | |
|
99 | }); | |
|
100 | </script> | |
|
101 | <script> | |
|
102 | const recovery_codes_string = '${recovery_codes}'; | |
|
103 | const cleaned_recovery_codes_string = recovery_codes_string | |
|
104 | .replace(/"/g, '"') | |
|
105 | .replace(/'/g, "'"); | |
|
106 | ||
|
107 | const recovery_codes = JSON.parse(cleaned_recovery_codes_string); | |
|
108 | ||
|
109 | const cleaned_recovery_codes = recovery_codes.map(code => code.replace(/['"]/g, '')); | |
|
110 | ||
|
111 | function showRecoveryCodesPopup() { | |
|
112 | const popup = document.getElementById("codesPopup"); | |
|
113 | const codesList = document.getElementById("recoveryCodesList"); | |
|
114 | const verify_btn = document.getElementById('save') | |
|
115 | 79 | |
|
116 | if (verify_btn.disabled) { | |
|
117 | codesList.innerHTML = ""; | |
|
118 | ||
|
119 | cleaned_recovery_codes.forEach(code => { | |
|
120 | const listItem = document.createElement("li"); | |
|
121 | listItem.textContent = code; | |
|
122 | codesList.appendChild(listItem); | |
|
123 | }); | |
|
124 | ||
|
125 | popup.style.display = "block"; | |
|
126 | verify_btn.disabled = false; | |
|
127 | } | |
|
128 | } | |
|
129 | ||
|
130 | document.getElementById("save").addEventListener("mouseover", showRecoveryCodesPopup); | |
|
80 | document.getElementById('toggleLink').addEventListener('click', function() { | |
|
81 | let hiddenField = document.getElementById('secretDiv'); | |
|
82 | if (hiddenField.classList.contains('hidden')) { | |
|
83 | hiddenField.classList.remove('hidden'); | |
|
84 | } | |
|
85 | }); | |
|
131 | 86 | |
|
132 | const popup = document.getElementById("codesPopup"); | |
|
133 | const closeButton = document.querySelector(".close"); | |
|
134 | window.onclick = function(event) { | |
|
135 | if (event.target === popup || event.target === closeButton) { | |
|
136 | popup.style.display = "none"; | |
|
137 | } | |
|
138 | } | |
|
139 | ||
|
140 | document.getElementById("copyAllBtn").addEventListener("click", function() { | |
|
141 | const codesListItems = document.querySelectorAll("#recoveryCodesList li"); | |
|
142 | const allCodes = Array.from(codesListItems).map(item => item.textContent).join(", "); | |
|
143 | ||
|
144 | const textarea = document.createElement('textarea'); | |
|
145 | textarea.value = allCodes; | |
|
146 | document.body.appendChild(textarea); | |
|
147 | ||
|
148 | textarea.select(); | |
|
149 | document.execCommand('copy'); | |
|
150 | ||
|
151 | document.body.removeChild(textarea); | |
|
152 | }); | |
|
153 | 87 | </script> |
@@ -1,37 +1,54 b'' | |||
|
1 |
<%inherit file=" |
|
|
1 | <%inherit file="base/root.mako"/> | |
|
2 | ||
|
2 | 3 | <%def name="title()"> |
|
3 |
${_(' |
|
|
4 | ${_('Verify 2FA')} | |
|
4 | 5 | %if c.rhodecode_name: |
|
5 | 6 | · ${h.branding(c.rhodecode_name)} |
|
6 | 7 | %endif |
|
7 | 8 | </%def> |
|
9 | <style>body{background-color:#eeeeee;}</style> | |
|
8 | 10 | |
|
9 | <div class="box"> | |
|
10 |
<div class=" |
|
|
11 | ${h.secure_form(h.route_path('check_2fa'), request=request, id='totp_form')} | |
|
12 | <div class="form mt-4" style="position: relative; margin-left: 35%; margin-top: 20%;"> | |
|
13 | <div class="field"> | |
|
14 |
< |
|
|
15 | <div class="label"> | |
|
16 | <label for="totp" class="form-label text-dark font-weight-bold" style="text-align: left;">${_('Verify the code from the app')}:</label> | |
|
11 | <div class="loginbox"> | |
|
12 | <div class="header-account"> | |
|
13 | <div id="header-inner" class="title"> | |
|
14 | <div id="logo"> | |
|
15 | % if c.rhodecode_name: | |
|
16 | <div class="branding"> | |
|
17 | <a href="${h.route_path('home')}">${h.branding(c.rhodecode_name)}</a> | |
|
17 | 18 | </div> |
|
18 |
|
|
|
19 | <p> | |
|
20 | <div> | |
|
21 | <div class="input-group"> | |
|
22 | ${h.text('totp', class_="form-control", style='width: 38%;')} | |
|
23 | <div id="formErrors"> | |
|
24 | %if 'totp' in errors: | |
|
25 | <span class="error-message">${errors.get('totp')}</span> | |
|
26 | <br /> | |
|
27 | %endif | |
|
28 | </div> | |
|
29 | <br /> | |
|
30 | ${h.submit('save',_('Verify'),class_="btn btn-primary", style='width: 40%;')} | |
|
31 | </div> | |
|
32 | </div> | |
|
33 | </p> | |
|
19 | % endif | |
|
34 | 20 | </div> |
|
35 | 21 | </div> |
|
36 | 22 | </div> |
|
23 | ||
|
24 | <div class="loginwrapper"> | |
|
25 | <rhodecode-toast id="notifications"></rhodecode-toast> | |
|
26 | ||
|
27 | <div id="register"> | |
|
28 | <div class="sign-in-title"> | |
|
29 | <h1>${_('Verify the code from the app')}</h1> | |
|
30 | </div> | |
|
31 | <div class="inner form"> | |
|
32 | ${h.secure_form(h.route_path('check_2fa'), request=request, id='totp_form')} | |
|
33 | <label for="totp">${_('Verification code')}:</label> | |
|
34 | ${h.text('totp', class_="form-control")} | |
|
35 | %if 'totp' in errors: | |
|
36 | <span class="error-message">${errors.get('totp')}</span> | |
|
37 | <br /> | |
|
38 | %endif | |
|
39 | <p class="help-block">${_('Enter the code from your two-factor authenticator app. If you\'ve lost your device, you can enter one of your recovery codes.')}</p> | |
|
40 | ||
|
41 | ${h.submit('send', _('Verify'), class_="btn sign-in")} | |
|
42 | <p class="help-block pull-right"> | |
|
43 | RhodeCode ${c.rhodecode_edition} | |
|
44 | </p> | |
|
45 | ${h.end_form()} | |
|
46 | </div> | |
|
47 | </div> | |
|
48 | ||
|
49 | </div> | |
|
37 | 50 | </div> |
|
51 | ||
|
52 | ||
|
53 | ||
|
54 |
General Comments 0
You need to be logged in to leave comments.
Login now