Show More
@@ -1,2042 +1,2050 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | import time |
|
23 | 23 | |
|
24 | 24 | import rhodecode |
|
25 | 25 | from rhodecode.api import ( |
|
26 | 26 | jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) |
|
27 | 27 | from rhodecode.api.utils import ( |
|
28 | 28 | has_superadmin_permission, Optional, OAttr, get_repo_or_error, |
|
29 | 29 | get_user_group_or_error, get_user_or_error, validate_repo_permissions, |
|
30 | 30 | get_perm_or_error, parse_args, get_origin, build_commit_data, |
|
31 | 31 | validate_set_owner_permissions) |
|
32 | 32 | from rhodecode.lib import audit_logger |
|
33 | 33 | from rhodecode.lib import repo_maintenance |
|
34 | 34 | from rhodecode.lib.auth import HasPermissionAnyApi, HasUserGroupPermissionAnyApi |
|
35 | 35 | from rhodecode.lib.celerylib.utils import get_task_id |
|
36 | 36 | from rhodecode.lib.utils2 import str2bool, time_to_datetime |
|
37 | 37 | from rhodecode.lib.ext_json import json |
|
38 | 38 | from rhodecode.lib.exceptions import StatusChangeOnClosedPullRequestError |
|
39 | 39 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
40 | 40 | from rhodecode.model.comment import CommentsModel |
|
41 | 41 | from rhodecode.model.db import ( |
|
42 | 42 | Session, ChangesetStatus, RepositoryField, Repository, RepoGroup, |
|
43 | 43 | ChangesetComment) |
|
44 | 44 | from rhodecode.model.repo import RepoModel |
|
45 | 45 | from rhodecode.model.scm import ScmModel, RepoList |
|
46 | 46 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel |
|
47 | 47 | from rhodecode.model import validation_schema |
|
48 | 48 | from rhodecode.model.validation_schema.schemas import repo_schema |
|
49 | 49 | |
|
50 | 50 | log = logging.getLogger(__name__) |
|
51 | 51 | |
|
52 | 52 | |
|
@jsonrpc_method()
def get_repo(request, apiuser, repoid, cache=Optional(True)):
    """
    Gets an existing repository by its name or repository_id.

    The members section so the output returns users groups or users
    associated with that repository.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param cache: use the cached value for last changeset
    :type: cache: Optional(bool)

    Example output:

    .. code-block:: bash

        {
          "error": null,
          "id": <repo_id>,
          "result": {
            "clone_uri": null,
            "created_on": "timestamp",
            "description": "repo description",
            "enable_downloads": false,
            "enable_locking": false,
            "enable_statistics": false,
            "followers": [
              {
                "active": true,
                "admin": false,
                "api_key": "****************************************",
                "api_keys": ["****************************************"],
                "email": "user@example.com",
                "emails": ["user@example.com"],
                "extern_name": "rhodecode",
                "extern_type": "rhodecode",
                "firstname": "username",
                "ip_addresses": [],
                "language": null,
                "last_login": "2015-09-16T17:16:35.854",
                "lastname": "surname",
                "user_id": <user_id>,
                "username": "name"
              }
            ],
            "fork_of": "parent-repo",
            "landing_rev": ["rev", "tip"],
            "last_changeset": {
              "author": "User <user@example.com>",
              "branch": "default",
              "date": "timestamp",
              "message": "last commit message",
              "parents": [{"raw_id": "commit-id"}],
              "raw_id": "commit-id",
              "revision": <revision number>,
              "short_id": "short id"
            },
            "lock_reason": null,
            "locked_by": null,
            "locked_date": null,
            "owner": "owner-name",
            "permissions": [
              {
                "name": "super-admin-name",
                "origin": "super-admin",
                "permission": "repository.admin",
                "type": "user"
              },
              {
                "name": "owner-name",
                "origin": "owner",
                "permission": "repository.admin",
                "type": "user"
              },
              {
                "name": "user-group-name",
                "origin": "permission",
                "permission": "repository.write",
                "type": "user_group"
              }
            ],
            "private": true,
            "repo_id": 676,
            "repo_name": "user-group/repo-name",
            "repo_type": "hg"
          }
        }
    """

    repo = get_repo_or_error(repoid)
    cache = Optional.extract(cache)

    # super-admins get secret fields (api keys, emails) in the output;
    # everybody else must hold at least read permission on the repository.
    include_secrets = has_superadmin_permission(apiuser)
    if not include_secrets:
        # check if we have at least read permission for this repo !
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # collect direct user permissions, then append user-group permissions
    permissions = [
        {
            'name': member.username,
            'permission': member.permission,
            'origin': get_origin(member),
            'type': "user",
        }
        for member in repo.permissions()
    ]
    permissions.extend(
        {
            'name': group.users_group_name,
            'permission': group.permission,
            'origin': get_origin(group),
            'type': "user_group",
        }
        for group in repo.permission_user_groups()
    )

    following_users = [
        follower.user.get_api_data(include_secrets=include_secrets)
        for follower in repo.followers]

    # a cold cache means we must refresh the last-changeset data first
    if not cache:
        repo.update_commit_cache()

    data = repo.get_api_data(include_secrets=include_secrets)
    data['permissions'] = permissions
    data['followers'] = following_users
    return data
|
200 | 200 | |
|
201 | 201 | |
|
@jsonrpc_method()
def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
    """
    Lists all existing repositories.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param root: specify root repository group to fetch repositories.
        filters the returned repositories to be members of given root group.
    :type root: Optional(None)
    :param traverse: traverse given root into subrepositories. With this flag
        set to False, it will only return top-level repositories from `root`.
        if root is empty it will return just top-level repositories.
    :type traverse: Optional(True)


    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: [
                  {
                    "repo_id" :          "<repo_id>",
                    "repo_name" :        "<reponame>"
                    "repo_type" :        "<repo_type>",
                    "clone_uri" :        "<clone_uri>",
                    "private": :         "<bool>",
                    "created_on" :       "<datetimecreated>",
                    "description" :      "<description>",
                    "landing_rev":       "<landing_rev>",
                    "owner":             "<repo_owner>",
                    "fork_of":           "<name_of_fork_parent>",
                    "enable_downloads":  "<bool>",
                    "enable_locking":    "<bool>",
                    "enable_statistics": "<bool>",
                  },
                  ...
                ]
        error:  null
    """

    include_secrets = has_superadmin_permission(apiuser)
    _perms = ('repository.read', 'repository.write', 'repository.admin',)
    extras = {'user': apiuser}

    root = Optional.extract(root)
    traverse = Optional.extract(traverse, binary=True)

    model = RepoModel()
    if root:
        # verify parent existance, if it's empty return an error
        parent = RepoGroup.get_by_group_name(root)
        if not parent:
            raise JSONRPCError(
                'Root repository group `{}` does not exist'.format(root))

        if traverse:
            repos = model.get_repos_for_root(root=root, traverse=traverse)
        else:
            repos = model.get_repos_for_root(root=parent)
    elif traverse:
        repos = model.get_all()
    else:
        # no root given and no traversal: return just top-level repositories
        repos = model.get_repos_for_root(root=None)

    # RepoList filters out entries the caller has no read access to
    repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
    return [repo.get_api_data(include_secrets=include_secrets)
            for repo in repo_list]
|
275 | 275 | |
|
276 | 276 | |
|
@jsonrpc_method()
def get_repo_changeset(request, apiuser, repoid, revision,
                       details=Optional('basic')):
    """
    Returns information about a changeset.

    Additionally parameters define the amount of details returned by
    this function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id
    :type repoid: str or int
    :param revision: revision for which listing should be done
    :type revision: str
    :param details: details can be 'basic|extended|full' full gives diff
        info details like the diff itself, and number of changed files etc.
    :type details: Optional(str)

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        # this validates the `details` parameter; the old message
        # misleadingly named it `ret_type`
        raise JSONRPCError(
            'details must be one of %s' % (
                ','.join(_changes_details_types)))

    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    try:
        cs = repo.get_commit(commit_id=revision, pre_load=pre_load)
    except TypeError as e:
        # str(e) instead of e.message: BaseException.message was
        # deprecated in Python 2.6 and removed in Python 3
        raise JSONRPCError(str(e))
    _cs_json = cs.__json__()
    _cs_json['diff'] = build_commit_data(cs, changes_details)
    if changes_details == 'full':
        _cs_json['refs'] = cs._get_refs()
    return _cs_json
|
325 | 325 | |
|
326 | 326 | |
|
@jsonrpc_method()
def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
                        details=Optional('basic')):
    """
    Returns a set of commits limited by the number starting
    from the `start_rev` option.

    Additional parameters define the amount of details returned by this
    function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param start_rev: The starting revision from where to get changesets.
    :type start_rev: str
    :param limit: Limit the number of commits to this amount. A value of
        -1 means no limit.
    :type limit: str or int
    :param details: Set the level of detail returned. Valid option are:
        ``basic``, ``extended`` and ``full``.
    :type details: Optional(str)

    .. note::

       Setting the parameter `details` to the value ``full`` is extensive
       and returns details like the diff itself, and the number
       of changed files.

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        # this validates the `details` parameter; the old message
        # misleadingly named it `ret_type`
        raise JSONRPCError(
            'details must be one of %s' % (
                ','.join(_changes_details_types)))

    limit = int(limit)
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    vcs_repo = repo.scm_instance()
    # SVN needs a special case to distinguish its index and commit id
    if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
        start_rev = vcs_repo.commit_ids[0]

    try:
        commits = vcs_repo.get_commits(
            start_id=start_rev, pre_load=pre_load)
    except TypeError as e:
        # str(e) instead of e.message: BaseException.message was
        # deprecated in Python 2.6 and removed in Python 3
        raise JSONRPCError(str(e))
    except Exception:
        log.exception('Fetching of commits failed')
        raise JSONRPCError('Error occurred during commit fetching')

    ret = []
    for cnt, commit in enumerate(commits):
        # -1 means unlimited; otherwise stop after `limit` commits
        # (explicit form of the original `cnt >= limit != -1` chained test)
        if limit != -1 and cnt >= limit:
            break
        _cs_json = commit.__json__()
        _cs_json['diff'] = build_commit_data(commit, changes_details)
        if changes_details == 'full':
            _cs_json['refs'] = {
                'branches': [commit.branch],
                'bookmarks': getattr(commit, 'bookmarks', []),
                'tags': commit.tags
            }
        ret.append(_cs_json)
    return ret
|
404 | 404 | |
|
405 | 405 | |
|
@jsonrpc_method()
def get_repo_nodes(request, apiuser, repoid, revision, root_path,
                   ret_type=Optional('all'), details=Optional('basic'),
                   max_file_bytes=Optional(None)):
    """
    Returns a list of nodes and children in a flat list for a given
    path at given revision.

    It's possible to specify ret_type to show only `files` or `dirs`.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision for which listing should be done.
    :type revision: str
    :param root_path: The path from which to start displaying.
    :type root_path: str
    :param ret_type: Set the return type. Valid options are
        ``all`` (default), ``files`` and ``dirs``.
    :type ret_type: Optional(str)
    :param details: Returns extended information about nodes, such as
        md5, binary, and or content.  The valid options are ``basic`` and
        ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type max_file_bytes: Optional(int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: [
                  {
                    "name" : "<name>"
                    "type" : "<type>",
                    "binary": "<true|false>" (only in extended mode)
                    "md5"  : "<md5 of file content>" (only in extended mode)
                  },
                  ...
                ]
        error:  null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    ret_type = Optional.extract(ret_type)
    details = Optional.extract(details)
    _extended_types = ['basic', 'full']
    if details not in _extended_types:
        # this validates the `details` parameter; the old message
        # misleadingly named it `ret_type` (which is a different parameter
        # of this call, validated further below)
        raise JSONRPCError(
            'details must be one of %s' % (','.join(_extended_types)))
    # 'basic' -> extended node info only; 'full' -> also include content
    extended_info = False
    content = False
    if details == 'basic':
        extended_info = True

    if details == 'full':
        extended_info = content = True

    _map = {}
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []

        _d, _f = ScmModel().get_nodes(
            repo, revision, root_path, flat=False,
            extended_info=extended_info, content=content,
            max_file_bytes=max_file_bytes)
        _map = {
            'all': _d + _f,
            'files': _f,
            'dirs': _d,
        }
        return _map[ret_type]
    except KeyError:
        # an unknown `ret_type` key misses the _map lookup above
        raise JSONRPCError(
            'ret_type must be one of %s' % (','.join(sorted(_map.keys()))))
    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(
            'failed to get repo: `%s` nodes' % repo.repo_name
        )
|
499 | 499 | |
|
500 | 500 | |
|
@jsonrpc_method()
def get_repo_refs(request, apiuser, repoid):
    """
    Returns a dictionary of current references. It returns
    bookmarks, branches, closed_branches, and tags for given repository

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        "result": {
            "bookmarks": {
              "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
              "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
            },
            "branches": {
              "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
              "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
            },
            "branches_closed": {},
            "tags": {
              "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
              "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
              "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
              "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
            }
        }
        error:  null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        # any backend failure (including an empty/broken repo) is reported
        # uniformly as a JSONRPC error
        vcs_instance = repo.scm_instance()
        return vcs_instance.refs()
    except Exception:
        log.exception("Exception occurred while trying to get repo refs")
        raise JSONRPCError(
            'failed to get repo: `%s` references' % repo.repo_name
        )
|
557 | 557 | |
|
558 | 558 | |
|
@jsonrpc_method()
def create_repo(
        request, apiuser, repo_name, repo_type,
        owner=Optional(OAttr('apiuser')),
        description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None),
        push_uri=Optional(None),
        landing_rev=Optional('rev:tip'),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False),
        copy_permissions=Optional(False)):
    """
    Creates a repository.

    * If the repository name contains "/", repository will be created inside
      a repository group or nested repository groups

      For example "foo/bar/repo1" will create |repo| called "repo1" inside
      group "foo/bar". You have to have permissions to access and write to
      the last repository group ("bar" in this example)

    This command can only be run using an |authtoken| with at least
    permissions to create repositories, or write permissions to
    parent repository groups.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repo_name: Set the repository name.
    :type repo_name: str
    :param repo_type: Set the repository type; 'hg','git', or 'svn'.
    :type repo_type: str
    :param owner: user_id or username
    :type owner: Optional(str)
    :param description: Set the repository description.
    :type description: Optional(str)
    :param private: set repository as private
    :type private: bool
    :param clone_uri: set clone_uri
    :type clone_uri: str
    :param push_uri: set push_uri
    :type push_uri: str
    :param landing_rev: <rev_type>:<rev>
    :type landing_rev: str
    :param enable_locking:
    :type enable_locking: bool
    :param enable_downloads:
    :type enable_downloads: bool
    :param enable_statistics:
    :type enable_statistics: bool
    :param copy_permissions: Copy permission from group in which the
        repository is being created.
    :type copy_permissions: bool


    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
                  "msg": "Created new repository `<reponame>`",
                  "success": true,
                  "task": "<celery task id or None if done sync>"
                }
        error:  null


    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error :  {
           'failed to create repository `<repo_name>`'
        }

    """

    # resolves the owner (falls back to apiuser) and checks the caller may
    # set it; raises for non-admins trying to assign someone else
    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)
    push_uri = Optional.extract(push_uri)
    landing_commit_ref = Optional.extract(landing_rev)

    # parameters still wrapped in Optional were not supplied by the caller;
    # fill them from the instance-wide default repository settings
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    if isinstance(private, Optional):
        private = defs.get('repo_private') or Optional.extract(private)
    if isinstance(repo_type, Optional):
        repo_type = defs.get('repo_type')
    if isinstance(enable_statistics, Optional):
        enable_statistics = defs.get('repo_enable_statistics')
    if isinstance(enable_locking, Optional):
        enable_locking = defs.get('repo_enable_locking')
    if isinstance(enable_downloads, Optional):
        enable_downloads = defs.get('repo_enable_downloads')

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        # user caller
        user=apiuser)

    try:
        # colander schema validation; also splits repo_name into its
        # repo-group part and the name without the group
        schema_data = schema.deserialize(dict(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_push_uri=push_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions,
            repo_enable_statistics=enable_statistics,
            repo_enable_downloads=enable_downloads,
            repo_enable_locking=enable_locking))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        # translate the validated schema keys into the form-data keys
        # RepoModel().create expects
        data = {
            'owner': owner,
            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'repo_description': schema_data['repo_description'],
            'repo_private': schema_data['repo_private'],
            'clone_uri': schema_data['repo_clone_uri'],
            'push_uri': schema_data['repo_push_uri'],
            'repo_landing_rev': schema_data['repo_landing_commit_ref'],
            'enable_statistics': schema_data['repo_enable_statistics'],
            'enable_locking': schema_data['repo_enable_locking'],
            'enable_downloads': schema_data['repo_enable_downloads'],
            'repo_copy_permissions': schema_data['repo_copy_permissions'],
        }

        task = RepoModel().create(form_data=data, cur_user=owner)
        task_id = get_task_id(task)
        # no commit, it's done in RepoModel, or async via celery
        return {
            'msg': "Created new repository `%s`" % (schema_data['repo_name'],),
            'success': True,  # cannot return the repo data here since fork
            # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create the repository %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to create repository `%s`' % (schema_data['repo_name'],))
|
709 | 715 | |
|
710 | 716 | |
|
@jsonrpc_method()
def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
                      description=Optional('')):
    """
    Adds an extra field to a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository id.
    :type repoid: str or int
    :param key: Create a unique field key for this repository.
    :type key: str
    :param label:
    :type label: Optional(str)
    :param description:
    :type description: Optional(str)
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # a missing label defaults to the key itself
    label = Optional.extract(label) or key
    description = Optional.extract(description)

    # keys must be unique per repository
    if RepositoryField.get_by_key_name(key, repo):
        raise JSONRPCError('Field with key '
                           '`%s` exists for repo `%s`' % (key, repoid))

    try:
        RepoModel().add_repo_field(repo, key, field_label=label,
                                   field_desc=description)
        Session().commit()
    except Exception:
        log.exception("Exception occurred while trying to add field to repo")
        raise JSONRPCError(
            'failed to create new field for repository `%s`' % (repoid,))
    return {
        'msg': "Added new repository field `%s`" % (key,),
        'success': True,
    }
|
756 | 762 | |
|
757 | 763 | |
|
@jsonrpc_method()
def remove_field_from_repo(request, apiuser, repoid, key):
    """
    Removes an extra field from a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param key: Set the unique field key for this repository.
    :type key: str
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # non-admins must hold repository.admin on this particular repo
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    field = RepositoryField.get_by_key_name(key, repo)
    if not field:
        # fixed grammar of the user-facing message ("does not exists" ->
        # "does not exist")
        raise JSONRPCError('Field with key `%s` does not '
                           'exist for repo `%s`' % (key, repoid))

    try:
        RepoModel().delete_repo_field(repo, field_key=key)
        Session().commit()
        return {
            'msg': "Deleted repository field `%s`" % (key,),
            'success': True,
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to delete field from repo")
        raise JSONRPCError(
            'failed to delete field for repository `%s`' % (repoid,))
|
796 | 802 | |
|
797 | 803 | |
|
@jsonrpc_method()
def update_repo(
        request, apiuser, repoid, repo_name=Optional(None),
        owner=Optional(OAttr('apiuser')), description=Optional(''),
        private=Optional(False), clone_uri=Optional(None),
        landing_rev=Optional('rev:tip'), fork_of=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False), fields=Optional(''),
        push_uri=Optional(None)):
    """
    Updates a repository with the given information.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    * If the repository name contains "/", repository will be updated
      accordingly with a repository group or nested repository groups

      For example repoid=repo-test name="foo/bar/repo-test" will update |repo|
      called "repo-test" and place it inside group "foo/bar".
      You have to have permissions to access and write to the last repository
      group ("bar" in this example)

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: repository name or repository ID.
    :type repoid: str or int
    :param repo_name: Update the |repo| name, including the
        repository group it's in.
    :type repo_name: str
    :param owner: Set the |repo| owner.
    :type owner: str
    :param fork_of: Set the |repo| as fork of another |repo|.
    :type fork_of: str
    :param description: Update the |repo| description.
    :type description: str
    :param private: Set the |repo| as private. (True | False)
    :type private: bool
    :param clone_uri: Update the |repo| clone URI.
    :type clone_uri: str
    :param push_uri: Update the |repo| push URI.
    :type push_uri: str
    :param landing_rev: Set the |repo| landing revision. Default is ``rev:tip``.
    :type landing_rev: str
    :param enable_statistics: Enable statistics on the |repo|, (True | False).
    :type enable_statistics: bool
    :param enable_locking: Enable |repo| locking.
    :type enable_locking: bool
    :param enable_downloads: Enable downloads from the |repo|, (True | False).
    :type enable_downloads: bool
    :param fields: Add extra fields to the |repo|. Use the following
        example format: ``field_key=field_val,field_key2=fieldval2``.
        Escape ', ' with \,
    :type fields: str
    """

    repo = get_repo_or_error(repoid)

    include_secrets = False
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))
    else:
        # only super-admins get secrets (e.g. clone credentials) echoed back
        include_secrets = True

    # for every optional argument: take the caller-supplied value, or fall
    # back to the repo's current setting so the schema sees a full picture
    updates = dict(
        repo_name=repo_name
        if not isinstance(repo_name, Optional) else repo.repo_name,

        fork_id=fork_of
        if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,

        user=owner
        if not isinstance(owner, Optional) else repo.user.username,

        repo_description=description
        if not isinstance(description, Optional) else repo.description,

        repo_private=private
        if not isinstance(private, Optional) else repo.private,

        clone_uri=clone_uri
        if not isinstance(clone_uri, Optional) else repo.clone_uri,

        # BUG FIX: `push_uri` was read below via updates['push_uri'] but was
        # never defined here (nor accepted as a parameter), raising KeyError
        # on every call. New parameter defaults to the current value.
        push_uri=push_uri
        if not isinstance(push_uri, Optional) else repo.push_uri,

        repo_landing_rev=landing_rev
        if not isinstance(landing_rev, Optional) else repo._landing_revision,

        repo_enable_statistics=enable_statistics
        if not isinstance(enable_statistics, Optional) else repo.enable_statistics,

        repo_enable_locking=enable_locking
        if not isinstance(enable_locking, Optional) else repo.enable_locking,

        repo_enable_downloads=enable_downloads
        if not isinstance(enable_downloads, Optional) else repo.enable_downloads)

    ref_choices, _labels = ScmModel().get_repo_landing_revs(
        request.translate, repo=repo)

    old_values = repo.get_api_data()
    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        # user caller
        user=apiuser,
        old_values=old_values)
    try:
        schema_data = schema.deserialize(dict(
            # we save old value, users cannot change type
            repo_type=repo.repo_type,

            repo_name=updates['repo_name'],
            repo_owner=updates['user'],
            repo_description=updates['repo_description'],
            repo_clone_uri=updates['clone_uri'],
            repo_push_uri=updates['push_uri'],
            repo_fork_of=updates['fork_id'],
            repo_private=updates['repo_private'],
            repo_landing_commit_ref=updates['repo_landing_rev'],
            repo_enable_statistics=updates['repo_enable_statistics'],
            repo_enable_downloads=updates['repo_enable_downloads'],
            repo_enable_locking=updates['repo_enable_locking']))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    # save validated data back into the updates dict
    validated_updates = dict(
        repo_name=schema_data['repo_group']['repo_name_without_group'],
        repo_group=schema_data['repo_group']['repo_group_id'],

        user=schema_data['repo_owner'],
        repo_description=schema_data['repo_description'],
        repo_private=schema_data['repo_private'],
        clone_uri=schema_data['repo_clone_uri'],
        push_uri=schema_data['repo_push_uri'],
        repo_landing_rev=schema_data['repo_landing_commit_ref'],
        repo_enable_statistics=schema_data['repo_enable_statistics'],
        repo_enable_locking=schema_data['repo_enable_locking'],
        repo_enable_downloads=schema_data['repo_enable_downloads'],
    )

    if schema_data['repo_fork_of']:
        fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
        validated_updates['fork_id'] = fork_repo.repo_id

    # extra fields
    fields = parse_args(Optional.extract(fields), key_prefix='ex_')
    if fields:
        validated_updates.update(fields)

    try:
        RepoModel().update(repo, **validated_updates)
        audit_logger.store_api(
            'repo.edit', action_data={'old_data': old_values},
            user=apiuser, repo=repo)
        Session().commit()
        return {
            'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
            'repository': repo.get_api_data(include_secrets=include_secrets)
        }
    except Exception:
        log.exception(
            u"Exception while trying to update the repository %s",
            repoid)
        raise JSONRPCError('failed to update repo `%s`' % repoid)
|
958 | 966 | |
|
959 | 967 | |
|
@jsonrpc_method()
def fork_repo(request, apiuser, repoid, fork_name,
              owner=Optional(OAttr('apiuser')),
              description=Optional(''),
              private=Optional(False),
              clone_uri=Optional(None),
              landing_rev=Optional('rev:tip'),
              copy_permissions=Optional(False)):
    """
    Creates a fork of the specified |repo|.

    * If ``fork_name`` contains "/", the fork is created inside a repository
      group or nested repository groups.

      For example "foo/bar/fork-repo" creates a fork called "fork-repo"
      inside the group "foo/bar". You must have permission to access and
      write to the last repository group ("bar" in this example).

    This command can only be run using an |authtoken| with minimum
    read permissions of the forked repo, create fork permissions for an user.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set repository name or repository ID.
    :type repoid: str or int
    :param fork_name: Set the fork name, including it's repository group membership.
    :type fork_name: str
    :param owner: Set the fork owner.
    :type owner: str
    :param description: Set the fork description.
    :type description: str
    :param copy_permissions: Copy permissions from parent |repo|. The
        default is False.
    :type copy_permissions: bool
    :param private: Make the fork private. The default is False.
    :type private: bool
    :param landing_rev: Set the landing revision. The default is tip.

    Example output:

    .. code-block:: bash

        id : <id_for_response>
        api_key : "<api_key>"
        args: {
            "repoid" : "<reponame or repo_id>",
            "fork_name": "<forkname>",
            "owner": "<username or user_id = Optional(=apiuser)>",
            "description": "<description>",
            "copy_permissions": "<bool>",
            "private": "<bool>",
            "landing_rev": "<landing_rev>"
        }

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Created fork of `<reponame>` as `<forkname>`",
            "success": true,
            "task": "<celery task id or None if done sync>"
        }
        error: null

    """

    parent_repo = get_repo_or_error(repoid)
    parent_name = parent_repo.repo_name

    if not has_superadmin_permission(apiuser):
        # regular callers need at least read access on the repo being forked
        _perms = (
            'repository.admin', 'repository.write', 'repository.read')
        validate_repo_permissions(apiuser, repoid, parent_repo, _perms)

        # ... plus the global permission to create forks at all
        if not HasPermissionAnyApi('hg.fork.repository')(user=apiuser):
            raise JSONRPCForbidden()

    # check if user can set owner parameter
    owner = validate_set_owner_permissions(apiuser, owner)

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        # user caller
        user=apiuser)

    try:
        validated = schema.deserialize(dict(
            repo_name=fork_name,
            repo_type=parent_repo.repo_type,
            repo_owner=owner.username,
            repo_description=Optional.extract(description),
            repo_landing_commit_ref=Optional.extract(landing_rev),
            repo_clone_uri=Optional.extract(clone_uri),
            repo_private=Optional.extract(private),
            repo_copy_permissions=Optional.extract(copy_permissions)))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        fork_form = {
            'fork_parent_id': parent_repo.repo_id,

            'repo_name': validated['repo_group']['repo_name_without_group'],
            'repo_name_full': validated['repo_name'],
            'repo_group': validated['repo_group']['repo_group_id'],
            'repo_type': validated['repo_type'],
            'description': validated['repo_description'],
            'private': validated['repo_private'],
            'copy_permissions': validated['repo_copy_permissions'],
            'landing_rev': validated['repo_landing_commit_ref'],
        }

        creation_task = RepoModel().create_fork(fork_form, cur_user=owner)
        # no commit, it's done in RepoModel, or async via celery
        task_id = get_task_id(creation_task)

        return {
            'msg': 'Created fork of `%s` as `%s`' % (
                parent_repo.repo_name, validated['repo_name']),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            u"Exception while trying to create fork %s",
            validated['repo_name'])
        raise JSONRPCError(
            'failed to fork repository `%s` as `%s`' % (
                parent_name, validated['repo_name']))
|
1101 | 1109 | |
|
1102 | 1110 | |
|
@jsonrpc_method()
def delete_repo(request, apiuser, repoid, forks=Optional('')):
    """
    Deletes a repository.

    * When the `forks` parameter is set it's possible to detach or delete
      forks of deleted repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param forks: Set to `detach` or `delete` forks from the |repo|.
    :type forks: Optional(str)

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Deleted repository `<reponame>`",
            "success": true
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        handle_forks = Optional.extract(forks)
        _forks_msg = ''
        _forks = list(repo.forks)
        if handle_forks == 'detach':
            _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
        elif handle_forks == 'delete':
            _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
        elif _forks:
            # refuse deletion while forks exist unless the caller explicitly
            # chose to detach or delete them
            raise JSONRPCError(
                'Cannot delete `%s` it still contains attached forks' %
                (repo.repo_name,)
            )
        old_data = repo.get_api_data()
        # BUG FIX: forward the *extracted* value; previously the raw
        # `Optional` wrapper was passed when `forks` was left at its default.
        RepoModel().delete(repo, forks=handle_forks)

        # keep a lightweight wrapper for the audit entry, the real repo row
        # is gone after delete()
        repo = audit_logger.RepoWrap(repo_id=None,
                                     repo_name=repo.repo_name)

        audit_logger.store_api(
            'repo.delete', action_data={'old_data': old_data},
            user=apiuser, repo=repo)

        # drop cached SCM data of the deleted repository
        ScmModel().mark_for_invalidation(repo_name, delete=True)
        Session().commit()
        return {
            'msg': 'Deleted repository `%s`%s' % (repo_name, _forks_msg),
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying to delete repo")
        raise JSONRPCError(
            'failed to delete repository `%s`' % (repo_name,)
        )
|
1173 | 1181 | |
|
1174 | 1182 | |
|
#TODO: marcink, change name ?
@jsonrpc_method()
def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
    """
    Invalidates the cache for the specified repository.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param delete_keys: This deletes the invalidated keys instead of
        just flagging them.
    :type delete_keys: Optional(``True`` | ``False``)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
          'msg': Cache for repository `<repository name>` was invalidated,
          'repository': <repository name>
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
           'Error occurred during cache invalidation action'
        }

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # write access on the repo is enough to trigger invalidation
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    should_delete = Optional.extract(delete_keys)
    try:
        ScmModel().mark_for_invalidation(repo.repo_name, delete=should_delete)
    except Exception:
        log.exception(
            "Exception occurred while trying to invalidate repo cache")
        raise JSONRPCError(
            'Error occurred during cache invalidation action'
        )
    return {
        'msg': 'Cache for repository `%s` was invalidated' % (repoid,),
        'repository': repo.repo_name
    }
|
1235 | 1243 | |
|
1236 | 1244 | |
|
#TODO: marcink, change name ?
@jsonrpc_method()
def lock(request, apiuser, repoid, locked=Optional(None),
         userid=Optional(OAttr('apiuser'))):
    """
    Sets the lock state of the specified |repo| by the given user.
    For more information, see :ref:`repo-locking`.

    * If the ``userid`` option is not set, the repository is locked to the
      user who called the method.
    * If the ``locked`` parameter is not set, the current lock state of the
      repository is displayed.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param locked: Sets the lock state.
    :type locked: Optional(``True`` | ``False``)
    :param userid: Set the repository lock to this user.
    :type userid: Optional(str or int)

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
          'repo': '<reponame>',
          'locked': <bool: lock state>,
          'locked_since': <int: lock timestamp>,
          'locked_by': <username of person who made the lock>,
          'lock_reason': <str: reason for locking>,
          'lock_state_changed': <bool: True if lock state has been changed in this request>,
          'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
          or
          'msg': 'Repo `<repository name>` not locked.'
          or
          'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'Error occurred locking repository `<reponame>`'
        }
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least write permission for this repo !
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

        # make sure normal user does not pass someone else userid,
        # he is not allowed to do that
        if not isinstance(userid, Optional) and userid != apiuser.user_id:
            raise JSONRPCError('userid is not the same as your user')

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)

    if isinstance(locked, Optional):
        # `locked` not given: just report the current lock state
        lockobj = repo.locked

        if lockobj[0] is None:
            _d = {
                'repo': repo.repo_name,
                'locked': False,
                'locked_since': None,
                'locked_by': None,
                'lock_reason': None,
                'lock_state_changed': False,
                'msg': 'Repo `%s` not locked.' % repo.repo_name
            }
            return _d
        else:
            _user_id, _time, _reason = lockobj
            # BUG FIX: resolve the user who actually holds the lock (taken
            # from the lock object) instead of the requesting/target userid;
            # the old code reported the caller as `locked_by` even when the
            # lock was made by somebody else.
            lock_user = get_user_or_error(_user_id)
            _d = {
                'repo': repo.repo_name,
                'locked': True,
                'locked_since': _time,
                'locked_by': lock_user.username,
                'lock_reason': _reason,
                'lock_state_changed': False,
                'msg': ('Repo `%s` locked by `%s` on `%s`.'
                        % (repo.repo_name, lock_user.username,
                           json.dumps(time_to_datetime(_time))))
            }
            return _d

    # force locked state through a flag
    else:
        locked = str2bool(locked)
        lock_reason = Repository.LOCK_API
        try:
            if locked:
                lock_time = time.time()
                Repository.lock(repo, user.user_id, lock_time, lock_reason)
            else:
                lock_time = None
                Repository.unlock(repo)
            _d = {
                'repo': repo.repo_name,
                'locked': locked,
                'locked_since': lock_time,
                'locked_by': user.username,
                'lock_reason': lock_reason,
                'lock_state_changed': True,
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
                        % (user.username, repo.repo_name, locked))
            }
            return _d
        except Exception:
            log.exception(
                "Exception occurred while trying to lock repository")
            raise JSONRPCError(
                'Error occurred locking repository `%s`' % repo.repo_name
            )
|
1369 | 1377 | |
|
1370 | 1378 | |
|
1371 | 1379 | @jsonrpc_method() |
|
1372 | 1380 | def comment_commit( |
|
1373 | 1381 | request, apiuser, repoid, commit_id, message, status=Optional(None), |
|
1374 | 1382 | comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE), |
|
1375 | 1383 | resolves_comment_id=Optional(None), |
|
1376 | 1384 | userid=Optional(OAttr('apiuser'))): |
|
1377 | 1385 | """ |
|
1378 | 1386 | Set a commit comment, and optionally change the status of the commit. |
|
1379 | 1387 | |
|
1380 | 1388 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1381 | 1389 | :type apiuser: AuthUser |
|
1382 | 1390 | :param repoid: Set the repository name or repository ID. |
|
1383 | 1391 | :type repoid: str or int |
|
1384 | 1392 | :param commit_id: Specify the commit_id for which to set a comment. |
|
1385 | 1393 | :type commit_id: str |
|
1386 | 1394 | :param message: The comment text. |
|
1387 | 1395 | :type message: str |
|
1388 | 1396 | :param status: (**Optional**) status of commit, one of: 'not_reviewed', |
|
1389 | 1397 | 'approved', 'rejected', 'under_review' |
|
1390 | 1398 | :type status: str |
|
1391 | 1399 | :param comment_type: Comment type, one of: 'note', 'todo' |
|
1392 | 1400 | :type comment_type: Optional(str), default: 'note' |
|
1393 | 1401 | :param userid: Set the user name of the comment creator. |
|
1394 | 1402 | :type userid: Optional(str or int) |
|
1395 | 1403 | |
|
1396 | 1404 | Example error output: |
|
1397 | 1405 | |
|
1398 | 1406 | .. code-block:: bash |
|
1399 | 1407 | |
|
1400 | 1408 | { |
|
1401 | 1409 | "id" : <id_given_in_input>, |
|
1402 | 1410 | "result" : { |
|
1403 | 1411 | "msg": "Commented on commit `<commit_id>` for repository `<repoid>`", |
|
1404 | 1412 | "status_change": null or <status>, |
|
1405 | 1413 | "success": true |
|
1406 | 1414 | }, |
|
1407 | 1415 | "error" : null |
|
1408 | 1416 | } |
|
1409 | 1417 | |
|
1410 | 1418 | """ |
|
1411 | 1419 | repo = get_repo_or_error(repoid) |
|
1412 | 1420 | if not has_superadmin_permission(apiuser): |
|
1413 | 1421 | _perms = ('repository.read', 'repository.write', 'repository.admin') |
|
1414 | 1422 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1415 | 1423 | |
|
1416 | 1424 | try: |
|
1417 | 1425 | commit_id = repo.scm_instance().get_commit(commit_id=commit_id).raw_id |
|
1418 | 1426 | except Exception as e: |
|
1419 | 1427 | log.exception('Failed to fetch commit') |
|
1420 | 1428 | raise JSONRPCError(e.message) |
|
1421 | 1429 | |
|
1422 | 1430 | if isinstance(userid, Optional): |
|
1423 | 1431 | userid = apiuser.user_id |
|
1424 | 1432 | |
|
1425 | 1433 | user = get_user_or_error(userid) |
|
1426 | 1434 | status = Optional.extract(status) |
|
1427 | 1435 | comment_type = Optional.extract(comment_type) |
|
1428 | 1436 | resolves_comment_id = Optional.extract(resolves_comment_id) |
|
1429 | 1437 | |
|
1430 | 1438 | allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES] |
|
1431 | 1439 | if status and status not in allowed_statuses: |
|
1432 | 1440 | raise JSONRPCError('Bad status, must be on ' |
|
1433 | 1441 | 'of %s got %s' % (allowed_statuses, status,)) |
|
1434 | 1442 | |
|
1435 | 1443 | if resolves_comment_id: |
|
1436 | 1444 | comment = ChangesetComment.get(resolves_comment_id) |
|
1437 | 1445 | if not comment: |
|
1438 | 1446 | raise JSONRPCError( |
|
1439 | 1447 | 'Invalid resolves_comment_id `%s` for this commit.' |
|
1440 | 1448 | % resolves_comment_id) |
|
1441 | 1449 | if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO: |
|
1442 | 1450 | raise JSONRPCError( |
|
1443 | 1451 | 'Comment `%s` is wrong type for setting status to resolved.' |
|
1444 | 1452 | % resolves_comment_id) |
|
1445 | 1453 | |
|
1446 | 1454 | try: |
|
1447 | 1455 | rc_config = SettingsModel().get_all_settings() |
|
1448 | 1456 | renderer = rc_config.get('rhodecode_markup_renderer', 'rst') |
|
1449 | 1457 | status_change_label = ChangesetStatus.get_status_lbl(status) |
|
1450 | 1458 | comment = CommentsModel().create( |
|
1451 | 1459 | message, repo, user, commit_id=commit_id, |
|
1452 | 1460 | status_change=status_change_label, |
|
1453 | 1461 | status_change_type=status, |
|
1454 | 1462 | renderer=renderer, |
|
1455 | 1463 | comment_type=comment_type, |
|
1456 | 1464 | resolves_comment_id=resolves_comment_id |
|
1457 | 1465 | ) |
|
1458 | 1466 | if status: |
|
1459 | 1467 | # also do a status change |
|
1460 | 1468 | try: |
|
1461 | 1469 | ChangesetStatusModel().set_status( |
|
1462 | 1470 | repo, status, user, comment, revision=commit_id, |
|
1463 | 1471 | dont_allow_on_closed_pull_request=True |
|
1464 | 1472 | ) |
|
1465 | 1473 | except StatusChangeOnClosedPullRequestError: |
|
1466 | 1474 | log.exception( |
|
1467 | 1475 | "Exception occurred while trying to change repo commit status") |
|
1468 | 1476 | msg = ('Changing status on a changeset associated with ' |
|
1469 | 1477 | 'a closed pull request is not allowed') |
|
1470 | 1478 | raise JSONRPCError(msg) |
|
1471 | 1479 | |
|
1472 | 1480 | Session().commit() |
|
1473 | 1481 | return { |
|
1474 | 1482 | 'msg': ( |
|
1475 | 1483 | 'Commented on commit `%s` for repository `%s`' % ( |
|
1476 | 1484 | comment.revision, repo.repo_name)), |
|
1477 | 1485 | 'status_change': status, |
|
1478 | 1486 | 'success': True, |
|
1479 | 1487 | } |
|
1480 | 1488 | except JSONRPCError: |
|
1481 | 1489 | # catch any inside errors, and re-raise them to prevent from |
|
1482 | 1490 | # below global catch to silence them |
|
1483 | 1491 | raise |
|
1484 | 1492 | except Exception: |
|
1485 | 1493 | log.exception("Exception occurred while trying to comment on commit") |
|
1486 | 1494 | raise JSONRPCError( |
|
1487 | 1495 | 'failed to set comment on repository `%s`' % (repo.repo_name,) |
|
1488 | 1496 | ) |
|
1489 | 1497 | |
|
1490 | 1498 | |
|
1491 | 1499 | @jsonrpc_method() |
|
1492 | 1500 | def grant_user_permission(request, apiuser, repoid, userid, perm): |
|
1493 | 1501 | """ |
|
1494 | 1502 | Grant permissions for the specified user on the given repository, |
|
1495 | 1503 | or update existing permissions if found. |
|
1496 | 1504 | |
|
1497 | 1505 | This command can only be run using an |authtoken| with admin |
|
1498 | 1506 | permissions on the |repo|. |
|
1499 | 1507 | |
|
1500 | 1508 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1501 | 1509 | :type apiuser: AuthUser |
|
1502 | 1510 | :param repoid: Set the repository name or repository ID. |
|
1503 | 1511 | :type repoid: str or int |
|
1504 | 1512 | :param userid: Set the user name. |
|
1505 | 1513 | :type userid: str |
|
1506 | 1514 | :param perm: Set the user permissions, using the following format |
|
1507 | 1515 | ``(repository.(none|read|write|admin))`` |
|
1508 | 1516 | :type perm: str |
|
1509 | 1517 | |
|
1510 | 1518 | Example output: |
|
1511 | 1519 | |
|
1512 | 1520 | .. code-block:: bash |
|
1513 | 1521 | |
|
1514 | 1522 | id : <id_given_in_input> |
|
1515 | 1523 | result: { |
|
1516 | 1524 | "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`", |
|
1517 | 1525 | "success": true |
|
1518 | 1526 | } |
|
1519 | 1527 | error: null |
|
1520 | 1528 | """ |
|
1521 | 1529 | |
|
1522 | 1530 | repo = get_repo_or_error(repoid) |
|
1523 | 1531 | user = get_user_or_error(userid) |
|
1524 | 1532 | perm = get_perm_or_error(perm) |
|
1525 | 1533 | if not has_superadmin_permission(apiuser): |
|
1526 | 1534 | _perms = ('repository.admin',) |
|
1527 | 1535 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1528 | 1536 | |
|
1529 | 1537 | try: |
|
1530 | 1538 | |
|
1531 | 1539 | RepoModel().grant_user_permission(repo=repo, user=user, perm=perm) |
|
1532 | 1540 | |
|
1533 | 1541 | Session().commit() |
|
1534 | 1542 | return { |
|
1535 | 1543 | 'msg': 'Granted perm: `%s` for user: `%s` in repo: `%s`' % ( |
|
1536 | 1544 | perm.permission_name, user.username, repo.repo_name |
|
1537 | 1545 | ), |
|
1538 | 1546 | 'success': True |
|
1539 | 1547 | } |
|
1540 | 1548 | except Exception: |
|
1541 | 1549 | log.exception( |
|
1542 | 1550 | "Exception occurred while trying edit permissions for repo") |
|
1543 | 1551 | raise JSONRPCError( |
|
1544 | 1552 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
1545 | 1553 | userid, repoid |
|
1546 | 1554 | ) |
|
1547 | 1555 | ) |
|
1548 | 1556 | |
|
1549 | 1557 | |
|
1550 | 1558 | @jsonrpc_method() |
|
1551 | 1559 | def revoke_user_permission(request, apiuser, repoid, userid): |
|
1552 | 1560 | """ |
|
1553 | 1561 | Revoke permission for a user on the specified repository. |
|
1554 | 1562 | |
|
1555 | 1563 | This command can only be run using an |authtoken| with admin |
|
1556 | 1564 | permissions on the |repo|. |
|
1557 | 1565 | |
|
1558 | 1566 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1559 | 1567 | :type apiuser: AuthUser |
|
1560 | 1568 | :param repoid: Set the repository name or repository ID. |
|
1561 | 1569 | :type repoid: str or int |
|
1562 | 1570 | :param userid: Set the user name of revoked user. |
|
1563 | 1571 | :type userid: str or int |
|
1564 | 1572 | |
|
1565 | 1573 | Example error output: |
|
1566 | 1574 | |
|
1567 | 1575 | .. code-block:: bash |
|
1568 | 1576 | |
|
1569 | 1577 | id : <id_given_in_input> |
|
1570 | 1578 | result: { |
|
1571 | 1579 | "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`", |
|
1572 | 1580 | "success": true |
|
1573 | 1581 | } |
|
1574 | 1582 | error: null |
|
1575 | 1583 | """ |
|
1576 | 1584 | |
|
1577 | 1585 | repo = get_repo_or_error(repoid) |
|
1578 | 1586 | user = get_user_or_error(userid) |
|
1579 | 1587 | if not has_superadmin_permission(apiuser): |
|
1580 | 1588 | _perms = ('repository.admin',) |
|
1581 | 1589 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1582 | 1590 | |
|
1583 | 1591 | try: |
|
1584 | 1592 | RepoModel().revoke_user_permission(repo=repo, user=user) |
|
1585 | 1593 | Session().commit() |
|
1586 | 1594 | return { |
|
1587 | 1595 | 'msg': 'Revoked perm for user: `%s` in repo: `%s`' % ( |
|
1588 | 1596 | user.username, repo.repo_name |
|
1589 | 1597 | ), |
|
1590 | 1598 | 'success': True |
|
1591 | 1599 | } |
|
1592 | 1600 | except Exception: |
|
1593 | 1601 | log.exception( |
|
1594 | 1602 | "Exception occurred while trying revoke permissions to repo") |
|
1595 | 1603 | raise JSONRPCError( |
|
1596 | 1604 | 'failed to edit permission for user: `%s` in repo: `%s`' % ( |
|
1597 | 1605 | userid, repoid |
|
1598 | 1606 | ) |
|
1599 | 1607 | ) |
|
1600 | 1608 | |
|
1601 | 1609 | |
|
1602 | 1610 | @jsonrpc_method() |
|
1603 | 1611 | def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm): |
|
1604 | 1612 | """ |
|
1605 | 1613 | Grant permission for a user group on the specified repository, |
|
1606 | 1614 | or update existing permissions. |
|
1607 | 1615 | |
|
1608 | 1616 | This command can only be run using an |authtoken| with admin |
|
1609 | 1617 | permissions on the |repo|. |
|
1610 | 1618 | |
|
1611 | 1619 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1612 | 1620 | :type apiuser: AuthUser |
|
1613 | 1621 | :param repoid: Set the repository name or repository ID. |
|
1614 | 1622 | :type repoid: str or int |
|
1615 | 1623 | :param usergroupid: Specify the ID of the user group. |
|
1616 | 1624 | :type usergroupid: str or int |
|
1617 | 1625 | :param perm: Set the user group permissions using the following |
|
1618 | 1626 | format: (repository.(none|read|write|admin)) |
|
1619 | 1627 | :type perm: str |
|
1620 | 1628 | |
|
1621 | 1629 | Example output: |
|
1622 | 1630 | |
|
1623 | 1631 | .. code-block:: bash |
|
1624 | 1632 | |
|
1625 | 1633 | id : <id_given_in_input> |
|
1626 | 1634 | result : { |
|
1627 | 1635 | "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`", |
|
1628 | 1636 | "success": true |
|
1629 | 1637 | |
|
1630 | 1638 | } |
|
1631 | 1639 | error : null |
|
1632 | 1640 | |
|
1633 | 1641 | Example error output: |
|
1634 | 1642 | |
|
1635 | 1643 | .. code-block:: bash |
|
1636 | 1644 | |
|
1637 | 1645 | id : <id_given_in_input> |
|
1638 | 1646 | result : null |
|
1639 | 1647 | error : { |
|
1640 | 1648 | "failed to edit permission for user group: `<usergroup>` in repo `<repo>`' |
|
1641 | 1649 | } |
|
1642 | 1650 | |
|
1643 | 1651 | """ |
|
1644 | 1652 | |
|
1645 | 1653 | repo = get_repo_or_error(repoid) |
|
1646 | 1654 | perm = get_perm_or_error(perm) |
|
1647 | 1655 | if not has_superadmin_permission(apiuser): |
|
1648 | 1656 | _perms = ('repository.admin',) |
|
1649 | 1657 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1650 | 1658 | |
|
1651 | 1659 | user_group = get_user_group_or_error(usergroupid) |
|
1652 | 1660 | if not has_superadmin_permission(apiuser): |
|
1653 | 1661 | # check if we have at least read permission for this user group ! |
|
1654 | 1662 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
1655 | 1663 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
1656 | 1664 | user=apiuser, user_group_name=user_group.users_group_name): |
|
1657 | 1665 | raise JSONRPCError( |
|
1658 | 1666 | 'user group `%s` does not exist' % (usergroupid,)) |
|
1659 | 1667 | |
|
1660 | 1668 | try: |
|
1661 | 1669 | RepoModel().grant_user_group_permission( |
|
1662 | 1670 | repo=repo, group_name=user_group, perm=perm) |
|
1663 | 1671 | |
|
1664 | 1672 | Session().commit() |
|
1665 | 1673 | return { |
|
1666 | 1674 | 'msg': 'Granted perm: `%s` for user group: `%s` in ' |
|
1667 | 1675 | 'repo: `%s`' % ( |
|
1668 | 1676 | perm.permission_name, user_group.users_group_name, |
|
1669 | 1677 | repo.repo_name |
|
1670 | 1678 | ), |
|
1671 | 1679 | 'success': True |
|
1672 | 1680 | } |
|
1673 | 1681 | except Exception: |
|
1674 | 1682 | log.exception( |
|
1675 | 1683 | "Exception occurred while trying change permission on repo") |
|
1676 | 1684 | raise JSONRPCError( |
|
1677 | 1685 | 'failed to edit permission for user group: `%s` in ' |
|
1678 | 1686 | 'repo: `%s`' % ( |
|
1679 | 1687 | usergroupid, repo.repo_name |
|
1680 | 1688 | ) |
|
1681 | 1689 | ) |
|
1682 | 1690 | |
|
1683 | 1691 | |
|
1684 | 1692 | @jsonrpc_method() |
|
1685 | 1693 | def revoke_user_group_permission(request, apiuser, repoid, usergroupid): |
|
1686 | 1694 | """ |
|
1687 | 1695 | Revoke the permissions of a user group on a given repository. |
|
1688 | 1696 | |
|
1689 | 1697 | This command can only be run using an |authtoken| with admin |
|
1690 | 1698 | permissions on the |repo|. |
|
1691 | 1699 | |
|
1692 | 1700 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1693 | 1701 | :type apiuser: AuthUser |
|
1694 | 1702 | :param repoid: Set the repository name or repository ID. |
|
1695 | 1703 | :type repoid: str or int |
|
1696 | 1704 | :param usergroupid: Specify the user group ID. |
|
1697 | 1705 | :type usergroupid: str or int |
|
1698 | 1706 | |
|
1699 | 1707 | Example output: |
|
1700 | 1708 | |
|
1701 | 1709 | .. code-block:: bash |
|
1702 | 1710 | |
|
1703 | 1711 | id : <id_given_in_input> |
|
1704 | 1712 | result: { |
|
1705 | 1713 | "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`", |
|
1706 | 1714 | "success": true |
|
1707 | 1715 | } |
|
1708 | 1716 | error: null |
|
1709 | 1717 | """ |
|
1710 | 1718 | |
|
1711 | 1719 | repo = get_repo_or_error(repoid) |
|
1712 | 1720 | if not has_superadmin_permission(apiuser): |
|
1713 | 1721 | _perms = ('repository.admin',) |
|
1714 | 1722 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1715 | 1723 | |
|
1716 | 1724 | user_group = get_user_group_or_error(usergroupid) |
|
1717 | 1725 | if not has_superadmin_permission(apiuser): |
|
1718 | 1726 | # check if we have at least read permission for this user group ! |
|
1719 | 1727 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
1720 | 1728 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
1721 | 1729 | user=apiuser, user_group_name=user_group.users_group_name): |
|
1722 | 1730 | raise JSONRPCError( |
|
1723 | 1731 | 'user group `%s` does not exist' % (usergroupid,)) |
|
1724 | 1732 | |
|
1725 | 1733 | try: |
|
1726 | 1734 | RepoModel().revoke_user_group_permission( |
|
1727 | 1735 | repo=repo, group_name=user_group) |
|
1728 | 1736 | |
|
1729 | 1737 | Session().commit() |
|
1730 | 1738 | return { |
|
1731 | 1739 | 'msg': 'Revoked perm for user group: `%s` in repo: `%s`' % ( |
|
1732 | 1740 | user_group.users_group_name, repo.repo_name |
|
1733 | 1741 | ), |
|
1734 | 1742 | 'success': True |
|
1735 | 1743 | } |
|
1736 | 1744 | except Exception: |
|
1737 | 1745 | log.exception("Exception occurred while trying revoke " |
|
1738 | 1746 | "user group permission on repo") |
|
1739 | 1747 | raise JSONRPCError( |
|
1740 | 1748 | 'failed to edit permission for user group: `%s` in ' |
|
1741 | 1749 | 'repo: `%s`' % ( |
|
1742 | 1750 | user_group.users_group_name, repo.repo_name |
|
1743 | 1751 | ) |
|
1744 | 1752 | ) |
|
1745 | 1753 | |
|
1746 | 1754 | |
|
1747 | 1755 | @jsonrpc_method() |
|
1748 | 1756 | def pull(request, apiuser, repoid): |
|
1749 | 1757 | """ |
|
1750 | 1758 | Triggers a pull on the given repository from a remote location. You |
|
1751 | 1759 | can use this to keep remote repositories up-to-date. |
|
1752 | 1760 | |
|
1753 | 1761 | This command can only be run using an |authtoken| with admin |
|
1754 | 1762 | rights to the specified repository. For more information, |
|
1755 | 1763 | see :ref:`config-token-ref`. |
|
1756 | 1764 | |
|
1757 | 1765 | This command takes the following options: |
|
1758 | 1766 | |
|
1759 | 1767 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1760 | 1768 | :type apiuser: AuthUser |
|
1761 | 1769 | :param repoid: The repository name or repository ID. |
|
1762 | 1770 | :type repoid: str or int |
|
1763 | 1771 | |
|
1764 | 1772 | Example output: |
|
1765 | 1773 | |
|
1766 | 1774 | .. code-block:: bash |
|
1767 | 1775 | |
|
1768 | 1776 | id : <id_given_in_input> |
|
1769 | 1777 | result : { |
|
1770 | 1778 | "msg": "Pulled from `<repository name>`" |
|
1771 | 1779 | "repository": "<repository name>" |
|
1772 | 1780 | } |
|
1773 | 1781 | error : null |
|
1774 | 1782 | |
|
1775 | 1783 | Example error output: |
|
1776 | 1784 | |
|
1777 | 1785 | .. code-block:: bash |
|
1778 | 1786 | |
|
1779 | 1787 | id : <id_given_in_input> |
|
1780 | 1788 | result : null |
|
1781 | 1789 | error : { |
|
1782 | 1790 | "Unable to pull changes from `<reponame>`" |
|
1783 | 1791 | } |
|
1784 | 1792 | |
|
1785 | 1793 | """ |
|
1786 | 1794 | |
|
1787 | 1795 | repo = get_repo_or_error(repoid) |
|
1788 | 1796 | if not has_superadmin_permission(apiuser): |
|
1789 | 1797 | _perms = ('repository.admin',) |
|
1790 | 1798 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1791 | 1799 | |
|
1792 | 1800 | try: |
|
1793 | 1801 | ScmModel().pull_changes(repo.repo_name, apiuser.username) |
|
1794 | 1802 | return { |
|
1795 | 1803 | 'msg': 'Pulled from `%s`' % repo.repo_name, |
|
1796 | 1804 | 'repository': repo.repo_name |
|
1797 | 1805 | } |
|
1798 | 1806 | except Exception: |
|
1799 | 1807 | log.exception("Exception occurred while trying to " |
|
1800 | 1808 | "pull changes from remote location") |
|
1801 | 1809 | raise JSONRPCError( |
|
1802 | 1810 | 'Unable to pull changes from `%s`' % repo.repo_name |
|
1803 | 1811 | ) |
|
1804 | 1812 | |
|
1805 | 1813 | |
|
1806 | 1814 | @jsonrpc_method() |
|
1807 | 1815 | def strip(request, apiuser, repoid, revision, branch): |
|
1808 | 1816 | """ |
|
1809 | 1817 | Strips the given revision from the specified repository. |
|
1810 | 1818 | |
|
1811 | 1819 | * This will remove the revision and all of its decendants. |
|
1812 | 1820 | |
|
1813 | 1821 | This command can only be run using an |authtoken| with admin rights to |
|
1814 | 1822 | the specified repository. |
|
1815 | 1823 | |
|
1816 | 1824 | This command takes the following options: |
|
1817 | 1825 | |
|
1818 | 1826 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1819 | 1827 | :type apiuser: AuthUser |
|
1820 | 1828 | :param repoid: The repository name or repository ID. |
|
1821 | 1829 | :type repoid: str or int |
|
1822 | 1830 | :param revision: The revision you wish to strip. |
|
1823 | 1831 | :type revision: str |
|
1824 | 1832 | :param branch: The branch from which to strip the revision. |
|
1825 | 1833 | :type branch: str |
|
1826 | 1834 | |
|
1827 | 1835 | Example output: |
|
1828 | 1836 | |
|
1829 | 1837 | .. code-block:: bash |
|
1830 | 1838 | |
|
1831 | 1839 | id : <id_given_in_input> |
|
1832 | 1840 | result : { |
|
1833 | 1841 | "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'" |
|
1834 | 1842 | "repository": "<repository name>" |
|
1835 | 1843 | } |
|
1836 | 1844 | error : null |
|
1837 | 1845 | |
|
1838 | 1846 | Example error output: |
|
1839 | 1847 | |
|
1840 | 1848 | .. code-block:: bash |
|
1841 | 1849 | |
|
1842 | 1850 | id : <id_given_in_input> |
|
1843 | 1851 | result : null |
|
1844 | 1852 | error : { |
|
1845 | 1853 | "Unable to strip commit <commit_hash> from repo `<repository name>`" |
|
1846 | 1854 | } |
|
1847 | 1855 | |
|
1848 | 1856 | """ |
|
1849 | 1857 | |
|
1850 | 1858 | repo = get_repo_or_error(repoid) |
|
1851 | 1859 | if not has_superadmin_permission(apiuser): |
|
1852 | 1860 | _perms = ('repository.admin',) |
|
1853 | 1861 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1854 | 1862 | |
|
1855 | 1863 | try: |
|
1856 | 1864 | ScmModel().strip(repo, revision, branch) |
|
1857 | 1865 | audit_logger.store_api( |
|
1858 | 1866 | 'repo.commit.strip', action_data={'commit_id': revision}, |
|
1859 | 1867 | repo=repo, |
|
1860 | 1868 | user=apiuser, commit=True) |
|
1861 | 1869 | |
|
1862 | 1870 | return { |
|
1863 | 1871 | 'msg': 'Stripped commit %s from repo `%s`' % ( |
|
1864 | 1872 | revision, repo.repo_name), |
|
1865 | 1873 | 'repository': repo.repo_name |
|
1866 | 1874 | } |
|
1867 | 1875 | except Exception: |
|
1868 | 1876 | log.exception("Exception while trying to strip") |
|
1869 | 1877 | raise JSONRPCError( |
|
1870 | 1878 | 'Unable to strip commit %s from repo `%s`' % ( |
|
1871 | 1879 | revision, repo.repo_name) |
|
1872 | 1880 | ) |
|
1873 | 1881 | |
|
1874 | 1882 | |
|
1875 | 1883 | @jsonrpc_method() |
|
1876 | 1884 | def get_repo_settings(request, apiuser, repoid, key=Optional(None)): |
|
1877 | 1885 | """ |
|
1878 | 1886 | Returns all settings for a repository. If key is given it only returns the |
|
1879 | 1887 | setting identified by the key or null. |
|
1880 | 1888 | |
|
1881 | 1889 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1882 | 1890 | :type apiuser: AuthUser |
|
1883 | 1891 | :param repoid: The repository name or repository id. |
|
1884 | 1892 | :type repoid: str or int |
|
1885 | 1893 | :param key: Key of the setting to return. |
|
1886 | 1894 | :type: key: Optional(str) |
|
1887 | 1895 | |
|
1888 | 1896 | Example output: |
|
1889 | 1897 | |
|
1890 | 1898 | .. code-block:: bash |
|
1891 | 1899 | |
|
1892 | 1900 | { |
|
1893 | 1901 | "error": null, |
|
1894 | 1902 | "id": 237, |
|
1895 | 1903 | "result": { |
|
1896 | 1904 | "extensions_largefiles": true, |
|
1897 | 1905 | "extensions_evolve": true, |
|
1898 | 1906 | "hooks_changegroup_push_logger": true, |
|
1899 | 1907 | "hooks_changegroup_repo_size": false, |
|
1900 | 1908 | "hooks_outgoing_pull_logger": true, |
|
1901 | 1909 | "phases_publish": "True", |
|
1902 | 1910 | "rhodecode_hg_use_rebase_for_merging": true, |
|
1903 | 1911 | "rhodecode_pr_merge_enabled": true, |
|
1904 | 1912 | "rhodecode_use_outdated_comments": true |
|
1905 | 1913 | } |
|
1906 | 1914 | } |
|
1907 | 1915 | """ |
|
1908 | 1916 | |
|
1909 | 1917 | # Restrict access to this api method to admins only. |
|
1910 | 1918 | if not has_superadmin_permission(apiuser): |
|
1911 | 1919 | raise JSONRPCForbidden() |
|
1912 | 1920 | |
|
1913 | 1921 | try: |
|
1914 | 1922 | repo = get_repo_or_error(repoid) |
|
1915 | 1923 | settings_model = VcsSettingsModel(repo=repo) |
|
1916 | 1924 | settings = settings_model.get_global_settings() |
|
1917 | 1925 | settings.update(settings_model.get_repo_settings()) |
|
1918 | 1926 | |
|
1919 | 1927 | # If only a single setting is requested fetch it from all settings. |
|
1920 | 1928 | key = Optional.extract(key) |
|
1921 | 1929 | if key is not None: |
|
1922 | 1930 | settings = settings.get(key, None) |
|
1923 | 1931 | except Exception: |
|
1924 | 1932 | msg = 'Failed to fetch settings for repository `{}`'.format(repoid) |
|
1925 | 1933 | log.exception(msg) |
|
1926 | 1934 | raise JSONRPCError(msg) |
|
1927 | 1935 | |
|
1928 | 1936 | return settings |
|
1929 | 1937 | |
|
1930 | 1938 | |
|
1931 | 1939 | @jsonrpc_method() |
|
1932 | 1940 | def set_repo_settings(request, apiuser, repoid, settings): |
|
1933 | 1941 | """ |
|
1934 | 1942 | Update repository settings. Returns true on success. |
|
1935 | 1943 | |
|
1936 | 1944 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1937 | 1945 | :type apiuser: AuthUser |
|
1938 | 1946 | :param repoid: The repository name or repository id. |
|
1939 | 1947 | :type repoid: str or int |
|
1940 | 1948 | :param settings: The new settings for the repository. |
|
1941 | 1949 | :type: settings: dict |
|
1942 | 1950 | |
|
1943 | 1951 | Example output: |
|
1944 | 1952 | |
|
1945 | 1953 | .. code-block:: bash |
|
1946 | 1954 | |
|
1947 | 1955 | { |
|
1948 | 1956 | "error": null, |
|
1949 | 1957 | "id": 237, |
|
1950 | 1958 | "result": true |
|
1951 | 1959 | } |
|
1952 | 1960 | """ |
|
1953 | 1961 | # Restrict access to this api method to admins only. |
|
1954 | 1962 | if not has_superadmin_permission(apiuser): |
|
1955 | 1963 | raise JSONRPCForbidden() |
|
1956 | 1964 | |
|
1957 | 1965 | if type(settings) is not dict: |
|
1958 | 1966 | raise JSONRPCError('Settings have to be a JSON Object.') |
|
1959 | 1967 | |
|
1960 | 1968 | try: |
|
1961 | 1969 | settings_model = VcsSettingsModel(repo=repoid) |
|
1962 | 1970 | |
|
1963 | 1971 | # Merge global, repo and incoming settings. |
|
1964 | 1972 | new_settings = settings_model.get_global_settings() |
|
1965 | 1973 | new_settings.update(settings_model.get_repo_settings()) |
|
1966 | 1974 | new_settings.update(settings) |
|
1967 | 1975 | |
|
1968 | 1976 | # Update the settings. |
|
1969 | 1977 | inherit_global_settings = new_settings.get( |
|
1970 | 1978 | 'inherit_global_settings', False) |
|
1971 | 1979 | settings_model.create_or_update_repo_settings( |
|
1972 | 1980 | new_settings, inherit_global_settings=inherit_global_settings) |
|
1973 | 1981 | Session().commit() |
|
1974 | 1982 | except Exception: |
|
1975 | 1983 | msg = 'Failed to update settings for repository `{}`'.format(repoid) |
|
1976 | 1984 | log.exception(msg) |
|
1977 | 1985 | raise JSONRPCError(msg) |
|
1978 | 1986 | |
|
1979 | 1987 | # Indicate success. |
|
1980 | 1988 | return True |
|
1981 | 1989 | |
|
1982 | 1990 | |
|
1983 | 1991 | @jsonrpc_method() |
|
1984 | 1992 | def maintenance(request, apiuser, repoid): |
|
1985 | 1993 | """ |
|
1986 | 1994 | Triggers a maintenance on the given repository. |
|
1987 | 1995 | |
|
1988 | 1996 | This command can only be run using an |authtoken| with admin |
|
1989 | 1997 | rights to the specified repository. For more information, |
|
1990 | 1998 | see :ref:`config-token-ref`. |
|
1991 | 1999 | |
|
1992 | 2000 | This command takes the following options: |
|
1993 | 2001 | |
|
1994 | 2002 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1995 | 2003 | :type apiuser: AuthUser |
|
1996 | 2004 | :param repoid: The repository name or repository ID. |
|
1997 | 2005 | :type repoid: str or int |
|
1998 | 2006 | |
|
1999 | 2007 | Example output: |
|
2000 | 2008 | |
|
2001 | 2009 | .. code-block:: bash |
|
2002 | 2010 | |
|
2003 | 2011 | id : <id_given_in_input> |
|
2004 | 2012 | result : { |
|
2005 | 2013 | "msg": "executed maintenance command", |
|
2006 | 2014 | "executed_actions": [ |
|
2007 | 2015 | <action_message>, <action_message2>... |
|
2008 | 2016 | ], |
|
2009 | 2017 | "repository": "<repository name>" |
|
2010 | 2018 | } |
|
2011 | 2019 | error : null |
|
2012 | 2020 | |
|
2013 | 2021 | Example error output: |
|
2014 | 2022 | |
|
2015 | 2023 | .. code-block:: bash |
|
2016 | 2024 | |
|
2017 | 2025 | id : <id_given_in_input> |
|
2018 | 2026 | result : null |
|
2019 | 2027 | error : { |
|
2020 | 2028 | "Unable to execute maintenance on `<reponame>`" |
|
2021 | 2029 | } |
|
2022 | 2030 | |
|
2023 | 2031 | """ |
|
2024 | 2032 | |
|
2025 | 2033 | repo = get_repo_or_error(repoid) |
|
2026 | 2034 | if not has_superadmin_permission(apiuser): |
|
2027 | 2035 | _perms = ('repository.admin',) |
|
2028 | 2036 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2029 | 2037 | |
|
2030 | 2038 | try: |
|
2031 | 2039 | maintenance = repo_maintenance.RepoMaintenance() |
|
2032 | 2040 | executed_actions = maintenance.execute(repo) |
|
2033 | 2041 | |
|
2034 | 2042 | return { |
|
2035 | 2043 | 'msg': 'executed maintenance command', |
|
2036 | 2044 | 'executed_actions': executed_actions, |
|
2037 | 2045 | 'repository': repo.repo_name |
|
2038 | 2046 | } |
|
2039 | 2047 | except Exception: |
|
2040 | 2048 | log.exception("Exception occurred while trying to run maintenance") |
|
2041 | 2049 | raise JSONRPCError( |
|
2042 | 2050 | 'Unable to execute maintenance on `%s`' % repo.repo_name) |
@@ -1,455 +1,458 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | from rhodecode.apps._base import add_route_with_slash |
|
21 | 21 | |
|
22 | 22 | |
|
23 | 23 | def includeme(config): |
|
24 | 24 | |
|
25 | 25 | # repo creating checks, special cases that aren't repo routes |
|
26 | 26 | config.add_route( |
|
27 | 27 | name='repo_creating', |
|
28 | 28 | pattern='/{repo_name:.*?[^/]}/repo_creating') |
|
29 | 29 | |
|
30 | 30 | config.add_route( |
|
31 | 31 | name='repo_creating_check', |
|
32 | 32 | pattern='/{repo_name:.*?[^/]}/repo_creating_check') |
|
33 | 33 | |
|
34 | 34 | # Summary |
|
35 | 35 | # NOTE(marcink): one additional route is defined in very bottom, catch |
|
36 | 36 | # all pattern |
|
37 | 37 | config.add_route( |
|
38 | 38 | name='repo_summary_explicit', |
|
39 | 39 | pattern='/{repo_name:.*?[^/]}/summary', repo_route=True) |
|
40 | 40 | config.add_route( |
|
41 | 41 | name='repo_summary_commits', |
|
42 | 42 | pattern='/{repo_name:.*?[^/]}/summary-commits', repo_route=True) |
|
43 | 43 | |
|
44 | 44 | # Commits |
|
45 | 45 | config.add_route( |
|
46 | 46 | name='repo_commit', |
|
47 | 47 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}', repo_route=True) |
|
48 | 48 | |
|
49 | 49 | config.add_route( |
|
50 | 50 | name='repo_commit_children', |
|
51 | 51 | pattern='/{repo_name:.*?[^/]}/changeset_children/{commit_id}', repo_route=True) |
|
52 | 52 | |
|
53 | 53 | config.add_route( |
|
54 | 54 | name='repo_commit_parents', |
|
55 | 55 | pattern='/{repo_name:.*?[^/]}/changeset_parents/{commit_id}', repo_route=True) |
|
56 | 56 | |
|
57 | 57 | config.add_route( |
|
58 | 58 | name='repo_commit_raw', |
|
59 | 59 | pattern='/{repo_name:.*?[^/]}/changeset-diff/{commit_id}', repo_route=True) |
|
60 | 60 | |
|
61 | 61 | config.add_route( |
|
62 | 62 | name='repo_commit_patch', |
|
63 | 63 | pattern='/{repo_name:.*?[^/]}/changeset-patch/{commit_id}', repo_route=True) |
|
64 | 64 | |
|
65 | 65 | config.add_route( |
|
66 | 66 | name='repo_commit_download', |
|
67 | 67 | pattern='/{repo_name:.*?[^/]}/changeset-download/{commit_id}', repo_route=True) |
|
68 | 68 | |
|
69 | 69 | config.add_route( |
|
70 | 70 | name='repo_commit_data', |
|
71 | 71 | pattern='/{repo_name:.*?[^/]}/changeset-data/{commit_id}', repo_route=True) |
|
72 | 72 | |
|
73 | 73 | config.add_route( |
|
74 | 74 | name='repo_commit_comment_create', |
|
75 | 75 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/create', repo_route=True) |
|
76 | 76 | |
|
77 | 77 | config.add_route( |
|
78 | 78 | name='repo_commit_comment_preview', |
|
79 | 79 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/preview', repo_route=True) |
|
80 | 80 | |
|
81 | 81 | config.add_route( |
|
82 | 82 | name='repo_commit_comment_delete', |
|
83 | 83 | pattern='/{repo_name:.*?[^/]}/changeset/{commit_id}/comment/{comment_id}/delete', repo_route=True) |
|
84 | 84 | |
|
85 | 85 | # still working url for backward compat. |
|
86 | 86 | config.add_route( |
|
87 | 87 | name='repo_commit_raw_deprecated', |
|
88 | 88 | pattern='/{repo_name:.*?[^/]}/raw-changeset/{commit_id}', repo_route=True) |
|
89 | 89 | |
|
90 | 90 | # Files |
|
91 | 91 | config.add_route( |
|
92 | 92 | name='repo_archivefile', |
|
93 | 93 | pattern='/{repo_name:.*?[^/]}/archive/{fname}', repo_route=True) |
|
94 | 94 | |
|
95 | 95 | config.add_route( |
|
96 | 96 | name='repo_files_diff', |
|
97 | 97 | pattern='/{repo_name:.*?[^/]}/diff/{f_path:.*}', repo_route=True) |
|
98 | 98 | config.add_route( # legacy route to make old links work |
|
99 | 99 | name='repo_files_diff_2way_redirect', |
|
100 | 100 | pattern='/{repo_name:.*?[^/]}/diff-2way/{f_path:.*}', repo_route=True) |
|
101 | 101 | |
|
102 | 102 | config.add_route( |
|
103 | 103 | name='repo_files', |
|
104 | 104 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/{f_path:.*}', repo_route=True) |
|
105 | 105 | config.add_route( |
|
106 | 106 | name='repo_files:default_path', |
|
107 | 107 | pattern='/{repo_name:.*?[^/]}/files/{commit_id}/', repo_route=True) |
|
108 | 108 | config.add_route( |
|
109 | 109 | name='repo_files:default_commit', |
|
110 | 110 | pattern='/{repo_name:.*?[^/]}/files', repo_route=True) |
|
111 | 111 | |
|
112 | 112 | config.add_route( |
|
113 | 113 | name='repo_files:rendered', |
|
114 | 114 | pattern='/{repo_name:.*?[^/]}/render/{commit_id}/{f_path:.*}', repo_route=True) |
|
115 | 115 | |
|
116 | 116 | config.add_route( |
|
117 | 117 | name='repo_files:annotated', |
|
118 | 118 | pattern='/{repo_name:.*?[^/]}/annotate/{commit_id}/{f_path:.*}', repo_route=True) |
|
119 | 119 | config.add_route( |
|
120 | 120 | name='repo_files:annotated_previous', |
|
121 | 121 | pattern='/{repo_name:.*?[^/]}/annotate-previous/{commit_id}/{f_path:.*}', repo_route=True) |
|
122 | 122 | |
|
123 | 123 | config.add_route( |
|
124 | 124 | name='repo_nodetree_full', |
|
125 | 125 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/{f_path:.*}', repo_route=True) |
|
126 | 126 | config.add_route( |
|
127 | 127 | name='repo_nodetree_full:default_path', |
|
128 | 128 | pattern='/{repo_name:.*?[^/]}/nodetree_full/{commit_id}/', repo_route=True) |
|
129 | 129 | |
|
130 | 130 | config.add_route( |
|
131 | 131 | name='repo_files_nodelist', |
|
132 | 132 | pattern='/{repo_name:.*?[^/]}/nodelist/{commit_id}/{f_path:.*}', repo_route=True) |
|
133 | 133 | |
|
134 | 134 | config.add_route( |
|
135 | 135 | name='repo_file_raw', |
|
136 | 136 | pattern='/{repo_name:.*?[^/]}/raw/{commit_id}/{f_path:.*}', repo_route=True) |
|
137 | 137 | |
|
138 | 138 | config.add_route( |
|
139 | 139 | name='repo_file_download', |
|
140 | 140 | pattern='/{repo_name:.*?[^/]}/download/{commit_id}/{f_path:.*}', repo_route=True) |
|
141 | 141 | config.add_route( # backward compat to keep old links working |
|
142 | 142 | name='repo_file_download:legacy', |
|
143 | 143 | pattern='/{repo_name:.*?[^/]}/rawfile/{commit_id}/{f_path:.*}', |
|
144 | 144 | repo_route=True) |
|
145 | 145 | |
|
146 | 146 | config.add_route( |
|
147 | 147 | name='repo_file_history', |
|
148 | 148 | pattern='/{repo_name:.*?[^/]}/history/{commit_id}/{f_path:.*}', repo_route=True) |
|
149 | 149 | |
|
150 | 150 | config.add_route( |
|
151 | 151 | name='repo_file_authors', |
|
152 | 152 | pattern='/{repo_name:.*?[^/]}/authors/{commit_id}/{f_path:.*}', repo_route=True) |
|
153 | 153 | |
|
154 | 154 | config.add_route( |
|
155 | 155 | name='repo_files_remove_file', |
|
156 | 156 | pattern='/{repo_name:.*?[^/]}/remove_file/{commit_id}/{f_path:.*}', |
|
157 | 157 | repo_route=True) |
|
158 | 158 | config.add_route( |
|
159 | 159 | name='repo_files_delete_file', |
|
160 | 160 | pattern='/{repo_name:.*?[^/]}/delete_file/{commit_id}/{f_path:.*}', |
|
161 | 161 | repo_route=True) |
|
162 | 162 | config.add_route( |
|
163 | 163 | name='repo_files_edit_file', |
|
164 | 164 | pattern='/{repo_name:.*?[^/]}/edit_file/{commit_id}/{f_path:.*}', |
|
165 | 165 | repo_route=True) |
|
166 | 166 | config.add_route( |
|
167 | 167 | name='repo_files_update_file', |
|
168 | 168 | pattern='/{repo_name:.*?[^/]}/update_file/{commit_id}/{f_path:.*}', |
|
169 | 169 | repo_route=True) |
|
170 | 170 | config.add_route( |
|
171 | 171 | name='repo_files_add_file', |
|
172 | 172 | pattern='/{repo_name:.*?[^/]}/add_file/{commit_id}/{f_path:.*}', |
|
173 | 173 | repo_route=True) |
|
174 | 174 | config.add_route( |
|
175 | 175 | name='repo_files_create_file', |
|
176 | 176 | pattern='/{repo_name:.*?[^/]}/create_file/{commit_id}/{f_path:.*}', |
|
177 | 177 | repo_route=True) |
|
178 | 178 | |
|
179 | 179 | # Refs data |
|
180 | 180 | config.add_route( |
|
181 | 181 | name='repo_refs_data', |
|
182 | 182 | pattern='/{repo_name:.*?[^/]}/refs-data', repo_route=True) |
|
183 | 183 | |
|
184 | 184 | config.add_route( |
|
185 | 185 | name='repo_refs_changelog_data', |
|
186 | 186 | pattern='/{repo_name:.*?[^/]}/refs-data-changelog', repo_route=True) |
|
187 | 187 | |
|
188 | 188 | config.add_route( |
|
189 | 189 | name='repo_stats', |
|
190 | 190 | pattern='/{repo_name:.*?[^/]}/repo_stats/{commit_id}', repo_route=True) |
|
191 | 191 | |
|
192 | 192 | # Changelog |
|
193 | 193 | config.add_route( |
|
194 | 194 | name='repo_changelog', |
|
195 | 195 | pattern='/{repo_name:.*?[^/]}/changelog', repo_route=True) |
|
196 | 196 | config.add_route( |
|
197 | 197 | name='repo_changelog_file', |
|
198 | 198 | pattern='/{repo_name:.*?[^/]}/changelog/{commit_id}/{f_path:.*}', repo_route=True) |
|
199 | 199 | config.add_route( |
|
200 | 200 | name='repo_changelog_elements', |
|
201 | 201 | pattern='/{repo_name:.*?[^/]}/changelog_elements', repo_route=True) |
|
202 | 202 | config.add_route( |
|
203 | 203 | name='repo_changelog_elements_file', |
|
204 | 204 | pattern='/{repo_name:.*?[^/]}/changelog_elements/{commit_id}/{f_path:.*}', repo_route=True) |
|
205 | 205 | |
|
206 | 206 | # Compare |
|
207 | 207 | config.add_route( |
|
208 | 208 | name='repo_compare_select', |
|
209 | 209 | pattern='/{repo_name:.*?[^/]}/compare', repo_route=True) |
|
210 | 210 | |
|
211 | 211 | config.add_route( |
|
212 | 212 | name='repo_compare', |
|
213 | 213 | pattern='/{repo_name:.*?[^/]}/compare/{source_ref_type}@{source_ref:.*?}...{target_ref_type}@{target_ref:.*?}', repo_route=True) |
|
214 | 214 | |
|
215 | 215 | # Tags |
|
216 | 216 | config.add_route( |
|
217 | 217 | name='tags_home', |
|
218 | 218 | pattern='/{repo_name:.*?[^/]}/tags', repo_route=True) |
|
219 | 219 | |
|
220 | 220 | # Branches |
|
221 | 221 | config.add_route( |
|
222 | 222 | name='branches_home', |
|
223 | 223 | pattern='/{repo_name:.*?[^/]}/branches', repo_route=True) |
|
224 | 224 | |
|
225 | 225 | # Bookmarks |
|
226 | 226 | config.add_route( |
|
227 | 227 | name='bookmarks_home', |
|
228 | 228 | pattern='/{repo_name:.*?[^/]}/bookmarks', repo_route=True) |
|
229 | 229 | |
|
230 | 230 | # Forks |
|
231 | 231 | config.add_route( |
|
232 | 232 | name='repo_fork_new', |
|
233 | 233 | pattern='/{repo_name:.*?[^/]}/fork', repo_route=True, |
|
234 | 234 | repo_accepted_types=['hg', 'git']) |
|
235 | 235 | |
|
236 | 236 | config.add_route( |
|
237 | 237 | name='repo_fork_create', |
|
238 | 238 | pattern='/{repo_name:.*?[^/]}/fork/create', repo_route=True, |
|
239 | 239 | repo_accepted_types=['hg', 'git']) |
|
240 | 240 | |
|
241 | 241 | config.add_route( |
|
242 | 242 | name='repo_forks_show_all', |
|
243 | 243 | pattern='/{repo_name:.*?[^/]}/forks', repo_route=True, |
|
244 | 244 | repo_accepted_types=['hg', 'git']) |
|
245 | 245 | config.add_route( |
|
246 | 246 | name='repo_forks_data', |
|
247 | 247 | pattern='/{repo_name:.*?[^/]}/forks/data', repo_route=True, |
|
248 | 248 | repo_accepted_types=['hg', 'git']) |
|
249 | 249 | |
|
250 | 250 | # Pull Requests |
|
251 | 251 | config.add_route( |
|
252 | 252 | name='pullrequest_show', |
|
253 | 253 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}', |
|
254 | 254 | repo_route=True) |
|
255 | 255 | |
|
256 | 256 | config.add_route( |
|
257 | 257 | name='pullrequest_show_all', |
|
258 | 258 | pattern='/{repo_name:.*?[^/]}/pull-request', |
|
259 | 259 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
260 | 260 | |
|
261 | 261 | config.add_route( |
|
262 | 262 | name='pullrequest_show_all_data', |
|
263 | 263 | pattern='/{repo_name:.*?[^/]}/pull-request-data', |
|
264 | 264 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
265 | 265 | |
|
266 | 266 | config.add_route( |
|
267 | 267 | name='pullrequest_repo_refs', |
|
268 | 268 | pattern='/{repo_name:.*?[^/]}/pull-request/refs/{target_repo_name:.*?[^/]}', |
|
269 | 269 | repo_route=True) |
|
270 | 270 | |
|
271 | 271 | config.add_route( |
|
272 | 272 | name='pullrequest_repo_destinations', |
|
273 | 273 | pattern='/{repo_name:.*?[^/]}/pull-request/repo-destinations', |
|
274 | 274 | repo_route=True) |
|
275 | 275 | |
|
276 | 276 | config.add_route( |
|
277 | 277 | name='pullrequest_new', |
|
278 | 278 | pattern='/{repo_name:.*?[^/]}/pull-request/new', |
|
279 | 279 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
280 | 280 | |
|
281 | 281 | config.add_route( |
|
282 | 282 | name='pullrequest_create', |
|
283 | 283 | pattern='/{repo_name:.*?[^/]}/pull-request/create', |
|
284 | 284 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
285 | 285 | |
|
286 | 286 | config.add_route( |
|
287 | 287 | name='pullrequest_update', |
|
288 | 288 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/update', |
|
289 | 289 | repo_route=True) |
|
290 | 290 | |
|
291 | 291 | config.add_route( |
|
292 | 292 | name='pullrequest_merge', |
|
293 | 293 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/merge', |
|
294 | 294 | repo_route=True) |
|
295 | 295 | |
|
296 | 296 | config.add_route( |
|
297 | 297 | name='pullrequest_delete', |
|
298 | 298 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/delete', |
|
299 | 299 | repo_route=True) |
|
300 | 300 | |
|
301 | 301 | config.add_route( |
|
302 | 302 | name='pullrequest_comment_create', |
|
303 | 303 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment', |
|
304 | 304 | repo_route=True) |
|
305 | 305 | |
|
306 | 306 | config.add_route( |
|
307 | 307 | name='pullrequest_comment_delete', |
|
308 | 308 | pattern='/{repo_name:.*?[^/]}/pull-request/{pull_request_id:\d+}/comment/{comment_id}/delete', |
|
309 | 309 | repo_route=True, repo_accepted_types=['hg', 'git']) |
|
310 | 310 | |
|
311 | 311 | # Settings |
|
312 | 312 | config.add_route( |
|
313 | 313 | name='edit_repo', |
|
314 | 314 | pattern='/{repo_name:.*?[^/]}/settings', repo_route=True) |
|
315 | 315 | # update is POST on edit_repo |
|
316 | 316 | |
|
317 | 317 | # Settings advanced |
|
318 | 318 | config.add_route( |
|
319 | 319 | name='edit_repo_advanced', |
|
320 | 320 | pattern='/{repo_name:.*?[^/]}/settings/advanced', repo_route=True) |
|
321 | 321 | config.add_route( |
|
322 | 322 | name='edit_repo_advanced_delete', |
|
323 | 323 | pattern='/{repo_name:.*?[^/]}/settings/advanced/delete', repo_route=True) |
|
324 | 324 | config.add_route( |
|
325 | 325 | name='edit_repo_advanced_locking', |
|
326 | 326 | pattern='/{repo_name:.*?[^/]}/settings/advanced/locking', repo_route=True) |
|
327 | 327 | config.add_route( |
|
328 | 328 | name='edit_repo_advanced_journal', |
|
329 | 329 | pattern='/{repo_name:.*?[^/]}/settings/advanced/journal', repo_route=True) |
|
330 | 330 | config.add_route( |
|
331 | 331 | name='edit_repo_advanced_fork', |
|
332 | 332 | pattern='/{repo_name:.*?[^/]}/settings/advanced/fork', repo_route=True) |
|
333 | 333 | |
|
334 | 334 | # Caches |
|
335 | 335 | config.add_route( |
|
336 | 336 | name='edit_repo_caches', |
|
337 | 337 | pattern='/{repo_name:.*?[^/]}/settings/caches', repo_route=True) |
|
338 | 338 | |
|
339 | 339 | # Permissions |
|
340 | 340 | config.add_route( |
|
341 | 341 | name='edit_repo_perms', |
|
342 | 342 | pattern='/{repo_name:.*?[^/]}/settings/permissions', repo_route=True) |
|
343 | 343 | |
|
344 | 344 | # Maintenance |
|
345 | 345 | config.add_route( |
|
346 | 346 | name='edit_repo_maintenance', |
|
347 | 347 | pattern='/{repo_name:.*?[^/]}/settings/maintenance', repo_route=True) |
|
348 | 348 | |
|
349 | 349 | config.add_route( |
|
350 | 350 | name='edit_repo_maintenance_execute', |
|
351 | 351 | pattern='/{repo_name:.*?[^/]}/settings/maintenance/execute', repo_route=True) |
|
352 | 352 | |
|
353 | 353 | # Fields |
|
354 | 354 | config.add_route( |
|
355 | 355 | name='edit_repo_fields', |
|
356 | 356 | pattern='/{repo_name:.*?[^/]}/settings/fields', repo_route=True) |
|
357 | 357 | config.add_route( |
|
358 | 358 | name='edit_repo_fields_create', |
|
359 | 359 | pattern='/{repo_name:.*?[^/]}/settings/fields/create', repo_route=True) |
|
360 | 360 | config.add_route( |
|
361 | 361 | name='edit_repo_fields_delete', |
|
362 | 362 | pattern='/{repo_name:.*?[^/]}/settings/fields/{field_id}/delete', repo_route=True) |
|
363 | 363 | |
|
364 | 364 | # Locking |
|
365 | 365 | config.add_route( |
|
366 | 366 | name='repo_edit_toggle_locking', |
|
367 | 367 | pattern='/{repo_name:.*?[^/]}/settings/toggle_locking', repo_route=True) |
|
368 | 368 | |
|
369 | 369 | # Remote |
|
370 | 370 | config.add_route( |
|
371 | 371 | name='edit_repo_remote', |
|
372 | 372 | pattern='/{repo_name:.*?[^/]}/settings/remote', repo_route=True) |
|
373 | 373 | config.add_route( |
|
374 | 374 | name='edit_repo_remote_pull', |
|
375 | 375 | pattern='/{repo_name:.*?[^/]}/settings/remote/pull', repo_route=True) |
|
376 | config.add_route( | |
|
377 | name='edit_repo_remote_push', | |
|
378 | pattern='/{repo_name:.*?[^/]}/settings/remote/push', repo_route=True) | |
|
376 | 379 | |
|
377 | 380 | # Statistics |
|
378 | 381 | config.add_route( |
|
379 | 382 | name='edit_repo_statistics', |
|
380 | 383 | pattern='/{repo_name:.*?[^/]}/settings/statistics', repo_route=True) |
|
381 | 384 | config.add_route( |
|
382 | 385 | name='edit_repo_statistics_reset', |
|
383 | 386 | pattern='/{repo_name:.*?[^/]}/settings/statistics/update', repo_route=True) |
|
384 | 387 | |
|
385 | 388 | # Issue trackers |
|
386 | 389 | config.add_route( |
|
387 | 390 | name='edit_repo_issuetracker', |
|
388 | 391 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers', repo_route=True) |
|
389 | 392 | config.add_route( |
|
390 | 393 | name='edit_repo_issuetracker_test', |
|
391 | 394 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/test', repo_route=True) |
|
392 | 395 | config.add_route( |
|
393 | 396 | name='edit_repo_issuetracker_delete', |
|
394 | 397 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/delete', repo_route=True) |
|
395 | 398 | config.add_route( |
|
396 | 399 | name='edit_repo_issuetracker_update', |
|
397 | 400 | pattern='/{repo_name:.*?[^/]}/settings/issue_trackers/update', repo_route=True) |
|
398 | 401 | |
|
399 | 402 | # VCS Settings |
|
400 | 403 | config.add_route( |
|
401 | 404 | name='edit_repo_vcs', |
|
402 | 405 | pattern='/{repo_name:.*?[^/]}/settings/vcs', repo_route=True) |
|
403 | 406 | config.add_route( |
|
404 | 407 | name='edit_repo_vcs_update', |
|
405 | 408 | pattern='/{repo_name:.*?[^/]}/settings/vcs/update', repo_route=True) |
|
406 | 409 | |
|
407 | 410 | # svn pattern |
|
408 | 411 | config.add_route( |
|
409 | 412 | name='edit_repo_vcs_svn_pattern_delete', |
|
410 | 413 | pattern='/{repo_name:.*?[^/]}/settings/vcs/svn_pattern/delete', repo_route=True) |
|
411 | 414 | |
|
412 | 415 | # Repo Review Rules (EE feature) |
|
413 | 416 | config.add_route( |
|
414 | 417 | name='repo_reviewers', |
|
415 | 418 | pattern='/{repo_name:.*?[^/]}/settings/review/rules', repo_route=True) |
|
416 | 419 | |
|
417 | 420 | config.add_route( |
|
418 | 421 | name='repo_default_reviewers_data', |
|
419 | 422 | pattern='/{repo_name:.*?[^/]}/settings/review/default-reviewers', repo_route=True) |
|
420 | 423 | |
|
421 | 424 | # Strip |
|
422 | 425 | config.add_route( |
|
423 | 426 | name='edit_repo_strip', |
|
424 | 427 | pattern='/{repo_name:.*?[^/]}/settings/strip', repo_route=True) |
|
425 | 428 | |
|
426 | 429 | config.add_route( |
|
427 | 430 | name='strip_check', |
|
428 | 431 | pattern='/{repo_name:.*?[^/]}/settings/strip_check', repo_route=True) |
|
429 | 432 | |
|
430 | 433 | config.add_route( |
|
431 | 434 | name='strip_execute', |
|
432 | 435 | pattern='/{repo_name:.*?[^/]}/settings/strip_execute', repo_route=True) |
|
433 | 436 | |
|
434 | 437 | # Audit logs |
|
435 | 438 | config.add_route( |
|
436 | 439 | name='edit_repo_audit_logs', |
|
437 | 440 | pattern='/{repo_name:.*?[^/]}/settings/audit_logs', repo_route=True) |
|
438 | 441 | |
|
439 | 442 | # ATOM/RSS Feed |
|
440 | 443 | config.add_route( |
|
441 | 444 | name='rss_feed_home', |
|
442 | 445 | pattern='/{repo_name:.*?[^/]}/feed/rss', repo_route=True) |
|
443 | 446 | |
|
444 | 447 | config.add_route( |
|
445 | 448 | name='atom_feed_home', |
|
446 | 449 | pattern='/{repo_name:.*?[^/]}/feed/atom', repo_route=True) |
|
447 | 450 | |
|
448 | 451 | # NOTE(marcink): needs to be at the end for catch-all |
|
449 | 452 | add_route_with_slash( |
|
450 | 453 | config, |
|
451 | 454 | name='repo_summary', |
|
452 | 455 | pattern='/{repo_name:.*?[^/]}', repo_route=True) |
|
453 | 456 | |
|
454 | 457 | # Scan module for configuration decorators. |
|
455 | 458 | config.scan('.views', ignore='.tests') |
@@ -1,252 +1,256 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2011-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | 23 | import deform |
|
24 | 24 | from pyramid.httpexceptions import HTTPFound |
|
25 | 25 | from pyramid.view import view_config |
|
26 | 26 | |
|
27 | 27 | from rhodecode.apps._base import RepoAppView |
|
28 | 28 | from rhodecode.forms import RcForm |
|
29 | 29 | from rhodecode.lib import helpers as h |
|
30 | 30 | from rhodecode.lib import audit_logger |
|
31 | 31 | from rhodecode.lib.auth import ( |
|
32 | 32 | LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired) |
|
33 | 33 | from rhodecode.model.db import RepositoryField, RepoGroup, Repository |
|
34 | 34 | from rhodecode.model.meta import Session |
|
35 | 35 | from rhodecode.model.repo import RepoModel |
|
36 | 36 | from rhodecode.model.scm import RepoGroupList, ScmModel |
|
37 | 37 | from rhodecode.model.validation_schema.schemas import repo_schema |
|
38 | 38 | |
|
39 | 39 | log = logging.getLogger(__name__) |
|
40 | 40 | |
|
41 | 41 | |
|
42 | 42 | class RepoSettingsView(RepoAppView): |
|
43 | 43 | |
|
44 | 44 | def load_default_context(self): |
|
45 | 45 | c = self._get_local_tmpl_context() |
|
46 | 46 | |
|
47 | 47 | acl_groups = RepoGroupList( |
|
48 | 48 | RepoGroup.query().all(), |
|
49 | 49 | perm_set=['group.write', 'group.admin']) |
|
50 | 50 | c.repo_groups = RepoGroup.groups_choices(groups=acl_groups) |
|
51 | 51 | c.repo_groups_choices = map(lambda k: k[0], c.repo_groups) |
|
52 | 52 | |
|
53 | 53 | # in case someone no longer have a group.write access to a repository |
|
54 | 54 | # pre fill the list with this entry, we don't care if this is the same |
|
55 | 55 | # but it will allow saving repo data properly. |
|
56 | 56 | repo_group = self.db_repo.group |
|
57 | 57 | if repo_group and repo_group.group_id not in c.repo_groups_choices: |
|
58 | 58 | c.repo_groups_choices.append(repo_group.group_id) |
|
59 | 59 | c.repo_groups.append(RepoGroup._generate_choice(repo_group)) |
|
60 | 60 | |
|
61 | 61 | if c.repository_requirements_missing or self.rhodecode_vcs_repo is None: |
|
62 | 62 | # we might be in missing requirement state, so we load things |
|
63 | 63 | # without touching scm_instance() |
|
64 | 64 | c.landing_revs_choices, c.landing_revs = \ |
|
65 | 65 | ScmModel().get_repo_landing_revs(self.request.translate) |
|
66 | 66 | else: |
|
67 | 67 | c.landing_revs_choices, c.landing_revs = \ |
|
68 | 68 | ScmModel().get_repo_landing_revs( |
|
69 | 69 | self.request.translate, self.db_repo) |
|
70 | 70 | |
|
71 | 71 | c.personal_repo_group = c.auth_user.personal_repo_group |
|
72 | 72 | c.repo_fields = RepositoryField.query()\ |
|
73 | 73 | .filter(RepositoryField.repository == self.db_repo).all() |
|
74 | 74 | |
|
75 | 75 | |
|
76 | 76 | return c |
|
77 | 77 | |
|
78 | 78 | def _get_schema(self, c, old_values=None): |
|
79 | 79 | return repo_schema.RepoSettingsSchema().bind( |
|
80 | 80 | repo_type=self.db_repo.repo_type, |
|
81 | 81 | repo_type_options=[self.db_repo.repo_type], |
|
82 | 82 | repo_ref_options=c.landing_revs_choices, |
|
83 | 83 | repo_ref_items=c.landing_revs, |
|
84 | 84 | repo_repo_group_options=c.repo_groups_choices, |
|
85 | 85 | repo_repo_group_items=c.repo_groups, |
|
86 | 86 | # user caller |
|
87 | 87 | user=self._rhodecode_user, |
|
88 | 88 | old_values=old_values |
|
89 | 89 | ) |
|
90 | 90 | |
|
91 | 91 | @LoginRequired() |
|
92 | 92 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
93 | 93 | @view_config( |
|
94 | 94 | route_name='edit_repo', request_method='GET', |
|
95 | 95 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
96 | 96 | def edit_settings(self): |
|
97 | 97 | c = self.load_default_context() |
|
98 | 98 | c.active = 'settings' |
|
99 | 99 | |
|
100 | 100 | defaults = RepoModel()._get_defaults(self.db_repo_name) |
|
101 | 101 | defaults['repo_owner'] = defaults['user'] |
|
102 | 102 | defaults['repo_landing_commit_ref'] = defaults['repo_landing_rev'] |
|
103 | 103 | |
|
104 | 104 | schema = self._get_schema(c) |
|
105 | 105 | c.form = RcForm(schema, appstruct=defaults) |
|
106 | 106 | return self._get_template_context(c) |
|
107 | 107 | |
|
108 | 108 | @LoginRequired() |
|
109 | 109 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
110 | 110 | @CSRFRequired() |
|
111 | 111 | @view_config( |
|
112 | 112 | route_name='edit_repo', request_method='POST', |
|
113 | 113 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
114 | 114 | def edit_settings_update(self): |
|
115 | 115 | _ = self.request.translate |
|
116 | 116 | c = self.load_default_context() |
|
117 | 117 | c.active = 'settings' |
|
118 | 118 | old_repo_name = self.db_repo_name |
|
119 | 119 | |
|
120 | 120 | old_values = self.db_repo.get_api_data() |
|
121 | 121 | schema = self._get_schema(c, old_values=old_values) |
|
122 | 122 | |
|
123 | 123 | c.form = RcForm(schema) |
|
124 | 124 | pstruct = self.request.POST.items() |
|
125 | 125 | pstruct.append(('repo_type', self.db_repo.repo_type)) |
|
126 | 126 | try: |
|
127 | 127 | schema_data = c.form.validate(pstruct) |
|
128 | 128 | except deform.ValidationFailure as err_form: |
|
129 | 129 | return self._get_template_context(c) |
|
130 | 130 | |
|
131 | 131 | # data is now VALID, proceed with updates |
|
132 | 132 | # save validated data back into the updates dict |
|
133 | 133 | validated_updates = dict( |
|
134 | 134 | repo_name=schema_data['repo_group']['repo_name_without_group'], |
|
135 | 135 | repo_group=schema_data['repo_group']['repo_group_id'], |
|
136 | 136 | |
|
137 | 137 | user=schema_data['repo_owner'], |
|
138 | 138 | repo_description=schema_data['repo_description'], |
|
139 | 139 | repo_private=schema_data['repo_private'], |
|
140 | 140 | clone_uri=schema_data['repo_clone_uri'], |
|
141 | push_uri=schema_data['repo_push_uri'], | |
|
141 | 142 | repo_landing_rev=schema_data['repo_landing_commit_ref'], |
|
142 | 143 | repo_enable_statistics=schema_data['repo_enable_statistics'], |
|
143 | 144 | repo_enable_locking=schema_data['repo_enable_locking'], |
|
144 | 145 | repo_enable_downloads=schema_data['repo_enable_downloads'], |
|
145 | 146 | ) |
|
146 |
# detect if C |
|
|
147 | # detect if SYNC URI changed, if we get OLD means we keep old values | |
|
147 | 148 | if schema_data['repo_clone_uri_change'] == 'OLD': |
|
148 | 149 | validated_updates['clone_uri'] = self.db_repo.clone_uri |
|
149 | 150 | |
|
151 | if schema_data['repo_push_uri_change'] == 'OLD': | |
|
152 | validated_updates['push_uri'] = self.db_repo.push_uri | |
|
153 | ||
|
150 | 154 | # use the new full name for redirect |
|
151 | 155 | new_repo_name = schema_data['repo_group']['repo_name_with_group'] |
|
152 | 156 | |
|
153 | 157 | # save extra fields into our validated data |
|
154 | 158 | for key, value in pstruct: |
|
155 | 159 | if key.startswith(RepositoryField.PREFIX): |
|
156 | 160 | validated_updates[key] = value |
|
157 | 161 | |
|
158 | 162 | try: |
|
159 | 163 | RepoModel().update(self.db_repo, **validated_updates) |
|
160 | 164 | ScmModel().mark_for_invalidation(new_repo_name) |
|
161 | 165 | |
|
162 | 166 | audit_logger.store_web( |
|
163 | 167 | 'repo.edit', action_data={'old_data': old_values}, |
|
164 | 168 | user=self._rhodecode_user, repo=self.db_repo) |
|
165 | 169 | |
|
166 | 170 | Session().commit() |
|
167 | 171 | |
|
168 | 172 | h.flash(_('Repository `{}` updated successfully').format( |
|
169 | 173 | old_repo_name), category='success') |
|
170 | 174 | except Exception: |
|
171 | 175 | log.exception("Exception during update of repository") |
|
172 | 176 | h.flash(_('Error occurred during update of repository {}').format( |
|
173 | 177 | old_repo_name), category='error') |
|
174 | 178 | |
|
175 | 179 | raise HTTPFound( |
|
176 | 180 | h.route_path('edit_repo', repo_name=new_repo_name)) |
|
177 | 181 | |
|
178 | 182 | @LoginRequired() |
|
179 | 183 | @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin') |
|
180 | 184 | @view_config( |
|
181 | 185 | route_name='repo_edit_toggle_locking', request_method='GET', |
|
182 | 186 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
183 | 187 | def toggle_locking(self): |
|
184 | 188 | """ |
|
185 | 189 | Toggle locking of repository by simple GET call to url |
|
186 | 190 | """ |
|
187 | 191 | _ = self.request.translate |
|
188 | 192 | repo = self.db_repo |
|
189 | 193 | |
|
190 | 194 | try: |
|
191 | 195 | if repo.enable_locking: |
|
192 | 196 | if repo.locked[0]: |
|
193 | 197 | Repository.unlock(repo) |
|
194 | 198 | action = _('Unlocked') |
|
195 | 199 | else: |
|
196 | 200 | Repository.lock( |
|
197 | 201 | repo, self._rhodecode_user.user_id, |
|
198 | 202 | lock_reason=Repository.LOCK_WEB) |
|
199 | 203 | action = _('Locked') |
|
200 | 204 | |
|
201 | 205 | h.flash(_('Repository has been %s') % action, |
|
202 | 206 | category='success') |
|
203 | 207 | except Exception: |
|
204 | 208 | log.exception("Exception during unlocking") |
|
205 | 209 | h.flash(_('An error occurred during unlocking'), |
|
206 | 210 | category='error') |
|
207 | 211 | raise HTTPFound( |
|
208 | 212 | h.route_path('repo_summary', repo_name=self.db_repo_name)) |
|
209 | 213 | |
|
210 | 214 | @LoginRequired() |
|
211 | 215 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
212 | 216 | @view_config( |
|
213 | 217 | route_name='edit_repo_statistics', request_method='GET', |
|
214 | 218 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
215 | 219 | def edit_statistics_form(self): |
|
216 | 220 | c = self.load_default_context() |
|
217 | 221 | |
|
218 | 222 | if self.db_repo.stats: |
|
219 | 223 | # this is on what revision we ended up so we add +1 for count |
|
220 | 224 | last_rev = self.db_repo.stats.stat_on_revision + 1 |
|
221 | 225 | else: |
|
222 | 226 | last_rev = 0 |
|
223 | 227 | |
|
224 | 228 | c.active = 'statistics' |
|
225 | 229 | c.stats_revision = last_rev |
|
226 | 230 | c.repo_last_rev = self.rhodecode_vcs_repo.count() |
|
227 | 231 | |
|
228 | 232 | if last_rev == 0 or c.repo_last_rev == 0: |
|
229 | 233 | c.stats_percentage = 0 |
|
230 | 234 | else: |
|
231 | 235 | c.stats_percentage = '%.2f' % ( |
|
232 | 236 | (float((last_rev)) / c.repo_last_rev) * 100) |
|
233 | 237 | return self._get_template_context(c) |
|
234 | 238 | |
|
235 | 239 | @LoginRequired() |
|
236 | 240 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
237 | 241 | @CSRFRequired() |
|
238 | 242 | @view_config( |
|
239 | 243 | route_name='edit_repo_statistics_reset', request_method='POST', |
|
240 | 244 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
241 | 245 | def repo_statistics_reset(self): |
|
242 | 246 | _ = self.request.translate |
|
243 | 247 | |
|
244 | 248 | try: |
|
245 | 249 | RepoModel().delete_stats(self.db_repo_name) |
|
246 | 250 | Session().commit() |
|
247 | 251 | except Exception: |
|
248 | 252 | log.exception('Edit statistics failure') |
|
249 | 253 | h.flash(_('An error occurred during deletion of repository stats'), |
|
250 | 254 | category='error') |
|
251 | 255 | raise HTTPFound( |
|
252 | 256 | h.route_path('edit_repo_statistics', repo_name=self.db_repo_name)) |
@@ -1,72 +1,70 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2017-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import logging |
|
22 | 22 | |
|
23 | 23 | from pyramid.httpexceptions import HTTPFound |
|
24 | 24 | from pyramid.view import view_config |
|
25 | 25 | |
|
26 | 26 | from rhodecode.apps._base import RepoAppView |
|
27 | 27 | from rhodecode.lib import helpers as h |
|
28 | 28 | from rhodecode.lib.auth import ( |
|
29 | 29 | LoginRequired, CSRFRequired, HasRepoPermissionAnyDecorator) |
|
30 | 30 | from rhodecode.model.scm import ScmModel |
|
31 | 31 | |
|
32 | 32 | log = logging.getLogger(__name__) |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | class RepoSettingsRemoteView(RepoAppView): |
|
36 | 36 | def load_default_context(self): |
|
37 | 37 | c = self._get_local_tmpl_context() |
|
38 | ||
|
39 | ||
|
40 | 38 | return c |
|
41 | 39 | |
|
42 | 40 | @LoginRequired() |
|
43 | 41 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
44 | 42 | @view_config( |
|
45 | 43 | route_name='edit_repo_remote', request_method='GET', |
|
46 | 44 | renderer='rhodecode:templates/admin/repos/repo_edit.mako') |
|
47 | 45 | def repo_remote_edit_form(self): |
|
48 | 46 | c = self.load_default_context() |
|
49 | 47 | c.active = 'remote' |
|
50 | 48 | |
|
51 | 49 | return self._get_template_context(c) |
|
52 | 50 | |
|
53 | 51 | @LoginRequired() |
|
54 | 52 | @HasRepoPermissionAnyDecorator('repository.admin') |
|
55 | 53 | @CSRFRequired() |
|
56 | 54 | @view_config( |
|
57 | 55 | route_name='edit_repo_remote_pull', request_method='POST', |
|
58 | 56 | renderer=None) |
|
59 | 57 | def repo_remote_pull_changes(self): |
|
60 | 58 | _ = self.request.translate |
|
61 | 59 | self.load_default_context() |
|
62 | 60 | |
|
63 | 61 | try: |
|
64 | 62 | ScmModel().pull_changes( |
|
65 | 63 | self.db_repo_name, self._rhodecode_user.username) |
|
66 | 64 | h.flash(_('Pulled from remote location'), category='success') |
|
67 | 65 | except Exception: |
|
68 | 66 | log.exception("Exception during pull from remote") |
|
69 | 67 | h.flash(_('An error occurred during pull from remote location'), |
|
70 | 68 | category='error') |
|
71 | 69 | raise HTTPFound( |
|
72 | 70 | h.route_path('edit_repo_remote', repo_name=self.db_repo_name)) |
@@ -1,1032 +1,1036 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2010-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import os |
|
22 | 22 | import re |
|
23 | 23 | import shutil |
|
24 | 24 | import time |
|
25 | 25 | import logging |
|
26 | 26 | import traceback |
|
27 | 27 | import datetime |
|
28 | 28 | |
|
29 | 29 | from pyramid.threadlocal import get_current_request |
|
30 | 30 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
31 | 31 | |
|
32 | 32 | from rhodecode import events |
|
33 | 33 | from rhodecode.lib.auth import HasUserGroupPermissionAny |
|
34 | 34 | from rhodecode.lib.caching_query import FromCache |
|
35 | 35 | from rhodecode.lib.exceptions import AttachedForksError |
|
36 | 36 | from rhodecode.lib.hooks_base import log_delete_repository |
|
37 | 37 | from rhodecode.lib.user_log_filter import user_log_filter |
|
38 | 38 | from rhodecode.lib.utils import make_db_config |
|
39 | 39 | from rhodecode.lib.utils2 import ( |
|
40 | 40 | safe_str, safe_unicode, remove_prefix, obfuscate_url_pw, |
|
41 | 41 | get_current_rhodecode_user, safe_int, datetime_to_time, |
|
42 | 42 | action_logger_generic) |
|
43 | 43 | from rhodecode.lib.vcs.backends import get_backend |
|
44 | 44 | from rhodecode.model import BaseModel |
|
45 | 45 | from rhodecode.model.db import ( |
|
46 | 46 | _hash_key, joinedload, or_, Repository, UserRepoToPerm, UserGroupRepoToPerm, |
|
47 | 47 | UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission, |
|
48 | 48 | Statistics, UserGroup, RepoGroup, RepositoryField, UserLog) |
|
49 | 49 | |
|
50 | 50 | from rhodecode.model.settings import VcsSettingsModel |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | log = logging.getLogger(__name__) |
|
54 | 54 | |
|
55 | 55 | |
|
56 | 56 | class RepoModel(BaseModel): |
|
57 | 57 | |
|
58 | 58 | cls = Repository |
|
59 | 59 | |
|
60 | 60 | def _get_user_group(self, users_group): |
|
61 | 61 | return self._get_instance(UserGroup, users_group, |
|
62 | 62 | callback=UserGroup.get_by_group_name) |
|
63 | 63 | |
|
64 | 64 | def _get_repo_group(self, repo_group): |
|
65 | 65 | return self._get_instance(RepoGroup, repo_group, |
|
66 | 66 | callback=RepoGroup.get_by_group_name) |
|
67 | 67 | |
|
68 | 68 | def _create_default_perms(self, repository, private): |
|
69 | 69 | # create default permission |
|
70 | 70 | default = 'repository.read' |
|
71 | 71 | def_user = User.get_default_user() |
|
72 | 72 | for p in def_user.user_perms: |
|
73 | 73 | if p.permission.permission_name.startswith('repository.'): |
|
74 | 74 | default = p.permission.permission_name |
|
75 | 75 | break |
|
76 | 76 | |
|
77 | 77 | default_perm = 'repository.none' if private else default |
|
78 | 78 | |
|
79 | 79 | repo_to_perm = UserRepoToPerm() |
|
80 | 80 | repo_to_perm.permission = Permission.get_by_key(default_perm) |
|
81 | 81 | |
|
82 | 82 | repo_to_perm.repository = repository |
|
83 | 83 | repo_to_perm.user_id = def_user.user_id |
|
84 | 84 | |
|
85 | 85 | return repo_to_perm |
|
86 | 86 | |
|
87 | 87 | @LazyProperty |
|
88 | 88 | def repos_path(self): |
|
89 | 89 | """ |
|
90 | 90 | Gets the repositories root path from database |
|
91 | 91 | """ |
|
92 | 92 | settings_model = VcsSettingsModel(sa=self.sa) |
|
93 | 93 | return settings_model.get_repos_location() |
|
94 | 94 | |
|
95 | 95 | def get(self, repo_id, cache=False): |
|
96 | 96 | repo = self.sa.query(Repository) \ |
|
97 | 97 | .filter(Repository.repo_id == repo_id) |
|
98 | 98 | |
|
99 | 99 | if cache: |
|
100 | 100 | repo = repo.options( |
|
101 | 101 | FromCache("sql_cache_short", "get_repo_%s" % repo_id)) |
|
102 | 102 | return repo.scalar() |
|
103 | 103 | |
|
104 | 104 | def get_repo(self, repository): |
|
105 | 105 | return self._get_repo(repository) |
|
106 | 106 | |
|
107 | 107 | def get_by_repo_name(self, repo_name, cache=False): |
|
108 | 108 | repo = self.sa.query(Repository) \ |
|
109 | 109 | .filter(Repository.repo_name == repo_name) |
|
110 | 110 | |
|
111 | 111 | if cache: |
|
112 | 112 | name_key = _hash_key(repo_name) |
|
113 | 113 | repo = repo.options( |
|
114 | 114 | FromCache("sql_cache_short", "get_repo_%s" % name_key)) |
|
115 | 115 | return repo.scalar() |
|
116 | 116 | |
|
117 | 117 | def _extract_id_from_repo_name(self, repo_name): |
|
118 | 118 | if repo_name.startswith('/'): |
|
119 | 119 | repo_name = repo_name.lstrip('/') |
|
120 | 120 | by_id_match = re.match(r'^_(\d{1,})', repo_name) |
|
121 | 121 | if by_id_match: |
|
122 | 122 | return by_id_match.groups()[0] |
|
123 | 123 | |
|
124 | 124 | def get_repo_by_id(self, repo_name): |
|
125 | 125 | """ |
|
126 | 126 | Extracts repo_name by id from special urls. |
|
127 | 127 | Example url is _11/repo_name |
|
128 | 128 | |
|
129 | 129 | :param repo_name: |
|
130 | 130 | :return: repo object if matched else None |
|
131 | 131 | """ |
|
132 | 132 | |
|
133 | 133 | try: |
|
134 | 134 | _repo_id = self._extract_id_from_repo_name(repo_name) |
|
135 | 135 | if _repo_id: |
|
136 | 136 | return self.get(_repo_id) |
|
137 | 137 | except Exception: |
|
138 | 138 | log.exception('Failed to extract repo_name from URL') |
|
139 | 139 | |
|
140 | 140 | return None |
|
141 | 141 | |
|
142 | 142 | def get_repos_for_root(self, root, traverse=False): |
|
143 | 143 | if traverse: |
|
144 | 144 | like_expression = u'{}%'.format(safe_unicode(root)) |
|
145 | 145 | repos = Repository.query().filter( |
|
146 | 146 | Repository.repo_name.like(like_expression)).all() |
|
147 | 147 | else: |
|
148 | 148 | if root and not isinstance(root, RepoGroup): |
|
149 | 149 | raise ValueError( |
|
150 | 150 | 'Root must be an instance ' |
|
151 | 151 | 'of RepoGroup, got:{} instead'.format(type(root))) |
|
152 | 152 | repos = Repository.query().filter(Repository.group == root).all() |
|
153 | 153 | return repos |
|
154 | 154 | |
|
155 | 155 | def get_url(self, repo, request=None, permalink=False): |
|
156 | 156 | if not request: |
|
157 | 157 | request = get_current_request() |
|
158 | 158 | |
|
159 | 159 | if not request: |
|
160 | 160 | return |
|
161 | 161 | |
|
162 | 162 | if permalink: |
|
163 | 163 | return request.route_url( |
|
164 | 164 | 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id))) |
|
165 | 165 | else: |
|
166 | 166 | return request.route_url( |
|
167 | 167 | 'repo_summary', repo_name=safe_str(repo.repo_name)) |
|
168 | 168 | |
|
169 | 169 | def get_commit_url(self, repo, commit_id, request=None, permalink=False): |
|
170 | 170 | if not request: |
|
171 | 171 | request = get_current_request() |
|
172 | 172 | |
|
173 | 173 | if not request: |
|
174 | 174 | return |
|
175 | 175 | |
|
176 | 176 | if permalink: |
|
177 | 177 | return request.route_url( |
|
178 | 178 | 'repo_commit', repo_name=safe_str(repo.repo_id), |
|
179 | 179 | commit_id=commit_id) |
|
180 | 180 | |
|
181 | 181 | else: |
|
182 | 182 | return request.route_url( |
|
183 | 183 | 'repo_commit', repo_name=safe_str(repo.repo_name), |
|
184 | 184 | commit_id=commit_id) |
|
185 | 185 | |
|
186 | 186 | def get_repo_log(self, repo, filter_term): |
|
187 | 187 | repo_log = UserLog.query()\ |
|
188 | 188 | .filter(or_(UserLog.repository_id == repo.repo_id, |
|
189 | 189 | UserLog.repository_name == repo.repo_name))\ |
|
190 | 190 | .options(joinedload(UserLog.user))\ |
|
191 | 191 | .options(joinedload(UserLog.repository))\ |
|
192 | 192 | .order_by(UserLog.action_date.desc()) |
|
193 | 193 | |
|
194 | 194 | repo_log = user_log_filter(repo_log, filter_term) |
|
195 | 195 | return repo_log |
|
196 | 196 | |
|
197 | 197 | @classmethod |
|
198 | 198 | def update_repoinfo(cls, repositories=None): |
|
199 | 199 | if not repositories: |
|
200 | 200 | repositories = Repository.getAll() |
|
201 | 201 | for repo in repositories: |
|
202 | 202 | repo.update_commit_cache() |
|
203 | 203 | |
|
204 | 204 | def get_repos_as_dict(self, repo_list=None, admin=False, |
|
205 | 205 | super_user_actions=False): |
|
206 | 206 | _render = get_current_request().get_partial_renderer( |
|
207 | 207 | 'rhodecode:templates/data_table/_dt_elements.mako') |
|
208 | 208 | c = _render.get_call_context() |
|
209 | 209 | |
|
210 | 210 | def quick_menu(repo_name): |
|
211 | 211 | return _render('quick_menu', repo_name) |
|
212 | 212 | |
|
213 | 213 | def repo_lnk(name, rtype, rstate, private, fork_of): |
|
214 | 214 | return _render('repo_name', name, rtype, rstate, private, fork_of, |
|
215 | 215 | short_name=not admin, admin=False) |
|
216 | 216 | |
|
217 | 217 | def last_change(last_change): |
|
218 | 218 | if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo: |
|
219 | 219 | last_change = last_change + datetime.timedelta(seconds= |
|
220 | 220 | (datetime.datetime.now() - datetime.datetime.utcnow()).seconds) |
|
221 | 221 | return _render("last_change", last_change) |
|
222 | 222 | |
|
223 | 223 | def rss_lnk(repo_name): |
|
224 | 224 | return _render("rss", repo_name) |
|
225 | 225 | |
|
226 | 226 | def atom_lnk(repo_name): |
|
227 | 227 | return _render("atom", repo_name) |
|
228 | 228 | |
|
229 | 229 | def last_rev(repo_name, cs_cache): |
|
230 | 230 | return _render('revision', repo_name, cs_cache.get('revision'), |
|
231 | 231 | cs_cache.get('raw_id'), cs_cache.get('author'), |
|
232 | 232 | cs_cache.get('message'), cs_cache.get('date')) |
|
233 | 233 | |
|
234 | 234 | def desc(desc): |
|
235 | 235 | return _render('repo_desc', desc, c.visual.stylify_metatags) |
|
236 | 236 | |
|
237 | 237 | def state(repo_state): |
|
238 | 238 | return _render("repo_state", repo_state) |
|
239 | 239 | |
|
240 | 240 | def repo_actions(repo_name): |
|
241 | 241 | return _render('repo_actions', repo_name, super_user_actions) |
|
242 | 242 | |
|
243 | 243 | def user_profile(username): |
|
244 | 244 | return _render('user_profile', username) |
|
245 | 245 | |
|
246 | 246 | repos_data = [] |
|
247 | 247 | for repo in repo_list: |
|
248 | 248 | cs_cache = repo.changeset_cache |
|
249 | 249 | row = { |
|
250 | 250 | "menu": quick_menu(repo.repo_name), |
|
251 | 251 | |
|
252 | 252 | "name": repo_lnk(repo.repo_name, repo.repo_type, |
|
253 | 253 | repo.repo_state, repo.private, repo.fork), |
|
254 | 254 | "name_raw": repo.repo_name.lower(), |
|
255 | 255 | |
|
256 | 256 | "last_change": last_change(repo.last_db_change), |
|
257 | 257 | "last_change_raw": datetime_to_time(repo.last_db_change), |
|
258 | 258 | |
|
259 | 259 | "last_changeset": last_rev(repo.repo_name, cs_cache), |
|
260 | 260 | "last_changeset_raw": cs_cache.get('revision'), |
|
261 | 261 | |
|
262 | 262 | "desc": desc(repo.description_safe), |
|
263 | 263 | "owner": user_profile(repo.user.username), |
|
264 | 264 | |
|
265 | 265 | "state": state(repo.repo_state), |
|
266 | 266 | "rss": rss_lnk(repo.repo_name), |
|
267 | 267 | |
|
268 | 268 | "atom": atom_lnk(repo.repo_name), |
|
269 | 269 | } |
|
270 | 270 | if admin: |
|
271 | 271 | row.update({ |
|
272 | 272 | "action": repo_actions(repo.repo_name), |
|
273 | 273 | }) |
|
274 | 274 | repos_data.append(row) |
|
275 | 275 | |
|
276 | 276 | return repos_data |
|
277 | 277 | |
|
278 | 278 | def _get_defaults(self, repo_name): |
|
279 | 279 | """ |
|
280 | 280 | Gets information about repository, and returns a dict for |
|
281 | 281 | usage in forms |
|
282 | 282 | |
|
283 | 283 | :param repo_name: |
|
284 | 284 | """ |
|
285 | 285 | |
|
286 | 286 | repo_info = Repository.get_by_repo_name(repo_name) |
|
287 | 287 | |
|
288 | 288 | if repo_info is None: |
|
289 | 289 | return None |
|
290 | 290 | |
|
291 | 291 | defaults = repo_info.get_dict() |
|
292 | 292 | defaults['repo_name'] = repo_info.just_name |
|
293 | 293 | |
|
294 | 294 | groups = repo_info.groups_with_parents |
|
295 | 295 | parent_group = groups[-1] if groups else None |
|
296 | 296 | |
|
297 | 297 | # we use -1 as this is how in HTML, we mark an empty group |
|
298 | 298 | defaults['repo_group'] = getattr(parent_group, 'group_id', -1) |
|
299 | 299 | |
|
300 | 300 | keys_to_process = ( |
|
301 | 301 | {'k': 'repo_type', 'strip': False}, |
|
302 | 302 | {'k': 'repo_enable_downloads', 'strip': True}, |
|
303 | 303 | {'k': 'repo_description', 'strip': True}, |
|
304 | 304 | {'k': 'repo_enable_locking', 'strip': True}, |
|
305 | 305 | {'k': 'repo_landing_rev', 'strip': True}, |
|
306 | 306 | {'k': 'clone_uri', 'strip': False}, |
|
307 | {'k': 'push_uri', 'strip': False}, | |
|
307 | 308 | {'k': 'repo_private', 'strip': True}, |
|
308 | 309 | {'k': 'repo_enable_statistics', 'strip': True} |
|
309 | 310 | ) |
|
310 | 311 | |
|
311 | 312 | for item in keys_to_process: |
|
312 | 313 | attr = item['k'] |
|
313 | 314 | if item['strip']: |
|
314 | 315 | attr = remove_prefix(item['k'], 'repo_') |
|
315 | 316 | |
|
316 | 317 | val = defaults[attr] |
|
317 | 318 | if item['k'] == 'repo_landing_rev': |
|
318 | 319 | val = ':'.join(defaults[attr]) |
|
319 | 320 | defaults[item['k']] = val |
|
320 | 321 | if item['k'] == 'clone_uri': |
|
321 | 322 | defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden |
|
323 | if item['k'] == 'push_uri': | |
|
324 | defaults['push_uri_hidden'] = repo_info.push_uri_hidden | |
|
322 | 325 | |
|
323 | 326 | # fill owner |
|
324 | 327 | if repo_info.user: |
|
325 | 328 | defaults.update({'user': repo_info.user.username}) |
|
326 | 329 | else: |
|
327 | 330 | replacement_user = User.get_first_super_admin().username |
|
328 | 331 | defaults.update({'user': replacement_user}) |
|
329 | 332 | |
|
330 | 333 | return defaults |
|
331 | 334 | |
|
332 | 335 | def update(self, repo, **kwargs): |
|
333 | 336 | try: |
|
334 | 337 | cur_repo = self._get_repo(repo) |
|
335 | 338 | source_repo_name = cur_repo.repo_name |
|
336 | 339 | if 'user' in kwargs: |
|
337 | 340 | cur_repo.user = User.get_by_username(kwargs['user']) |
|
338 | 341 | |
|
339 | 342 | if 'repo_group' in kwargs: |
|
340 | 343 | cur_repo.group = RepoGroup.get(kwargs['repo_group']) |
|
341 | 344 | log.debug('Updating repo %s with params:%s', cur_repo, kwargs) |
|
342 | 345 | |
|
343 | 346 | update_keys = [ |
|
344 | 347 | (1, 'repo_description'), |
|
345 | 348 | (1, 'repo_landing_rev'), |
|
346 | 349 | (1, 'repo_private'), |
|
347 | 350 | (1, 'repo_enable_downloads'), |
|
348 | 351 | (1, 'repo_enable_locking'), |
|
349 | 352 | (1, 'repo_enable_statistics'), |
|
350 | 353 | (0, 'clone_uri'), |
|
354 | (0, 'push_uri'), | |
|
351 | 355 | (0, 'fork_id') |
|
352 | 356 | ] |
|
353 | 357 | for strip, k in update_keys: |
|
354 | 358 | if k in kwargs: |
|
355 | 359 | val = kwargs[k] |
|
356 | 360 | if strip: |
|
357 | 361 | k = remove_prefix(k, 'repo_') |
|
358 | 362 | |
|
359 | 363 | setattr(cur_repo, k, val) |
|
360 | 364 | |
|
361 | 365 | new_name = cur_repo.get_new_name(kwargs['repo_name']) |
|
362 | 366 | cur_repo.repo_name = new_name |
|
363 | 367 | |
|
364 | 368 | # if private flag is set, reset default permission to NONE |
|
365 | 369 | if kwargs.get('repo_private'): |
|
366 | 370 | EMPTY_PERM = 'repository.none' |
|
367 | 371 | RepoModel().grant_user_permission( |
|
368 | 372 | repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM |
|
369 | 373 | ) |
|
370 | 374 | |
|
371 | 375 | # handle extra fields |
|
372 | 376 | for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), |
|
373 | 377 | kwargs): |
|
374 | 378 | k = RepositoryField.un_prefix_key(field) |
|
375 | 379 | ex_field = RepositoryField.get_by_key_name( |
|
376 | 380 | key=k, repo=cur_repo) |
|
377 | 381 | if ex_field: |
|
378 | 382 | ex_field.field_value = kwargs[field] |
|
379 | 383 | self.sa.add(ex_field) |
|
380 | 384 | cur_repo.updated_on = datetime.datetime.now() |
|
381 | 385 | self.sa.add(cur_repo) |
|
382 | 386 | |
|
383 | 387 | if source_repo_name != new_name: |
|
384 | 388 | # rename repository |
|
385 | 389 | self._rename_filesystem_repo( |
|
386 | 390 | old=source_repo_name, new=new_name) |
|
387 | 391 | |
|
388 | 392 | return cur_repo |
|
389 | 393 | except Exception: |
|
390 | 394 | log.error(traceback.format_exc()) |
|
391 | 395 | raise |
|
392 | 396 | |
|
393 | 397 | def _create_repo(self, repo_name, repo_type, description, owner, |
|
394 | 398 | private=False, clone_uri=None, repo_group=None, |
|
395 | 399 | landing_rev='rev:tip', fork_of=None, |
|
396 | 400 | copy_fork_permissions=False, enable_statistics=False, |
|
397 | 401 | enable_locking=False, enable_downloads=False, |
|
398 | 402 | copy_group_permissions=False, |
|
399 | 403 | state=Repository.STATE_PENDING): |
|
400 | 404 | """ |
|
401 | 405 | Create repository inside database with PENDING state, this should be |
|
402 | 406 | only executed by create() repo. With exception of importing existing |
|
403 | 407 | repos |
|
404 | 408 | """ |
|
405 | 409 | from rhodecode.model.scm import ScmModel |
|
406 | 410 | |
|
407 | 411 | owner = self._get_user(owner) |
|
408 | 412 | fork_of = self._get_repo(fork_of) |
|
409 | 413 | repo_group = self._get_repo_group(safe_int(repo_group)) |
|
410 | 414 | |
|
411 | 415 | try: |
|
412 | 416 | repo_name = safe_unicode(repo_name) |
|
413 | 417 | description = safe_unicode(description) |
|
414 | 418 | # repo name is just a name of repository |
|
415 | 419 | # while repo_name_full is a full qualified name that is combined |
|
416 | 420 | # with name and path of group |
|
417 | 421 | repo_name_full = repo_name |
|
418 | 422 | repo_name = repo_name.split(Repository.NAME_SEP)[-1] |
|
419 | 423 | |
|
420 | 424 | new_repo = Repository() |
|
421 | 425 | new_repo.repo_state = state |
|
422 | 426 | new_repo.enable_statistics = False |
|
423 | 427 | new_repo.repo_name = repo_name_full |
|
424 | 428 | new_repo.repo_type = repo_type |
|
425 | 429 | new_repo.user = owner |
|
426 | 430 | new_repo.group = repo_group |
|
427 | 431 | new_repo.description = description or repo_name |
|
428 | 432 | new_repo.private = private |
|
429 | 433 | new_repo.clone_uri = clone_uri |
|
430 | 434 | new_repo.landing_rev = landing_rev |
|
431 | 435 | |
|
432 | 436 | new_repo.enable_statistics = enable_statistics |
|
433 | 437 | new_repo.enable_locking = enable_locking |
|
434 | 438 | new_repo.enable_downloads = enable_downloads |
|
435 | 439 | |
|
436 | 440 | if repo_group: |
|
437 | 441 | new_repo.enable_locking = repo_group.enable_locking |
|
438 | 442 | |
|
439 | 443 | if fork_of: |
|
440 | 444 | parent_repo = fork_of |
|
441 | 445 | new_repo.fork = parent_repo |
|
442 | 446 | |
|
443 | 447 | events.trigger(events.RepoPreCreateEvent(new_repo)) |
|
444 | 448 | |
|
445 | 449 | self.sa.add(new_repo) |
|
446 | 450 | |
|
447 | 451 | EMPTY_PERM = 'repository.none' |
|
448 | 452 | if fork_of and copy_fork_permissions: |
|
449 | 453 | repo = fork_of |
|
450 | 454 | user_perms = UserRepoToPerm.query() \ |
|
451 | 455 | .filter(UserRepoToPerm.repository == repo).all() |
|
452 | 456 | group_perms = UserGroupRepoToPerm.query() \ |
|
453 | 457 | .filter(UserGroupRepoToPerm.repository == repo).all() |
|
454 | 458 | |
|
455 | 459 | for perm in user_perms: |
|
456 | 460 | UserRepoToPerm.create( |
|
457 | 461 | perm.user, new_repo, perm.permission) |
|
458 | 462 | |
|
459 | 463 | for perm in group_perms: |
|
460 | 464 | UserGroupRepoToPerm.create( |
|
461 | 465 | perm.users_group, new_repo, perm.permission) |
|
462 | 466 | # in case we copy permissions and also set this repo to private |
|
463 | 467 | # override the default user permission to make it a private |
|
464 | 468 | # repo |
|
465 | 469 | if private: |
|
466 | 470 | RepoModel(self.sa).grant_user_permission( |
|
467 | 471 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
468 | 472 | |
|
469 | 473 | elif repo_group and copy_group_permissions: |
|
470 | 474 | user_perms = UserRepoGroupToPerm.query() \ |
|
471 | 475 | .filter(UserRepoGroupToPerm.group == repo_group).all() |
|
472 | 476 | |
|
473 | 477 | group_perms = UserGroupRepoGroupToPerm.query() \ |
|
474 | 478 | .filter(UserGroupRepoGroupToPerm.group == repo_group).all() |
|
475 | 479 | |
|
476 | 480 | for perm in user_perms: |
|
477 | 481 | perm_name = perm.permission.permission_name.replace( |
|
478 | 482 | 'group.', 'repository.') |
|
479 | 483 | perm_obj = Permission.get_by_key(perm_name) |
|
480 | 484 | UserRepoToPerm.create(perm.user, new_repo, perm_obj) |
|
481 | 485 | |
|
482 | 486 | for perm in group_perms: |
|
483 | 487 | perm_name = perm.permission.permission_name.replace( |
|
484 | 488 | 'group.', 'repository.') |
|
485 | 489 | perm_obj = Permission.get_by_key(perm_name) |
|
486 | 490 | UserGroupRepoToPerm.create( |
|
487 | 491 | perm.users_group, new_repo, perm_obj) |
|
488 | 492 | |
|
489 | 493 | if private: |
|
490 | 494 | RepoModel(self.sa).grant_user_permission( |
|
491 | 495 | repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM) |
|
492 | 496 | |
|
493 | 497 | else: |
|
494 | 498 | perm_obj = self._create_default_perms(new_repo, private) |
|
495 | 499 | self.sa.add(perm_obj) |
|
496 | 500 | |
|
497 | 501 | # now automatically start following this repository as owner |
|
498 | 502 | ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, |
|
499 | 503 | owner.user_id) |
|
500 | 504 | |
|
501 | 505 | # we need to flush here, in order to check if database won't |
|
502 | 506 | # throw any exceptions, create filesystem dirs at the very end |
|
503 | 507 | self.sa.flush() |
|
504 | 508 | events.trigger(events.RepoCreateEvent(new_repo)) |
|
505 | 509 | return new_repo |
|
506 | 510 | |
|
507 | 511 | except Exception: |
|
508 | 512 | log.error(traceback.format_exc()) |
|
509 | 513 | raise |
|
510 | 514 | |
|
511 | 515 | def create(self, form_data, cur_user): |
|
512 | 516 | """ |
|
513 | 517 | Create repository using celery tasks |
|
514 | 518 | |
|
515 | 519 | :param form_data: |
|
516 | 520 | :param cur_user: |
|
517 | 521 | """ |
|
518 | 522 | from rhodecode.lib.celerylib import tasks, run_task |
|
519 | 523 | return run_task(tasks.create_repo, form_data, cur_user) |
|
520 | 524 | |
|
521 | 525 | def update_permissions(self, repo, perm_additions=None, perm_updates=None, |
|
522 | 526 | perm_deletions=None, check_perms=True, |
|
523 | 527 | cur_user=None): |
|
524 | 528 | if not perm_additions: |
|
525 | 529 | perm_additions = [] |
|
526 | 530 | if not perm_updates: |
|
527 | 531 | perm_updates = [] |
|
528 | 532 | if not perm_deletions: |
|
529 | 533 | perm_deletions = [] |
|
530 | 534 | |
|
531 | 535 | req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin') |
|
532 | 536 | |
|
533 | 537 | changes = { |
|
534 | 538 | 'added': [], |
|
535 | 539 | 'updated': [], |
|
536 | 540 | 'deleted': [] |
|
537 | 541 | } |
|
538 | 542 | # update permissions |
|
539 | 543 | for member_id, perm, member_type in perm_updates: |
|
540 | 544 | member_id = int(member_id) |
|
541 | 545 | if member_type == 'user': |
|
542 | 546 | member_name = User.get(member_id).username |
|
543 | 547 | # this updates also current one if found |
|
544 | 548 | self.grant_user_permission( |
|
545 | 549 | repo=repo, user=member_id, perm=perm) |
|
546 | 550 | else: # set for user group |
|
547 | 551 | # check if we have permissions to alter this usergroup |
|
548 | 552 | member_name = UserGroup.get(member_id).users_group_name |
|
549 | 553 | if not check_perms or HasUserGroupPermissionAny( |
|
550 | 554 | *req_perms)(member_name, user=cur_user): |
|
551 | 555 | self.grant_user_group_permission( |
|
552 | 556 | repo=repo, group_name=member_id, perm=perm) |
|
553 | 557 | |
|
554 | 558 | changes['updated'].append({'type': member_type, 'id': member_id, |
|
555 | 559 | 'name': member_name, 'new_perm': perm}) |
|
556 | 560 | |
|
557 | 561 | # set new permissions |
|
558 | 562 | for member_id, perm, member_type in perm_additions: |
|
559 | 563 | member_id = int(member_id) |
|
560 | 564 | if member_type == 'user': |
|
561 | 565 | member_name = User.get(member_id).username |
|
562 | 566 | self.grant_user_permission( |
|
563 | 567 | repo=repo, user=member_id, perm=perm) |
|
564 | 568 | else: # set for user group |
|
565 | 569 | # check if we have permissions to alter this usergroup |
|
566 | 570 | member_name = UserGroup.get(member_id).users_group_name |
|
567 | 571 | if not check_perms or HasUserGroupPermissionAny( |
|
568 | 572 | *req_perms)(member_name, user=cur_user): |
|
569 | 573 | self.grant_user_group_permission( |
|
570 | 574 | repo=repo, group_name=member_id, perm=perm) |
|
571 | 575 | changes['added'].append({'type': member_type, 'id': member_id, |
|
572 | 576 | 'name': member_name, 'new_perm': perm}) |
|
573 | 577 | # delete permissions |
|
574 | 578 | for member_id, perm, member_type in perm_deletions: |
|
575 | 579 | member_id = int(member_id) |
|
576 | 580 | if member_type == 'user': |
|
577 | 581 | member_name = User.get(member_id).username |
|
578 | 582 | self.revoke_user_permission(repo=repo, user=member_id) |
|
579 | 583 | else: # set for user group |
|
580 | 584 | # check if we have permissions to alter this usergroup |
|
581 | 585 | member_name = UserGroup.get(member_id).users_group_name |
|
582 | 586 | if not check_perms or HasUserGroupPermissionAny( |
|
583 | 587 | *req_perms)(member_name, user=cur_user): |
|
584 | 588 | self.revoke_user_group_permission( |
|
585 | 589 | repo=repo, group_name=member_id) |
|
586 | 590 | |
|
587 | 591 | changes['deleted'].append({'type': member_type, 'id': member_id, |
|
588 | 592 | 'name': member_name, 'new_perm': perm}) |
|
589 | 593 | return changes |
|
590 | 594 | |
|
591 | 595 | def create_fork(self, form_data, cur_user): |
|
592 | 596 | """ |
|
593 | 597 | Simple wrapper into executing celery task for fork creation |
|
594 | 598 | |
|
595 | 599 | :param form_data: |
|
596 | 600 | :param cur_user: |
|
597 | 601 | """ |
|
598 | 602 | from rhodecode.lib.celerylib import tasks, run_task |
|
599 | 603 | return run_task(tasks.create_repo_fork, form_data, cur_user) |
|
600 | 604 | |
|
601 | 605 | def delete(self, repo, forks=None, fs_remove=True, cur_user=None): |
|
602 | 606 | """ |
|
603 | 607 | Delete given repository, forks parameter defines what do do with |
|
604 | 608 | attached forks. Throws AttachedForksError if deleted repo has attached |
|
605 | 609 | forks |
|
606 | 610 | |
|
607 | 611 | :param repo: |
|
608 | 612 | :param forks: str 'delete' or 'detach' |
|
609 | 613 | :param fs_remove: remove(archive) repo from filesystem |
|
610 | 614 | """ |
|
611 | 615 | if not cur_user: |
|
612 | 616 | cur_user = getattr(get_current_rhodecode_user(), 'username', None) |
|
613 | 617 | repo = self._get_repo(repo) |
|
614 | 618 | if repo: |
|
615 | 619 | if forks == 'detach': |
|
616 | 620 | for r in repo.forks: |
|
617 | 621 | r.fork = None |
|
618 | 622 | self.sa.add(r) |
|
619 | 623 | elif forks == 'delete': |
|
620 | 624 | for r in repo.forks: |
|
621 | 625 | self.delete(r, forks='delete') |
|
622 | 626 | elif [f for f in repo.forks]: |
|
623 | 627 | raise AttachedForksError() |
|
624 | 628 | |
|
625 | 629 | old_repo_dict = repo.get_dict() |
|
626 | 630 | events.trigger(events.RepoPreDeleteEvent(repo)) |
|
627 | 631 | try: |
|
628 | 632 | self.sa.delete(repo) |
|
629 | 633 | if fs_remove: |
|
630 | 634 | self._delete_filesystem_repo(repo) |
|
631 | 635 | else: |
|
632 | 636 | log.debug('skipping removal from filesystem') |
|
633 | 637 | old_repo_dict.update({ |
|
634 | 638 | 'deleted_by': cur_user, |
|
635 | 639 | 'deleted_on': time.time(), |
|
636 | 640 | }) |
|
637 | 641 | log_delete_repository(**old_repo_dict) |
|
638 | 642 | events.trigger(events.RepoDeleteEvent(repo)) |
|
639 | 643 | except Exception: |
|
640 | 644 | log.error(traceback.format_exc()) |
|
641 | 645 | raise |
|
642 | 646 | |
|
def grant_user_permission(self, repo, user, perm):
    """
    Grant permission for user on given repository, or update existing one
    if found

    :param repo: Instance of Repository, repository_id, or repository name
    :param user: Instance of User, user_id or username
    :param perm: Instance of Permission, or permission_name
    :return: the created or updated UserRepoToPerm row
    """
    user = self._get_user(user)
    repo = self._get_repo(repo)
    permission = self._get_perm(perm)

    # check if we have that permission already
    obj = self.sa.query(UserRepoToPerm) \
        .filter(UserRepoToPerm.user == user) \
        .filter(UserRepoToPerm.repository == repo) \
        .scalar()
    if obj is None:
        # create new !
        obj = UserRepoToPerm()
    # (re)assign relationship + permission — also updates an existing row
    obj.repository = repo
    obj.user = user
    obj.permission = permission
    self.sa.add(obj)
    log.debug('Granted perm %s to %s on %s', perm, user, repo)
    action_logger_generic(
        'granted permission: {} to user: {} on repo: {}'.format(
            perm, user, repo), namespace='security.repo')
    return obj
|
def revoke_user_permission(self, repo, user):
    """
    Revoke permission for user on given repository

    :param repo: Instance of Repository, repository_id, or repository name
    :param user: Instance of User, user_id or username
    """

    user = self._get_user(user)
    repo = self._get_repo(repo)

    existing = (
        self.sa.query(UserRepoToPerm)
        .filter(UserRepoToPerm.repository == repo)
        .filter(UserRepoToPerm.user == user)
        .scalar())
    if not existing:
        # nothing granted — revoking is a silent no-op
        return
    self.sa.delete(existing)
    log.debug('Revoked perm on %s on %s', repo, user)
    action_logger_generic(
        'revoked permission from user: {} on repo: {}'.format(
            user, repo), namespace='security.repo')
|
def grant_user_group_permission(self, repo, group_name, perm):
    """
    Grant permission for user group on given repository, or update
    existing one if found

    :param repo: Instance of Repository, repository_id, or repository name
    :param group_name: Instance of UserGroup, users_group_id,
        or user group name
    :param perm: Instance of Permission, or permission_name
    :return: the created or updated UserGroupRepoToPerm row
    """
    repo = self._get_repo(repo)
    group_name = self._get_user_group(group_name)
    permission = self._get_perm(perm)

    # check if we have that permission already
    obj = self.sa.query(UserGroupRepoToPerm) \
        .filter(UserGroupRepoToPerm.users_group == group_name) \
        .filter(UserGroupRepoToPerm.repository == repo) \
        .scalar()

    if obj is None:
        # create new
        obj = UserGroupRepoToPerm()

    # (re)assign relationship + permission — also updates an existing row
    obj.repository = repo
    obj.users_group = group_name
    obj.permission = permission
    self.sa.add(obj)
    log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
    action_logger_generic(
        'granted permission: {} to usergroup: {} on repo: {}'.format(
            perm, group_name, repo), namespace='security.repo')

    return obj
|
def revoke_user_group_permission(self, repo, group_name):
    """
    Revoke permission for user group on given repository

    :param repo: Instance of Repository, repository_id, or repository name
    :param group_name: Instance of UserGroup, users_group_id,
        or user group name
    """
    repo = self._get_repo(repo)
    group_name = self._get_user_group(group_name)

    obj = self.sa.query(UserGroupRepoToPerm) \
        .filter(UserGroupRepoToPerm.repository == repo) \
        .filter(UserGroupRepoToPerm.users_group == group_name) \
        .scalar()
    # silently no-op when no permission row exists
    if obj:
        self.sa.delete(obj)
        log.debug('Revoked perm to %s on %s', repo, group_name)
        action_logger_generic(
            'revoked permission from usergroup: {} on repo: {}'.format(
                group_name, repo), namespace='security.repo')
|
def delete_stats(self, repo_name):
    """
    removes stats for given repo

    :param repo_name: repository instance, id, or name
    """
    repo = self._get_repo(repo_name)
    try:
        stats = (
            self.sa.query(Statistics)
            .filter(Statistics.repository == repo)
            .scalar())
        if stats:
            self.sa.delete(stats)
    except Exception:
        # log and re-raise — callers decide how to handle failure
        log.error(traceback.format_exc())
        raise
|
def add_repo_field(self, repo_name, field_key, field_label, field_value='',
                   field_type='str', field_desc=''):
    """
    Attach a new extra-field definition to the given repository.

    :param repo_name: repository instance, id, or name
    :param field_key: unique key of the field
    :param field_label: human readable label
    :param field_value: initial value
    :param field_type: python type of the stored value
    :param field_desc: description shown next to the field
    :return: the newly created RepositoryField (pending in session)
    """
    target_repo = self._get_repo(repo_name)

    field = RepositoryField()
    field.repository = target_repo
    field.field_key = field_key
    field.field_type = field_type  # python type
    field.field_value = field_value
    field.field_desc = field_desc
    field.field_label = field_label
    self.sa.add(field)
    return field
|
def delete_repo_field(self, repo_name, field_key):
    """Delete the extra field `field_key` from the repository, if present."""
    target_repo = self._get_repo(repo_name)
    field = RepositoryField.get_by_key_name(field_key, target_repo)
    if not field:
        return
    self.sa.delete(field)
|
def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
                            clone_uri=None, repo_store_location=None,
                            use_global_config=False):
    """
    makes repository on filesystem. It's group aware means it'll create
    a repository within a group, and alter the paths accordingly of
    group location

    :param repo_name: plain repository name (must not contain a group path)
    :param repo_type: scm backend alias, e.g. 'hg' or 'git'
    :param repo_group: RepoGroup instance or group path string ('' for root)
    :param clone_uri: optional URL to import the initial content from
    :param repo_store_location: optional explicit path overriding the
        computed store location
    :param use_global_config: when True, do not scope the vcs config to
        this repository
    """
    from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
    from rhodecode.model.scm import ScmModel

    if Repository.NAME_SEP in repo_name:
        raise ValueError(
            'repo_name must not contain groups got `%s`' % repo_name)

    if isinstance(repo_group, RepoGroup):
        new_parent_path = os.sep.join(repo_group.full_path_splitted)
    else:
        new_parent_path = repo_group or ''

    if repo_store_location:
        _paths = [repo_store_location]
    else:
        _paths = [self.repos_path, new_parent_path, repo_name]
    # we need to make it str for mercurial
    repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

    # check if this path is not a repository
    if is_valid_repo(repo_path, self.repos_path):
        raise Exception('This path %s is a valid repository' % repo_path)

    # check if this path is a group
    if is_valid_repo_group(repo_path, self.repos_path):
        raise Exception('This path %s is a valid group' % repo_path)

    log.info('creating repo %s in %s from url: `%s`',
             repo_name, safe_unicode(repo_path),
             obfuscate_url_pw(clone_uri))

    backend = get_backend(repo_type)

    # scope the vcs config to the (group-qualified) repo unless asked not to
    config_repo = None if use_global_config else repo_name
    if config_repo and new_parent_path:
        config_repo = Repository.NAME_SEP.join(
            (new_parent_path, config_repo))
    config = make_db_config(clear_session=False, repo=config_repo)
    config.set('extensions', 'largefiles', '')

    # patch and reset hooks section of UI config to not run any
    # hooks on creating remote repo
    config.clear_section('hooks')

    # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
    if repo_type == 'git':
        repo = backend(
            repo_path, config=config, create=True, src_url=clone_uri,
            bare=True)
    else:
        repo = backend(
            repo_path, config=config, create=True, src_url=clone_uri)

    ScmModel().install_hooks(repo, repo_type=repo_type)

    log.debug('Created repo %s with %s backend',
              safe_unicode(repo_name), safe_unicode(repo_type))
    return repo
|
def _rename_filesystem_repo(self, old, new):
    """
    renames repository on filesystem

    :param old: old name
    :param new: new name
    """
    log.info('renaming repo from %s to %s', old, new)

    source_path = os.path.join(self.repos_path, old)
    target_path = os.path.join(self.repos_path, new)
    # refuse to clobber an existing directory
    if os.path.isdir(target_path):
        raise Exception(
            'Was trying to rename to already existing dir %s' % target_path
        )
    shutil.move(source_path, target_path)
|
def _delete_filesystem_repo(self, repo):
    """
    removes repo from filesystem, the removal is actually made by
    adding a rm__ prefix to the dir, and renaming internal .hg/.git dirs
    so this repository is no longer valid for rhodecode, can be undeleted
    later on by reverting the renames on this repository

    :param repo: repo object
    """
    rm_path = os.path.join(self.repos_path, repo.repo_name)
    repo_group = repo.group
    log.info("Removing repository %s", rm_path)
    # disable hg/git internal that it doesn't get detected as repo
    alias = repo.repo_type

    config = make_db_config(clear_session=False)
    config.set('extensions', 'largefiles', '')
    bare = getattr(repo.scm_instance(config=config), 'bare', False)

    # skip this for bare git repos
    if not bare:
        # disable VCS repo
        vcs_path = os.path.join(rm_path, '.%s' % alias)
        if os.path.exists(vcs_path):
            shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

    # timestamped archive name, e.g. rm__20180101_120000_000042__reponame
    _now = datetime.datetime.now()
    _ms = str(_now.microsecond).rjust(6, '0')
    _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                         repo.just_name)
    if repo_group:
        # if repository is in group, prefix the removal path with the group
        args = repo_group.full_path_splitted + [_d]
        _d = os.path.join(*args)

    if os.path.isdir(rm_path):
        shutil.move(rm_path, os.path.join(self.repos_path, _d))
|
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    different.
    """

    readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
    path_re = re.compile(r'^docs?', re.IGNORECASE)

    # lower value == preferred extension
    default_priorities = {
        None: 0,
        '.text': 2,
        '.txt': 3,
        '.rst': 1,
        '.rest': 2,
        '.md': 1,
        '.mkdn': 2,
        '.mdown': 3,
        '.markdown': 4,
    }

    path_priority = {
        'doc': 0,
        'docs': 1,
    }

    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': ['.rst', '.rest'],
        # BUG FIX: '.mkdn' was listed without its leading dot ('mkdn'),
        # so it could never match an extension captured by readme_re
        # (which always includes the dot — cf. default_priorities keys).
        'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
    }

    def __init__(self, default_renderer=None):
        self._default_renderer = default_renderer
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path='/'):
        """
        Find a readme in the given `commit`, preferring files directly in
        `path` and recursing into doc/docs directories when none matches.
        """
        nodes = commit.get_nodes(path)
        matches = self._match_readmes(nodes)
        matches = self._sort_according_to_priority(matches)
        if matches:
            return matches[0].node

        paths = self._match_paths(nodes)
        paths = self._sort_paths_according_to_priority(paths)
        for path in paths:
            match = self.search(commit, path=path)
            if match:
                return match

        return None

    def _match_readmes(self, nodes):
        # yield a ReadmeMatch for every file node whose basename looks
        # like a readme
        for node in nodes:
            if not node.is_file():
                continue
            path = node.path.rsplit('/', 1)[-1]
            match = self.readme_re.match(path)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        # yield paths of directory nodes named like documentation folders
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.path)
            if match:
                yield node.path

    def _priority(self, extension):
        # files matching the configured renderer sort before all others;
        # within each group the extension-specific priority applies
        renderer_priority = (
            0 if extension in self._renderer_extensions else 1)
        extension_priority = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_priority, extension_priority)

    def _sort_according_to_priority(self, matches):

        def priority_and_path(match):
            return (match.priority, match.path)

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):

        def priority_and_path(path):
            return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)

        return sorted(paths, key=priority_and_path)
|
1019 | 1023 | |
|
class ReadmeMatch:
    """A readme candidate: the file node, its regex match and sort priority."""

    def __init__(self, node, match, priority):
        self.node = node
        self._match = match
        self.priority = priority

    @property
    def path(self):
        """Full repository path of the matched readme file."""
        return self.node.path

    def __repr__(self):
        # BUG FIX: the closing '>' was missing from the repr string
        return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,414 +1,430 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # Copyright (C) 2016-2018 RhodeCode GmbH |
|
4 | 4 | # |
|
5 | 5 | # This program is free software: you can redistribute it and/or modify |
|
6 | 6 | # it under the terms of the GNU Affero General Public License, version 3 |
|
7 | 7 | # (only), as published by the Free Software Foundation. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU Affero General Public License |
|
15 | 15 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
16 | 16 | # |
|
17 | 17 | # This program is dual-licensed. If you wish to learn more about the |
|
18 | 18 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
19 | 19 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
20 | 20 | |
|
21 | 21 | import colander |
|
22 | 22 | import deform.widget |
|
23 | 23 | |
|
24 | 24 | from rhodecode.translation import _ |
|
25 | 25 | from rhodecode.model.validation_schema.utils import convert_to_optgroup |
|
26 | 26 | from rhodecode.model.validation_schema import validators, preparers, types |
|
27 | 27 | |
|
28 | 28 | DEFAULT_LANDING_REF = 'rev:tip' |
|
29 | 29 | |
|
30 | 30 | |
|
def get_group_and_repo(repo_name):
    """Split `repo_name` into (name without group, parent group name,
    parent group instance) using the repo-group model."""
    from rhodecode.model.repo_group import RepoGroupModel
    model = RepoGroupModel()
    return model._get_group_name_and_parent(repo_name, get_object=True)
|
35 | 35 | |
|
36 | 36 | |
|
def get_repo_group(repo_group_id):
    """Return (RepoGroup for `repo_group_id`, the choices separator)."""
    from rhodecode.model.repo_group import RepoGroup
    group = RepoGroup.get(repo_group_id)
    return group, RepoGroup.CHOICES_SEPARATOR
|
40 | 40 | |
|
41 | 41 | |
|
@colander.deferred
def deferred_repo_type_validator(node, kw):
    """Restrict `repo_type` to the backends bound via `repo_type_options`."""
    options = kw.get('repo_type_options', [])
    # list(options) replaces the pass-through comprehension [x for x in options]
    return colander.OneOf(list(options))
|
46 | 46 | |
|
47 | 47 | |
|
@colander.deferred
def deferred_repo_owner_validator(node, kw):
    """Validator factory: ensure the given owner username exists."""

    def repo_owner_validator(node, value):
        from rhodecode.model.db import User
        existing = User.get_by_username(value)
        if not existing:
            # NOTE(review): message says `id` but the lookup is by username;
            # wording (and grammar) left untouched to preserve translations
            msg = _(u'Repo owner with id `{}` does not exists').format(value)
            raise colander.Invalid(node, msg)

    return repo_owner_validator
|
59 | 59 | |
|
60 | 60 | |
|
@colander.deferred
def deferred_landing_ref_validator(node, kw):
    """Restrict the landing ref to the bound `repo_ref_options` choices."""
    options = kw.get(
        'repo_ref_options', [DEFAULT_LANDING_REF])
    # list(options) replaces the pass-through comprehension [x for x in options]
    return colander.OneOf(list(options))
|
66 | 66 | |
|
67 | 67 | |
|
@colander.deferred
def deferred_sync_uri_validator(node, kw):
    """Build a clone/sync URI validator for the bound `repo_type`."""
    return validators.CloneUriValidator(kw.get('repo_type'))
|
73 | 73 | |
|
74 | 74 | |
|
@colander.deferred
def deferred_landing_ref_widget(node, kw):
    """Select2 widget listing the bound landing refs, grouped by ref type."""
    ref_items = kw.get(
        'repo_ref_items', [(DEFAULT_LANDING_REF, DEFAULT_LANDING_REF)])
    grouped = convert_to_optgroup(ref_items)
    return deform.widget.Select2Widget(values=grouped)
|
81 | 81 | |
|
82 | 82 | |
|
@colander.deferred
def deferred_fork_of_validator(node, kw):
    """Validator factory: `repo_fork_of` must name an existing repository
    other than the repository being edited."""
    old_values = kw.get('old_values') or {}

    def fork_of_validator(node, value):
        from rhodecode.model.db import Repository, RepoGroup
        existing = Repository.get_by_repo_name(value)
        if not existing:
            msg = _(u'Fork with id `{}` does not exists').format(value)
            raise colander.Invalid(node, msg)
        elif old_values['repo_name'] == existing.repo_name:
            # NOTE(review): this message has no `{}` placeholder, so the
            # trailing .format(value) is a no-op — confirm intent
            msg = _(u'Cannot set fork of '
                    u'parameter of this repository to itself').format(value)
            raise colander.Invalid(node, msg)

    return fork_of_validator
|
99 | 99 | |
|
100 | 100 | |
|
@colander.deferred
def deferred_can_write_to_group_validator(node, kw):
    """Validator factory bound with the requesting `user` and `old_values`;
    checks write access to the target repository group (or root)."""
    request_user = kw.get('user')
    old_values = kw.get('old_values') or {}

    def can_write_to_group_validator(node, value):
        """
        Checks if given repo path is writable by user. This includes checks if
        user is allowed to create repositories under root path or under
        repo group paths
        """

        from rhodecode.lib.auth import (
            HasPermissionAny, HasRepoGroupPermissionAny)
        from rhodecode.model.repo_group import RepoGroupModel

        messages = {
            'invalid_repo_group':
                _(u"Repository group `{}` does not exist"),
            # permissions denied we expose as not existing, to prevent
            # resource discovery
            'permission_denied':
                _(u"Repository group `{}` does not exist"),
            'permission_denied_root':
                _(u"You do not have the permission to store "
                  u"repositories in the root location.")
        }

        # `value` is the mapping produced by GroupType deserialization
        value = value['repo_group_name']

        is_root_location = value is types.RootLocation
        # NOT initialized validators, we must call them
        can_create_repos_at_root = HasPermissionAny(
            'hg.admin', 'hg.create.repository')

        # if values is root location, we simply need to check if we can write
        # to root location !
        if is_root_location:
            if can_create_repos_at_root(user=request_user):
                # we can create repo group inside tool-level. No more checks
                # are required
                return
            else:
                # "fake" node name as repo_name, otherwise we oddly report
                # the error as if it was coming form repo_group
                # however repo_group is empty when using root location.
                node.name = 'repo_name'
                raise colander.Invalid(node, messages['permission_denied_root'])

        # parent group not exists ? throw an error
        repo_group = RepoGroupModel().get_by_group_name(value)
        if value and not repo_group:
            raise colander.Invalid(
                node, messages['invalid_repo_group'].format(value))

        # NOTE(review): if `value` were falsy here (non-root), repo_group is
        # None and the next line raises AttributeError — confirm a non-root
        # `value` is always a non-empty group name
        gr_name = repo_group.group_name

        # create repositories with write permission on group is set to true
        create_on_write = HasPermissionAny(
            'hg.create.write_on_repogroup.true')(user=request_user)

        group_admin = HasRepoGroupPermissionAny('group.admin')(
            gr_name, 'can write into group validator', user=request_user)
        group_write = HasRepoGroupPermissionAny('group.write')(
            gr_name, 'can write into group validator', user=request_user)

        forbidden = not (group_admin or (group_write and create_on_write))

        # TODO: handling of old values, and detecting no-change in path
        # to skip permission checks in such cases. This only needs to be
        # implemented if we use this schema in forms as well

        # gid = (old_data['repo_group'].get('group_id')
        #        if (old_data and 'repo_group' in old_data) else None)
        # value_changed = gid != safe_int(value)
        # new = not old_data

        # do check if we changed the value, there's a case that someone got
        # revoked write permissions to a repository, he still created, we
        # don't need to check permission if he didn't change the value of
        # groups in form box
        # if value_changed or new:
        #     # parent group need to be existing
        # TODO: ENDS HERE

        if repo_group and forbidden:
            msg = messages['permission_denied'].format(value)
            raise colander.Invalid(node, msg)

    return can_write_to_group_validator
|
191 | 191 | |
|
192 | 192 | |
|
@colander.deferred
def deferred_unique_name_validator(node, kw):
    """Validator factory: reject names already used by a repository or a
    repository group (skipped when the name is unchanged on edit)."""
    # NOTE(review): request_user is currently unused by the inner validator
    request_user = kw.get('user')
    old_values = kw.get('old_values') or {}

    def unique_name_validator(node, value):
        from rhodecode.model.db import Repository, RepoGroup
        # only enforce uniqueness when the name actually changed
        name_changed = value != old_values.get('repo_name')

        existing = Repository.get_by_repo_name(value)
        if name_changed and existing:
            msg = _(u'Repository with name `{}` already exists').format(value)
            raise colander.Invalid(node, msg)

        # repository names must not clash with repository group names either
        existing_group = RepoGroup.get_by_group_name(value)
        if name_changed and existing_group:
            msg = _(u'Repository group with name `{}` already exists').format(
                value)
            raise colander.Invalid(node, msg)
    return unique_name_validator
|
213 | 213 | |
|
214 | 214 | |
|
@colander.deferred
def deferred_repo_name_validator(node, kw):
    """Combined validator: forbid a `.git` suffix and enforce valid names."""
    def _reject_git_suffix(node, value):
        if not value.endswith('.git'):
            return
        msg = _('Repository name cannot end with .git')
        raise colander.Invalid(node, msg)

    return colander.All(
        _reject_git_suffix, validators.valid_name_validator)
|
223 | 223 | |
|
224 | 224 | |
|
@colander.deferred
def deferred_repo_group_validator(node, kw):
    """Restrict `repo_group` to the bound `repo_repo_group_options`."""
    options = kw.get(
        'repo_repo_group_options')
    # list(options) replaces the pass-through comprehension [x for x in options]
    return colander.OneOf(list(options))
|
230 | 230 | |
|
231 | 231 | |
|
@colander.deferred
def deferred_repo_group_widget(node, kw):
    """Select2 widget of the bound repository-group choices."""
    group_items = kw.get('repo_repo_group_items')
    return deform.widget.Select2Widget(values=group_items)
|
236 | 236 | |
|
237 | 237 | |
|
class GroupType(colander.Mapping):
    """Colander type that accepts a (possibly group-qualified) repo name and
    deserializes it into a mapping describing the parent-group placement."""

    def _validate(self, node, value):
        # wrap the incoming plain string into the mapping colander expects
        try:
            return dict(repo_group_name=value)
        except Exception as e:
            # NOTE(review): `${val}` mixes template-style placeholders with
            # str.format — the rendered message keeps a literal `$` — confirm
            raise colander.Invalid(
                node, '"${val}" is not a mapping type: ${err}'.format(
                    val=value, err=e))

    def deserialize(self, node, cstruct):
        if cstruct is colander.null:
            return cstruct

        appstruct = super(GroupType, self).deserialize(node, cstruct)
        validated_name = appstruct['repo_group_name']

        # inject group based on once deserialized data
        (repo_name_without_group,
         parent_group_name,
         parent_group) = get_group_and_repo(validated_name)

        appstruct['repo_name_with_group'] = validated_name
        appstruct['repo_name_without_group'] = repo_name_without_group
        # root location sentinel when no parent group is present
        appstruct['repo_group_name'] = parent_group_name or types.RootLocation

        if parent_group:
            appstruct['repo_group_id'] = parent_group.group_id

        return appstruct
|
267 | 267 | |
|
268 | 268 | |
|
class GroupSchema(colander.SchemaNode):
    """Base schema node using :class:`GroupType` with a deferred check that
    the bound user may write into the resolved repository group."""
    schema_type = GroupType
    validator = deferred_can_write_to_group_validator
    missing = colander.null
|
273 | 273 | |
|
274 | 274 | |
|
class RepoGroup(GroupSchema):
    """Mapping for a repository's parent group; additional keys are injected
    by :meth:`GroupType.deserialize`."""
    repo_group_name = colander.SchemaNode(
        types.GroupNameType())
    # populated from the resolved parent group, when one exists
    repo_group_id = colander.SchemaNode(
        colander.String(), missing=None)
    repo_name_without_group = colander.SchemaNode(
        colander.String(), missing=None)
|
282 | 282 | |
|
283 | 283 | |
|
class RepoGroupAccessSchema(colander.MappingSchema):
    """Second-pass schema: validates group placement/permissions only."""
    repo_group = RepoGroup()
|
286 | 286 | |
|
287 | 287 | |
|
class RepoNameUniqueSchema(colander.MappingSchema):
    """Third-pass schema: validates repo-name uniqueness only."""
    unique_repo_name = colander.SchemaNode(
        colander.String(),
        validator=deferred_unique_name_validator)
|
292 | 292 | |
|
293 | 293 | |
|
class RepoSchema(colander.MappingSchema):
    """Validation schema for creating a repository. Deserialization runs in
    three passes: field validation, group-permission validation, and
    name-uniqueness validation (see :meth:`deserialize`)."""

    repo_name = colander.SchemaNode(
        types.RepoNameType(),
        validator=deferred_repo_name_validator)

    repo_type = colander.SchemaNode(
        colander.String(),
        validator=deferred_repo_type_validator)

    # owner is given as a username and must exist
    repo_owner = colander.SchemaNode(
        colander.String(),
        validator=deferred_repo_owner_validator,
        widget=deform.widget.TextInputWidget())

    repo_description = colander.SchemaNode(
        colander.String(), missing='',
        widget=deform.widget.TextAreaWidget())

    repo_landing_commit_ref = colander.SchemaNode(
        colander.String(),
        validator=deferred_landing_ref_validator,
        preparers=[preparers.strip_preparer],
        missing=DEFAULT_LANDING_REF,
        widget=deferred_landing_ref_widget)

    # optional pull URI for syncing content from a remote
    repo_clone_uri = colander.SchemaNode(
        colander.String(),
        validator=colander.All(colander.Length(min=1)),
        preparers=[preparers.strip_preparer],
        missing='')

    # optional push URI mirroring the clone-uri handling
    repo_push_uri = colander.SchemaNode(
        colander.String(),
        validator=colander.All(colander.Length(min=1)),
        preparers=[preparers.strip_preparer],
        missing='')

    repo_fork_of = colander.SchemaNode(
        colander.String(),
        validator=deferred_fork_of_validator,
        missing=None)

    # boolean feature flags, submitted as strings from the form layer
    repo_private = colander.SchemaNode(
        types.StringBooleanType(),
        missing=False, widget=deform.widget.CheckboxWidget())
    repo_copy_permissions = colander.SchemaNode(
        types.StringBooleanType(),
        missing=False, widget=deform.widget.CheckboxWidget())
    repo_enable_statistics = colander.SchemaNode(
        types.StringBooleanType(),
        missing=False, widget=deform.widget.CheckboxWidget())
    repo_enable_downloads = colander.SchemaNode(
        types.StringBooleanType(),
        missing=False, widget=deform.widget.CheckboxWidget())
    repo_enable_locking = colander.SchemaNode(
        types.StringBooleanType(),
        missing=False, widget=deform.widget.CheckboxWidget())

    def deserialize(self, cstruct):
        """
        Custom deserialize that allows to chain validation, and verify
        permissions, and as last step uniqueness
        """

        # first pass, to validate given data
        appstruct = super(RepoSchema, self).deserialize(cstruct)
        validated_name = appstruct['repo_name']

        # second pass to validate permissions to repo_group
        second = RepoGroupAccessSchema().bind(**self.bindings)
        appstruct_second = second.deserialize({'repo_group': validated_name})
        # save result
        appstruct['repo_group'] = appstruct_second['repo_group']

        # thirds to validate uniqueness
        third = RepoNameUniqueSchema().bind(**self.bindings)
        third.deserialize({'unique_repo_name': validated_name})

        return appstruct
|
368 | 374 | |
|
369 | 375 | |
|
370 | 376 | class RepoSettingsSchema(RepoSchema): |
|
371 | 377 | repo_group = colander.SchemaNode( |
|
372 | 378 | colander.Integer(), |
|
373 | 379 | validator=deferred_repo_group_validator, |
|
374 | 380 | widget=deferred_repo_group_widget, |
|
375 | 381 | missing='') |
|
376 | 382 | |
|
377 | 383 | repo_clone_uri_change = colander.SchemaNode( |
|
378 | 384 | colander.String(), |
|
379 | 385 | missing='NEW') |
|
380 | 386 | |
|
381 | 387 | repo_clone_uri = colander.SchemaNode( |
|
382 | 388 | colander.String(), |
|
383 | 389 | preparers=[preparers.strip_preparer], |
|
384 |
validator=deferred_c |
|
|
390 | validator=deferred_sync_uri_validator, | |
|
391 | missing='') | |
|
392 | ||
|
393 | repo_push_uri_change = colander.SchemaNode( | |
|
394 | colander.String(), | |
|
395 | missing='NEW') | |
|
396 | ||
|
397 | repo_push_uri = colander.SchemaNode( | |
|
398 | colander.String(), | |
|
399 | preparers=[preparers.strip_preparer], | |
|
400 | validator=deferred_sync_uri_validator, | |
|
385 | 401 | missing='') |
|
386 | 402 | |
|
387 | 403 | def deserialize(self, cstruct): |
|
388 | 404 | """ |
|
389 | 405 | Custom deserialize that allows to chain validation, and verify |
|
390 | 406 | permissions, and as last step uniqueness |
|
391 | 407 | """ |
|
392 | 408 | |
|
393 | 409 | # first pass, to validate given data |
|
394 | 410 | appstruct = super(RepoSchema, self).deserialize(cstruct) |
|
395 | 411 | validated_name = appstruct['repo_name'] |
|
396 | 412 | # because of repoSchema adds repo-group as an ID, we inject it as |
|
397 | 413 | # full name here because validators require it, it's unwrapped later |
|
398 | 414 | # so it's safe to use and final name is going to be without group anyway |
|
399 | 415 | |
|
400 | 416 | group, separator = get_repo_group(appstruct['repo_group']) |
|
401 | 417 | if group: |
|
402 | 418 | validated_name = separator.join([group.group_name, validated_name]) |
|
403 | 419 | |
|
404 | 420 | # second pass to validate permissions to repo_group |
|
405 | 421 | second = RepoGroupAccessSchema().bind(**self.bindings) |
|
406 | 422 | appstruct_second = second.deserialize({'repo_group': validated_name}) |
|
407 | 423 | # save result |
|
408 | 424 | appstruct['repo_group'] = appstruct_second['repo_group'] |
|
409 | 425 | |
|
410 | 426 | # thirds to validate uniqueness |
|
411 | 427 | third = RepoNameUniqueSchema().bind(**self.bindings) |
|
412 | 428 | third.deserialize({'unique_repo_name': validated_name}) |
|
413 | 429 | |
|
414 | 430 | return appstruct |
@@ -1,319 +1,320 b'' | |||
|
1 | 1 | |
|
2 | 2 | /****************************************************************************** |
|
3 | 3 | * * |
|
4 | 4 | * DO NOT CHANGE THIS FILE MANUALLY * |
|
5 | 5 | * * |
|
6 | 6 | * * |
|
7 | 7 | * This file is automatically generated when the app starts up with * |
|
8 | 8 | * generate_js_files = true * |
|
9 | 9 | * * |
|
10 | 10 | * To add a route here pass jsroute=True to the route definition in the app * |
|
11 | 11 | * * |
|
12 | 12 | ******************************************************************************/ |
|
13 | 13 | function registerRCRoutes() { |
|
14 | 14 | // routes registration |
|
15 | 15 | pyroutes.register('favicon', '/favicon.ico', []); |
|
16 | 16 | pyroutes.register('robots', '/robots.txt', []); |
|
17 | 17 | pyroutes.register('auth_home', '/_admin/auth*traverse', []); |
|
18 | 18 | pyroutes.register('global_integrations_new', '/_admin/integrations/new', []); |
|
19 | 19 | pyroutes.register('global_integrations_home', '/_admin/integrations', []); |
|
20 | 20 | pyroutes.register('global_integrations_list', '/_admin/integrations/%(integration)s', ['integration']); |
|
21 | 21 | pyroutes.register('global_integrations_create', '/_admin/integrations/%(integration)s/new', ['integration']); |
|
22 | 22 | pyroutes.register('global_integrations_edit', '/_admin/integrations/%(integration)s/%(integration_id)s', ['integration', 'integration_id']); |
|
23 | 23 | pyroutes.register('repo_group_integrations_home', '/%(repo_group_name)s/_settings/integrations', ['repo_group_name']); |
|
24 | 24 | pyroutes.register('repo_group_integrations_new', '/%(repo_group_name)s/_settings/integrations/new', ['repo_group_name']); |
|
25 | 25 | pyroutes.register('repo_group_integrations_list', '/%(repo_group_name)s/_settings/integrations/%(integration)s', ['repo_group_name', 'integration']); |
|
26 | 26 | pyroutes.register('repo_group_integrations_create', '/%(repo_group_name)s/_settings/integrations/%(integration)s/new', ['repo_group_name', 'integration']); |
|
27 | 27 | pyroutes.register('repo_group_integrations_edit', '/%(repo_group_name)s/_settings/integrations/%(integration)s/%(integration_id)s', ['repo_group_name', 'integration', 'integration_id']); |
|
28 | 28 | pyroutes.register('repo_integrations_home', '/%(repo_name)s/settings/integrations', ['repo_name']); |
|
29 | 29 | pyroutes.register('repo_integrations_new', '/%(repo_name)s/settings/integrations/new', ['repo_name']); |
|
30 | 30 | pyroutes.register('repo_integrations_list', '/%(repo_name)s/settings/integrations/%(integration)s', ['repo_name', 'integration']); |
|
31 | 31 | pyroutes.register('repo_integrations_create', '/%(repo_name)s/settings/integrations/%(integration)s/new', ['repo_name', 'integration']); |
|
32 | 32 | pyroutes.register('repo_integrations_edit', '/%(repo_name)s/settings/integrations/%(integration)s/%(integration_id)s', ['repo_name', 'integration', 'integration_id']); |
|
33 | 33 | pyroutes.register('ops_ping', '/_admin/ops/ping', []); |
|
34 | 34 | pyroutes.register('ops_error_test', '/_admin/ops/error', []); |
|
35 | 35 | pyroutes.register('ops_redirect_test', '/_admin/ops/redirect', []); |
|
36 | 36 | pyroutes.register('ops_ping_legacy', '/_admin/ping', []); |
|
37 | 37 | pyroutes.register('ops_error_test_legacy', '/_admin/error_test', []); |
|
38 | 38 | pyroutes.register('admin_home', '/_admin', []); |
|
39 | 39 | pyroutes.register('admin_audit_logs', '/_admin/audit_logs', []); |
|
40 | 40 | pyroutes.register('admin_audit_log_entry', '/_admin/audit_logs/%(audit_log_id)s', ['audit_log_id']); |
|
41 | 41 | pyroutes.register('pull_requests_global_0', '/_admin/pull_requests/%(pull_request_id)s', ['pull_request_id']); |
|
42 | 42 | pyroutes.register('pull_requests_global_1', '/_admin/pull-requests/%(pull_request_id)s', ['pull_request_id']); |
|
43 | 43 | pyroutes.register('pull_requests_global', '/_admin/pull-request/%(pull_request_id)s', ['pull_request_id']); |
|
44 | 44 | pyroutes.register('admin_settings_open_source', '/_admin/settings/open_source', []); |
|
45 | 45 | pyroutes.register('admin_settings_vcs_svn_generate_cfg', '/_admin/settings/vcs/svn_generate_cfg', []); |
|
46 | 46 | pyroutes.register('admin_settings_system', '/_admin/settings/system', []); |
|
47 | 47 | pyroutes.register('admin_settings_system_update', '/_admin/settings/system/updates', []); |
|
48 | 48 | pyroutes.register('admin_settings_sessions', '/_admin/settings/sessions', []); |
|
49 | 49 | pyroutes.register('admin_settings_sessions_cleanup', '/_admin/settings/sessions/cleanup', []); |
|
50 | 50 | pyroutes.register('admin_settings_process_management', '/_admin/settings/process_management', []); |
|
51 | 51 | pyroutes.register('admin_settings_process_management_data', '/_admin/settings/process_management/data', []); |
|
52 | 52 | pyroutes.register('admin_settings_process_management_signal', '/_admin/settings/process_management/signal', []); |
|
53 | 53 | pyroutes.register('admin_settings_process_management_master_signal', '/_admin/settings/process_management/master_signal', []); |
|
54 | 54 | pyroutes.register('admin_defaults_repositories', '/_admin/defaults/repositories', []); |
|
55 | 55 | pyroutes.register('admin_defaults_repositories_update', '/_admin/defaults/repositories/update', []); |
|
56 | 56 | pyroutes.register('admin_settings', '/_admin/settings', []); |
|
57 | 57 | pyroutes.register('admin_settings_update', '/_admin/settings/update', []); |
|
58 | 58 | pyroutes.register('admin_settings_global', '/_admin/settings/global', []); |
|
59 | 59 | pyroutes.register('admin_settings_global_update', '/_admin/settings/global/update', []); |
|
60 | 60 | pyroutes.register('admin_settings_vcs', '/_admin/settings/vcs', []); |
|
61 | 61 | pyroutes.register('admin_settings_vcs_update', '/_admin/settings/vcs/update', []); |
|
62 | 62 | pyroutes.register('admin_settings_vcs_svn_pattern_delete', '/_admin/settings/vcs/svn_pattern_delete', []); |
|
63 | 63 | pyroutes.register('admin_settings_mapping', '/_admin/settings/mapping', []); |
|
64 | 64 | pyroutes.register('admin_settings_mapping_update', '/_admin/settings/mapping/update', []); |
|
65 | 65 | pyroutes.register('admin_settings_visual', '/_admin/settings/visual', []); |
|
66 | 66 | pyroutes.register('admin_settings_visual_update', '/_admin/settings/visual/update', []); |
|
67 | 67 | pyroutes.register('admin_settings_issuetracker', '/_admin/settings/issue-tracker', []); |
|
68 | 68 | pyroutes.register('admin_settings_issuetracker_update', '/_admin/settings/issue-tracker/update', []); |
|
69 | 69 | pyroutes.register('admin_settings_issuetracker_test', '/_admin/settings/issue-tracker/test', []); |
|
70 | 70 | pyroutes.register('admin_settings_issuetracker_delete', '/_admin/settings/issue-tracker/delete', []); |
|
71 | 71 | pyroutes.register('admin_settings_email', '/_admin/settings/email', []); |
|
72 | 72 | pyroutes.register('admin_settings_email_update', '/_admin/settings/email/update', []); |
|
73 | 73 | pyroutes.register('admin_settings_hooks', '/_admin/settings/hooks', []); |
|
74 | 74 | pyroutes.register('admin_settings_hooks_update', '/_admin/settings/hooks/update', []); |
|
75 | 75 | pyroutes.register('admin_settings_hooks_delete', '/_admin/settings/hooks/delete', []); |
|
76 | 76 | pyroutes.register('admin_settings_search', '/_admin/settings/search', []); |
|
77 | 77 | pyroutes.register('admin_settings_labs', '/_admin/settings/labs', []); |
|
78 | 78 | pyroutes.register('admin_settings_labs_update', '/_admin/settings/labs/update', []); |
|
79 | 79 | pyroutes.register('admin_permissions_application', '/_admin/permissions/application', []); |
|
80 | 80 | pyroutes.register('admin_permissions_application_update', '/_admin/permissions/application/update', []); |
|
81 | 81 | pyroutes.register('admin_permissions_global', '/_admin/permissions/global', []); |
|
82 | 82 | pyroutes.register('admin_permissions_global_update', '/_admin/permissions/global/update', []); |
|
83 | 83 | pyroutes.register('admin_permissions_object', '/_admin/permissions/object', []); |
|
84 | 84 | pyroutes.register('admin_permissions_object_update', '/_admin/permissions/object/update', []); |
|
85 | 85 | pyroutes.register('admin_permissions_ips', '/_admin/permissions/ips', []); |
|
86 | 86 | pyroutes.register('admin_permissions_overview', '/_admin/permissions/overview', []); |
|
87 | 87 | pyroutes.register('admin_permissions_auth_token_access', '/_admin/permissions/auth_token_access', []); |
|
88 | 88 | pyroutes.register('admin_permissions_ssh_keys', '/_admin/permissions/ssh_keys', []); |
|
89 | 89 | pyroutes.register('admin_permissions_ssh_keys_data', '/_admin/permissions/ssh_keys/data', []); |
|
90 | 90 | pyroutes.register('admin_permissions_ssh_keys_update', '/_admin/permissions/ssh_keys/update', []); |
|
91 | 91 | pyroutes.register('users', '/_admin/users', []); |
|
92 | 92 | pyroutes.register('users_data', '/_admin/users_data', []); |
|
93 | 93 | pyroutes.register('users_create', '/_admin/users/create', []); |
|
94 | 94 | pyroutes.register('users_new', '/_admin/users/new', []); |
|
95 | 95 | pyroutes.register('user_edit', '/_admin/users/%(user_id)s/edit', ['user_id']); |
|
96 | 96 | pyroutes.register('user_edit_advanced', '/_admin/users/%(user_id)s/edit/advanced', ['user_id']); |
|
97 | 97 | pyroutes.register('user_edit_global_perms', '/_admin/users/%(user_id)s/edit/global_permissions', ['user_id']); |
|
98 | 98 | pyroutes.register('user_edit_global_perms_update', '/_admin/users/%(user_id)s/edit/global_permissions/update', ['user_id']); |
|
99 | 99 | pyroutes.register('user_update', '/_admin/users/%(user_id)s/update', ['user_id']); |
|
100 | 100 | pyroutes.register('user_delete', '/_admin/users/%(user_id)s/delete', ['user_id']); |
|
101 | 101 | pyroutes.register('user_force_password_reset', '/_admin/users/%(user_id)s/password_reset', ['user_id']); |
|
102 | 102 | pyroutes.register('user_create_personal_repo_group', '/_admin/users/%(user_id)s/create_repo_group', ['user_id']); |
|
103 | 103 | pyroutes.register('edit_user_auth_tokens', '/_admin/users/%(user_id)s/edit/auth_tokens', ['user_id']); |
|
104 | 104 | pyroutes.register('edit_user_auth_tokens_add', '/_admin/users/%(user_id)s/edit/auth_tokens/new', ['user_id']); |
|
105 | 105 | pyroutes.register('edit_user_auth_tokens_delete', '/_admin/users/%(user_id)s/edit/auth_tokens/delete', ['user_id']); |
|
106 | 106 | pyroutes.register('edit_user_ssh_keys', '/_admin/users/%(user_id)s/edit/ssh_keys', ['user_id']); |
|
107 | 107 | pyroutes.register('edit_user_ssh_keys_generate_keypair', '/_admin/users/%(user_id)s/edit/ssh_keys/generate', ['user_id']); |
|
108 | 108 | pyroutes.register('edit_user_ssh_keys_add', '/_admin/users/%(user_id)s/edit/ssh_keys/new', ['user_id']); |
|
109 | 109 | pyroutes.register('edit_user_ssh_keys_delete', '/_admin/users/%(user_id)s/edit/ssh_keys/delete', ['user_id']); |
|
110 | 110 | pyroutes.register('edit_user_emails', '/_admin/users/%(user_id)s/edit/emails', ['user_id']); |
|
111 | 111 | pyroutes.register('edit_user_emails_add', '/_admin/users/%(user_id)s/edit/emails/new', ['user_id']); |
|
112 | 112 | pyroutes.register('edit_user_emails_delete', '/_admin/users/%(user_id)s/edit/emails/delete', ['user_id']); |
|
113 | 113 | pyroutes.register('edit_user_ips', '/_admin/users/%(user_id)s/edit/ips', ['user_id']); |
|
114 | 114 | pyroutes.register('edit_user_ips_add', '/_admin/users/%(user_id)s/edit/ips/new', ['user_id']); |
|
115 | 115 | pyroutes.register('edit_user_ips_delete', '/_admin/users/%(user_id)s/edit/ips/delete', ['user_id']); |
|
116 | 116 | pyroutes.register('edit_user_perms_summary', '/_admin/users/%(user_id)s/edit/permissions_summary', ['user_id']); |
|
117 | 117 | pyroutes.register('edit_user_perms_summary_json', '/_admin/users/%(user_id)s/edit/permissions_summary/json', ['user_id']); |
|
118 | 118 | pyroutes.register('edit_user_groups_management', '/_admin/users/%(user_id)s/edit/groups_management', ['user_id']); |
|
119 | 119 | pyroutes.register('edit_user_groups_management_updates', '/_admin/users/%(user_id)s/edit/edit_user_groups_management/updates', ['user_id']); |
|
120 | 120 | pyroutes.register('edit_user_audit_logs', '/_admin/users/%(user_id)s/edit/audit', ['user_id']); |
|
121 | 121 | pyroutes.register('user_groups', '/_admin/user_groups', []); |
|
122 | 122 | pyroutes.register('user_groups_data', '/_admin/user_groups_data', []); |
|
123 | 123 | pyroutes.register('user_groups_new', '/_admin/user_groups/new', []); |
|
124 | 124 | pyroutes.register('user_groups_create', '/_admin/user_groups/create', []); |
|
125 | 125 | pyroutes.register('repos', '/_admin/repos', []); |
|
126 | 126 | pyroutes.register('repo_new', '/_admin/repos/new', []); |
|
127 | 127 | pyroutes.register('repo_create', '/_admin/repos/create', []); |
|
128 | 128 | pyroutes.register('repo_groups', '/_admin/repo_groups', []); |
|
129 | 129 | pyroutes.register('repo_group_new', '/_admin/repo_group/new', []); |
|
130 | 130 | pyroutes.register('repo_group_create', '/_admin/repo_group/create', []); |
|
131 | 131 | pyroutes.register('channelstream_connect', '/_admin/channelstream/connect', []); |
|
132 | 132 | pyroutes.register('channelstream_subscribe', '/_admin/channelstream/subscribe', []); |
|
133 | 133 | pyroutes.register('channelstream_proxy', '/_channelstream', []); |
|
134 | 134 | pyroutes.register('login', '/_admin/login', []); |
|
135 | 135 | pyroutes.register('logout', '/_admin/logout', []); |
|
136 | 136 | pyroutes.register('register', '/_admin/register', []); |
|
137 | 137 | pyroutes.register('reset_password', '/_admin/password_reset', []); |
|
138 | 138 | pyroutes.register('reset_password_confirmation', '/_admin/password_reset_confirmation', []); |
|
139 | 139 | pyroutes.register('home', '/', []); |
|
140 | 140 | pyroutes.register('user_autocomplete_data', '/_users', []); |
|
141 | 141 | pyroutes.register('user_group_autocomplete_data', '/_user_groups', []); |
|
142 | 142 | pyroutes.register('repo_list_data', '/_repos', []); |
|
143 | 143 | pyroutes.register('goto_switcher_data', '/_goto_data', []); |
|
144 | 144 | pyroutes.register('journal', '/_admin/journal', []); |
|
145 | 145 | pyroutes.register('journal_rss', '/_admin/journal/rss', []); |
|
146 | 146 | pyroutes.register('journal_atom', '/_admin/journal/atom', []); |
|
147 | 147 | pyroutes.register('journal_public', '/_admin/public_journal', []); |
|
148 | 148 | pyroutes.register('journal_public_atom', '/_admin/public_journal/atom', []); |
|
149 | 149 | pyroutes.register('journal_public_atom_old', '/_admin/public_journal_atom', []); |
|
150 | 150 | pyroutes.register('journal_public_rss', '/_admin/public_journal/rss', []); |
|
151 | 151 | pyroutes.register('journal_public_rss_old', '/_admin/public_journal_rss', []); |
|
152 | 152 | pyroutes.register('toggle_following', '/_admin/toggle_following', []); |
|
153 | 153 | pyroutes.register('repo_creating', '/%(repo_name)s/repo_creating', ['repo_name']); |
|
154 | 154 | pyroutes.register('repo_creating_check', '/%(repo_name)s/repo_creating_check', ['repo_name']); |
|
155 | 155 | pyroutes.register('repo_summary_explicit', '/%(repo_name)s/summary', ['repo_name']); |
|
156 | 156 | pyroutes.register('repo_summary_commits', '/%(repo_name)s/summary-commits', ['repo_name']); |
|
157 | 157 | pyroutes.register('repo_commit', '/%(repo_name)s/changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
158 | 158 | pyroutes.register('repo_commit_children', '/%(repo_name)s/changeset_children/%(commit_id)s', ['repo_name', 'commit_id']); |
|
159 | 159 | pyroutes.register('repo_commit_parents', '/%(repo_name)s/changeset_parents/%(commit_id)s', ['repo_name', 'commit_id']); |
|
160 | 160 | pyroutes.register('repo_commit_raw', '/%(repo_name)s/changeset-diff/%(commit_id)s', ['repo_name', 'commit_id']); |
|
161 | 161 | pyroutes.register('repo_commit_patch', '/%(repo_name)s/changeset-patch/%(commit_id)s', ['repo_name', 'commit_id']); |
|
162 | 162 | pyroutes.register('repo_commit_download', '/%(repo_name)s/changeset-download/%(commit_id)s', ['repo_name', 'commit_id']); |
|
163 | 163 | pyroutes.register('repo_commit_data', '/%(repo_name)s/changeset-data/%(commit_id)s', ['repo_name', 'commit_id']); |
|
164 | 164 | pyroutes.register('repo_commit_comment_create', '/%(repo_name)s/changeset/%(commit_id)s/comment/create', ['repo_name', 'commit_id']); |
|
165 | 165 | pyroutes.register('repo_commit_comment_preview', '/%(repo_name)s/changeset/%(commit_id)s/comment/preview', ['repo_name', 'commit_id']); |
|
166 | 166 | pyroutes.register('repo_commit_comment_delete', '/%(repo_name)s/changeset/%(commit_id)s/comment/%(comment_id)s/delete', ['repo_name', 'commit_id', 'comment_id']); |
|
167 | 167 | pyroutes.register('repo_commit_raw_deprecated', '/%(repo_name)s/raw-changeset/%(commit_id)s', ['repo_name', 'commit_id']); |
|
168 | 168 | pyroutes.register('repo_archivefile', '/%(repo_name)s/archive/%(fname)s', ['repo_name', 'fname']); |
|
169 | 169 | pyroutes.register('repo_files_diff', '/%(repo_name)s/diff/%(f_path)s', ['repo_name', 'f_path']); |
|
170 | 170 | pyroutes.register('repo_files_diff_2way_redirect', '/%(repo_name)s/diff-2way/%(f_path)s', ['repo_name', 'f_path']); |
|
171 | 171 | pyroutes.register('repo_files', '/%(repo_name)s/files/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
172 | 172 | pyroutes.register('repo_files:default_path', '/%(repo_name)s/files/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
173 | 173 | pyroutes.register('repo_files:default_commit', '/%(repo_name)s/files', ['repo_name']); |
|
174 | 174 | pyroutes.register('repo_files:rendered', '/%(repo_name)s/render/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
175 | 175 | pyroutes.register('repo_files:annotated', '/%(repo_name)s/annotate/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
176 | 176 | pyroutes.register('repo_files:annotated_previous', '/%(repo_name)s/annotate-previous/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
177 | 177 | pyroutes.register('repo_nodetree_full', '/%(repo_name)s/nodetree_full/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
178 | 178 | pyroutes.register('repo_nodetree_full:default_path', '/%(repo_name)s/nodetree_full/%(commit_id)s/', ['repo_name', 'commit_id']); |
|
179 | 179 | pyroutes.register('repo_files_nodelist', '/%(repo_name)s/nodelist/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
180 | 180 | pyroutes.register('repo_file_raw', '/%(repo_name)s/raw/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
181 | 181 | pyroutes.register('repo_file_download', '/%(repo_name)s/download/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
182 | 182 | pyroutes.register('repo_file_download:legacy', '/%(repo_name)s/rawfile/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
183 | 183 | pyroutes.register('repo_file_history', '/%(repo_name)s/history/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
184 | 184 | pyroutes.register('repo_file_authors', '/%(repo_name)s/authors/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
185 | 185 | pyroutes.register('repo_files_remove_file', '/%(repo_name)s/remove_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
186 | 186 | pyroutes.register('repo_files_delete_file', '/%(repo_name)s/delete_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
187 | 187 | pyroutes.register('repo_files_edit_file', '/%(repo_name)s/edit_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
188 | 188 | pyroutes.register('repo_files_update_file', '/%(repo_name)s/update_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
189 | 189 | pyroutes.register('repo_files_add_file', '/%(repo_name)s/add_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
190 | 190 | pyroutes.register('repo_files_create_file', '/%(repo_name)s/create_file/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
191 | 191 | pyroutes.register('repo_refs_data', '/%(repo_name)s/refs-data', ['repo_name']); |
|
192 | 192 | pyroutes.register('repo_refs_changelog_data', '/%(repo_name)s/refs-data-changelog', ['repo_name']); |
|
193 | 193 | pyroutes.register('repo_stats', '/%(repo_name)s/repo_stats/%(commit_id)s', ['repo_name', 'commit_id']); |
|
194 | 194 | pyroutes.register('repo_changelog', '/%(repo_name)s/changelog', ['repo_name']); |
|
195 | 195 | pyroutes.register('repo_changelog_file', '/%(repo_name)s/changelog/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
196 | 196 | pyroutes.register('repo_changelog_elements', '/%(repo_name)s/changelog_elements', ['repo_name']); |
|
197 | 197 | pyroutes.register('repo_changelog_elements_file', '/%(repo_name)s/changelog_elements/%(commit_id)s/%(f_path)s', ['repo_name', 'commit_id', 'f_path']); |
|
198 | 198 | pyroutes.register('repo_compare_select', '/%(repo_name)s/compare', ['repo_name']); |
|
199 | 199 | pyroutes.register('repo_compare', '/%(repo_name)s/compare/%(source_ref_type)s@%(source_ref)s...%(target_ref_type)s@%(target_ref)s', ['repo_name', 'source_ref_type', 'source_ref', 'target_ref_type', 'target_ref']); |
|
200 | 200 | pyroutes.register('tags_home', '/%(repo_name)s/tags', ['repo_name']); |
|
201 | 201 | pyroutes.register('branches_home', '/%(repo_name)s/branches', ['repo_name']); |
|
202 | 202 | pyroutes.register('bookmarks_home', '/%(repo_name)s/bookmarks', ['repo_name']); |
|
203 | 203 | pyroutes.register('repo_fork_new', '/%(repo_name)s/fork', ['repo_name']); |
|
204 | 204 | pyroutes.register('repo_fork_create', '/%(repo_name)s/fork/create', ['repo_name']); |
|
205 | 205 | pyroutes.register('repo_forks_show_all', '/%(repo_name)s/forks', ['repo_name']); |
|
206 | 206 | pyroutes.register('repo_forks_data', '/%(repo_name)s/forks/data', ['repo_name']); |
|
207 | 207 | pyroutes.register('pullrequest_show', '/%(repo_name)s/pull-request/%(pull_request_id)s', ['repo_name', 'pull_request_id']); |
|
208 | 208 | pyroutes.register('pullrequest_show_all', '/%(repo_name)s/pull-request', ['repo_name']); |
|
209 | 209 | pyroutes.register('pullrequest_show_all_data', '/%(repo_name)s/pull-request-data', ['repo_name']); |
|
210 | 210 | pyroutes.register('pullrequest_repo_refs', '/%(repo_name)s/pull-request/refs/%(target_repo_name)s', ['repo_name', 'target_repo_name']); |
|
211 | 211 | pyroutes.register('pullrequest_repo_destinations', '/%(repo_name)s/pull-request/repo-destinations', ['repo_name']); |
|
212 | 212 | pyroutes.register('pullrequest_new', '/%(repo_name)s/pull-request/new', ['repo_name']); |
|
213 | 213 | pyroutes.register('pullrequest_create', '/%(repo_name)s/pull-request/create', ['repo_name']); |
|
214 | 214 | pyroutes.register('pullrequest_update', '/%(repo_name)s/pull-request/%(pull_request_id)s/update', ['repo_name', 'pull_request_id']); |
|
215 | 215 | pyroutes.register('pullrequest_merge', '/%(repo_name)s/pull-request/%(pull_request_id)s/merge', ['repo_name', 'pull_request_id']); |
|
216 | 216 | pyroutes.register('pullrequest_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/delete', ['repo_name', 'pull_request_id']); |
|
217 | 217 | pyroutes.register('pullrequest_comment_create', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment', ['repo_name', 'pull_request_id']); |
|
218 | 218 | pyroutes.register('pullrequest_comment_delete', '/%(repo_name)s/pull-request/%(pull_request_id)s/comment/%(comment_id)s/delete', ['repo_name', 'pull_request_id', 'comment_id']); |
|
219 | 219 | pyroutes.register('edit_repo', '/%(repo_name)s/settings', ['repo_name']); |
|
220 | 220 | pyroutes.register('edit_repo_advanced', '/%(repo_name)s/settings/advanced', ['repo_name']); |
|
221 | 221 | pyroutes.register('edit_repo_advanced_delete', '/%(repo_name)s/settings/advanced/delete', ['repo_name']); |
|
222 | 222 | pyroutes.register('edit_repo_advanced_locking', '/%(repo_name)s/settings/advanced/locking', ['repo_name']); |
|
223 | 223 | pyroutes.register('edit_repo_advanced_journal', '/%(repo_name)s/settings/advanced/journal', ['repo_name']); |
|
224 | 224 | pyroutes.register('edit_repo_advanced_fork', '/%(repo_name)s/settings/advanced/fork', ['repo_name']); |
|
225 | 225 | pyroutes.register('edit_repo_caches', '/%(repo_name)s/settings/caches', ['repo_name']); |
|
226 | 226 | pyroutes.register('edit_repo_perms', '/%(repo_name)s/settings/permissions', ['repo_name']); |
|
227 | 227 | pyroutes.register('edit_repo_maintenance', '/%(repo_name)s/settings/maintenance', ['repo_name']); |
|
228 | 228 | pyroutes.register('edit_repo_maintenance_execute', '/%(repo_name)s/settings/maintenance/execute', ['repo_name']); |
|
229 | 229 | pyroutes.register('edit_repo_fields', '/%(repo_name)s/settings/fields', ['repo_name']); |
|
230 | 230 | pyroutes.register('edit_repo_fields_create', '/%(repo_name)s/settings/fields/create', ['repo_name']); |
|
231 | 231 | pyroutes.register('edit_repo_fields_delete', '/%(repo_name)s/settings/fields/%(field_id)s/delete', ['repo_name', 'field_id']); |
|
232 | 232 | pyroutes.register('repo_edit_toggle_locking', '/%(repo_name)s/settings/toggle_locking', ['repo_name']); |
|
233 | 233 | pyroutes.register('edit_repo_remote', '/%(repo_name)s/settings/remote', ['repo_name']); |
|
234 | 234 | pyroutes.register('edit_repo_remote_pull', '/%(repo_name)s/settings/remote/pull', ['repo_name']); |
|
235 | pyroutes.register('edit_repo_remote_push', '/%(repo_name)s/settings/remote/push', ['repo_name']); | |
|
235 | 236 | pyroutes.register('edit_repo_statistics', '/%(repo_name)s/settings/statistics', ['repo_name']); |
|
236 | 237 | pyroutes.register('edit_repo_statistics_reset', '/%(repo_name)s/settings/statistics/update', ['repo_name']); |
|
237 | 238 | pyroutes.register('edit_repo_issuetracker', '/%(repo_name)s/settings/issue_trackers', ['repo_name']); |
|
238 | 239 | pyroutes.register('edit_repo_issuetracker_test', '/%(repo_name)s/settings/issue_trackers/test', ['repo_name']); |
|
239 | 240 | pyroutes.register('edit_repo_issuetracker_delete', '/%(repo_name)s/settings/issue_trackers/delete', ['repo_name']); |
|
240 | 241 | pyroutes.register('edit_repo_issuetracker_update', '/%(repo_name)s/settings/issue_trackers/update', ['repo_name']); |
|
241 | 242 | pyroutes.register('edit_repo_vcs', '/%(repo_name)s/settings/vcs', ['repo_name']); |
|
242 | 243 | pyroutes.register('edit_repo_vcs_update', '/%(repo_name)s/settings/vcs/update', ['repo_name']); |
|
243 | 244 | pyroutes.register('edit_repo_vcs_svn_pattern_delete', '/%(repo_name)s/settings/vcs/svn_pattern/delete', ['repo_name']); |
|
244 | 245 | pyroutes.register('repo_reviewers', '/%(repo_name)s/settings/review/rules', ['repo_name']); |
|
245 | 246 | pyroutes.register('repo_default_reviewers_data', '/%(repo_name)s/settings/review/default-reviewers', ['repo_name']); |
|
246 | 247 | pyroutes.register('edit_repo_strip', '/%(repo_name)s/settings/strip', ['repo_name']); |
|
247 | 248 | pyroutes.register('strip_check', '/%(repo_name)s/settings/strip_check', ['repo_name']); |
|
248 | 249 | pyroutes.register('strip_execute', '/%(repo_name)s/settings/strip_execute', ['repo_name']); |
|
249 | 250 | pyroutes.register('edit_repo_audit_logs', '/%(repo_name)s/settings/audit_logs', ['repo_name']); |
|
250 | 251 | pyroutes.register('rss_feed_home', '/%(repo_name)s/feed/rss', ['repo_name']); |
|
251 | 252 | pyroutes.register('atom_feed_home', '/%(repo_name)s/feed/atom', ['repo_name']); |
|
252 | 253 | pyroutes.register('repo_summary', '/%(repo_name)s', ['repo_name']); |
|
253 | 254 | pyroutes.register('repo_summary_slash', '/%(repo_name)s/', ['repo_name']); |
|
254 | 255 | pyroutes.register('edit_repo_group', '/%(repo_group_name)s/_edit', ['repo_group_name']); |
|
255 | 256 | pyroutes.register('edit_repo_group_advanced', '/%(repo_group_name)s/_settings/advanced', ['repo_group_name']); |
|
256 | 257 | pyroutes.register('edit_repo_group_advanced_delete', '/%(repo_group_name)s/_settings/advanced/delete', ['repo_group_name']); |
|
257 | 258 | pyroutes.register('edit_repo_group_perms', '/%(repo_group_name)s/_settings/permissions', ['repo_group_name']); |
|
258 | 259 | pyroutes.register('edit_repo_group_perms_update', '/%(repo_group_name)s/_settings/permissions/update', ['repo_group_name']); |
|
259 | 260 | pyroutes.register('repo_group_home', '/%(repo_group_name)s', ['repo_group_name']); |
|
260 | 261 | pyroutes.register('repo_group_home_slash', '/%(repo_group_name)s/', ['repo_group_name']); |
|
261 | 262 | pyroutes.register('user_group_members_data', '/_admin/user_groups/%(user_group_id)s/members', ['user_group_id']); |
|
262 | 263 | pyroutes.register('edit_user_group_perms_summary', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary', ['user_group_id']); |
|
263 | 264 | pyroutes.register('edit_user_group_perms_summary_json', '/_admin/user_groups/%(user_group_id)s/edit/permissions_summary/json', ['user_group_id']); |
|
264 | 265 | pyroutes.register('edit_user_group', '/_admin/user_groups/%(user_group_id)s/edit', ['user_group_id']); |
|
265 | 266 | pyroutes.register('user_groups_update', '/_admin/user_groups/%(user_group_id)s/update', ['user_group_id']); |
|
266 | 267 | pyroutes.register('edit_user_group_global_perms', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions', ['user_group_id']); |
|
267 | 268 | pyroutes.register('edit_user_group_global_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/global_permissions/update', ['user_group_id']); |
|
268 | 269 | pyroutes.register('edit_user_group_perms', '/_admin/user_groups/%(user_group_id)s/edit/permissions', ['user_group_id']); |
|
269 | 270 | pyroutes.register('edit_user_group_perms_update', '/_admin/user_groups/%(user_group_id)s/edit/permissions/update', ['user_group_id']); |
|
270 | 271 | pyroutes.register('edit_user_group_advanced', '/_admin/user_groups/%(user_group_id)s/edit/advanced', ['user_group_id']); |
|
271 | 272 | pyroutes.register('edit_user_group_advanced_sync', '/_admin/user_groups/%(user_group_id)s/edit/advanced/sync', ['user_group_id']); |
|
272 | 273 | pyroutes.register('user_groups_delete', '/_admin/user_groups/%(user_group_id)s/delete', ['user_group_id']); |
|
273 | 274 | pyroutes.register('search', '/_admin/search', []); |
|
274 | 275 | pyroutes.register('search_repo', '/%(repo_name)s/search', ['repo_name']); |
|
275 | 276 | pyroutes.register('user_profile', '/_profiles/%(username)s', ['username']); |
|
276 | 277 | pyroutes.register('my_account_profile', '/_admin/my_account/profile', []); |
|
277 | 278 | pyroutes.register('my_account_edit', '/_admin/my_account/edit', []); |
|
278 | 279 | pyroutes.register('my_account_update', '/_admin/my_account/update', []); |
|
279 | 280 | pyroutes.register('my_account_password', '/_admin/my_account/password', []); |
|
280 | 281 | pyroutes.register('my_account_password_update', '/_admin/my_account/password/update', []); |
|
281 | 282 | pyroutes.register('my_account_auth_tokens', '/_admin/my_account/auth_tokens', []); |
|
282 | 283 | pyroutes.register('my_account_auth_tokens_add', '/_admin/my_account/auth_tokens/new', []); |
|
283 | 284 | pyroutes.register('my_account_auth_tokens_delete', '/_admin/my_account/auth_tokens/delete', []); |
|
284 | 285 | pyroutes.register('my_account_ssh_keys', '/_admin/my_account/ssh_keys', []); |
|
285 | 286 | pyroutes.register('my_account_ssh_keys_generate', '/_admin/my_account/ssh_keys/generate', []); |
|
286 | 287 | pyroutes.register('my_account_ssh_keys_add', '/_admin/my_account/ssh_keys/new', []); |
|
287 | 288 | pyroutes.register('my_account_ssh_keys_delete', '/_admin/my_account/ssh_keys/delete', []); |
|
288 | 289 | pyroutes.register('my_account_user_group_membership', '/_admin/my_account/user_group_membership', []); |
|
289 | 290 | pyroutes.register('my_account_emails', '/_admin/my_account/emails', []); |
|
290 | 291 | pyroutes.register('my_account_emails_add', '/_admin/my_account/emails/new', []); |
|
291 | 292 | pyroutes.register('my_account_emails_delete', '/_admin/my_account/emails/delete', []); |
|
292 | 293 | pyroutes.register('my_account_repos', '/_admin/my_account/repos', []); |
|
293 | 294 | pyroutes.register('my_account_watched', '/_admin/my_account/watched', []); |
|
294 | 295 | pyroutes.register('my_account_perms', '/_admin/my_account/perms', []); |
|
295 | 296 | pyroutes.register('my_account_notifications', '/_admin/my_account/notifications', []); |
|
296 | 297 | pyroutes.register('my_account_notifications_toggle_visibility', '/_admin/my_account/toggle_visibility', []); |
|
297 | 298 | pyroutes.register('my_account_pullrequests', '/_admin/my_account/pull_requests', []); |
|
298 | 299 | pyroutes.register('my_account_pullrequests_data', '/_admin/my_account/pull_requests/data', []); |
|
299 | 300 | pyroutes.register('notifications_show_all', '/_admin/notifications', []); |
|
300 | 301 | pyroutes.register('notifications_mark_all_read', '/_admin/notifications/mark_all_read', []); |
|
301 | 302 | pyroutes.register('notifications_show', '/_admin/notifications/%(notification_id)s', ['notification_id']); |
|
302 | 303 | pyroutes.register('notifications_update', '/_admin/notifications/%(notification_id)s/update', ['notification_id']); |
|
303 | 304 | pyroutes.register('notifications_delete', '/_admin/notifications/%(notification_id)s/delete', ['notification_id']); |
|
304 | 305 | pyroutes.register('my_account_notifications_test_channelstream', '/_admin/my_account/test_channelstream', []); |
|
305 | 306 | pyroutes.register('gists_show', '/_admin/gists', []); |
|
306 | 307 | pyroutes.register('gists_new', '/_admin/gists/new', []); |
|
307 | 308 | pyroutes.register('gists_create', '/_admin/gists/create', []); |
|
308 | 309 | pyroutes.register('gist_show', '/_admin/gists/%(gist_id)s', ['gist_id']); |
|
309 | 310 | pyroutes.register('gist_delete', '/_admin/gists/%(gist_id)s/delete', ['gist_id']); |
|
310 | 311 | pyroutes.register('gist_edit', '/_admin/gists/%(gist_id)s/edit', ['gist_id']); |
|
311 | 312 | pyroutes.register('gist_edit_check_revision', '/_admin/gists/%(gist_id)s/edit/check_revision', ['gist_id']); |
|
312 | 313 | pyroutes.register('gist_update', '/_admin/gists/%(gist_id)s/update', ['gist_id']); |
|
313 | 314 | pyroutes.register('gist_show_rev', '/_admin/gists/%(gist_id)s/%(revision)s', ['gist_id', 'revision']); |
|
314 | 315 | pyroutes.register('gist_show_formatted', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s', ['gist_id', 'revision', 'format']); |
|
315 | 316 | pyroutes.register('gist_show_formatted_path', '/_admin/gists/%(gist_id)s/%(revision)s/%(format)s/%(f_path)s', ['gist_id', 'revision', 'format', 'f_path']); |
|
316 | 317 | pyroutes.register('debug_style_home', '/_admin/debug_style', []); |
|
317 | 318 | pyroutes.register('debug_style_template', '/_admin/debug_style/t/%(t_path)s', ['t_path']); |
|
318 | 319 | pyroutes.register('apiv2', '/_admin/api', []); |
|
319 | 320 | } |
@@ -1,102 +1,102 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | ## |
|
3 | 3 | ## See also repo_settings.html |
|
4 | 4 | ## |
|
5 | 5 | <%inherit file="/base/base.mako"/> |
|
6 | 6 | |
|
7 | 7 | <%def name="title()"> |
|
8 | 8 | ${_('%s repository settings') % c.rhodecode_db_repo.repo_name} |
|
9 | 9 | %if c.rhodecode_name: |
|
10 | 10 | · ${h.branding(c.rhodecode_name)} |
|
11 | 11 | %endif |
|
12 | 12 | </%def> |
|
13 | 13 | |
|
14 | 14 | <%def name="breadcrumbs_links()"> |
|
15 | 15 | ${_('Settings')} |
|
16 | 16 | </%def> |
|
17 | 17 | |
|
18 | 18 | <%def name="menu_bar_nav()"> |
|
19 | 19 | ${self.menu_items(active='repositories')} |
|
20 | 20 | </%def> |
|
21 | 21 | |
|
22 | 22 | <%def name="menu_bar_subnav()"> |
|
23 | 23 | ${self.repo_menu(active='options')} |
|
24 | 24 | </%def> |
|
25 | 25 | |
|
26 | 26 | <%def name="main_content()"> |
|
27 | 27 | % if hasattr(c, 'repo_edit_template'): |
|
28 | 28 | <%include file="${c.repo_edit_template}"/> |
|
29 | 29 | % else: |
|
30 | 30 | <%include file="/admin/repos/repo_edit_${c.active}.mako"/> |
|
31 | 31 | % endif |
|
32 | 32 | </%def> |
|
33 | 33 | |
|
34 | 34 | |
|
35 | 35 | <%def name="main()"> |
|
36 | 36 | <div class="box"> |
|
37 | 37 | <div class="title"> |
|
38 | 38 | ${self.repo_page_title(c.rhodecode_db_repo)} |
|
39 | 39 | ${self.breadcrumbs()} |
|
40 | 40 | </div> |
|
41 | 41 | |
|
42 | 42 | <div class="sidebar-col-wrapper scw-small"> |
|
43 | 43 | <div class="sidebar"> |
|
44 | 44 | <ul class="nav nav-pills nav-stacked"> |
|
45 | 45 | <li class="${'active' if c.active=='settings' else ''}"> |
|
46 | 46 | <a href="${h.route_path('edit_repo', repo_name=c.repo_name)}">${_('Settings')}</a> |
|
47 | 47 | </li> |
|
48 | 48 | <li class="${'active' if c.active=='permissions' else ''}"> |
|
49 | 49 | <a href="${h.route_path('edit_repo_perms', repo_name=c.repo_name)}">${_('Permissions')}</a> |
|
50 | 50 | </li> |
|
51 | 51 | <li class="${'active' if c.active=='advanced' else ''}"> |
|
52 | 52 | <a href="${h.route_path('edit_repo_advanced', repo_name=c.repo_name)}">${_('Advanced')}</a> |
|
53 | 53 | </li> |
|
54 | 54 | <li class="${'active' if c.active=='vcs' else ''}"> |
|
55 | 55 | <a href="${h.route_path('edit_repo_vcs', repo_name=c.repo_name)}">${_('VCS')}</a> |
|
56 | 56 | </li> |
|
57 | 57 | <li class="${'active' if c.active=='fields' else ''}"> |
|
58 | 58 | <a href="${h.route_path('edit_repo_fields', repo_name=c.repo_name)}">${_('Extra Fields')}</a> |
|
59 | 59 | </li> |
|
60 | 60 | <li class="${'active' if c.active=='issuetracker' else ''}"> |
|
61 | 61 | <a href="${h.route_path('edit_repo_issuetracker', repo_name=c.repo_name)}">${_('Issue Tracker')}</a> |
|
62 | 62 | </li> |
|
63 | 63 | <li class="${'active' if c.active=='caches' else ''}"> |
|
64 | 64 | <a href="${h.route_path('edit_repo_caches', repo_name=c.repo_name)}">${_('Caches')}</a> |
|
65 | 65 | </li> |
|
66 | 66 | %if c.rhodecode_db_repo.repo_type != 'svn': |
|
67 | 67 | <li class="${'active' if c.active=='remote' else ''}"> |
|
68 | <a href="${h.route_path('edit_repo_remote', repo_name=c.repo_name)}">${_('Remote')}</a> | |
|
68 | <a href="${h.route_path('edit_repo_remote', repo_name=c.repo_name)}">${_('Remote sync')}</a> | |
|
69 | 69 | </li> |
|
70 | 70 | %endif |
|
71 | 71 | <li class="${'active' if c.active=='statistics' else ''}"> |
|
72 | 72 | <a href="${h.route_path('edit_repo_statistics', repo_name=c.repo_name)}">${_('Statistics')}</a> |
|
73 | 73 | </li> |
|
74 | 74 | <li class="${'active' if c.active=='integrations' else ''}"> |
|
75 | 75 | <a href="${h.route_path('repo_integrations_home', repo_name=c.repo_name)}">${_('Integrations')}</a> |
|
76 | 76 | </li> |
|
77 | 77 | %if c.rhodecode_db_repo.repo_type != 'svn': |
|
78 | 78 | <li class="${'active' if c.active=='reviewers' else ''}"> |
|
79 | 79 | <a href="${h.route_path('repo_reviewers', repo_name=c.repo_name)}">${_('Reviewer Rules')}</a> |
|
80 | 80 | </li> |
|
81 | 81 | %endif |
|
82 | 82 | <li class="${'active' if c.active=='maintenance' else ''}"> |
|
83 | 83 | <a href="${h.route_path('edit_repo_maintenance', repo_name=c.repo_name)}">${_('Maintenance')}</a> |
|
84 | 84 | </li> |
|
85 | 85 | <li class="${'active' if c.active=='strip' else ''}"> |
|
86 | 86 | <a href="${h.route_path('edit_repo_strip', repo_name=c.repo_name)}">${_('Strip')}</a> |
|
87 | 87 | </li> |
|
88 | 88 | <li class="${'active' if c.active=='audit' else ''}"> |
|
89 | 89 | <a href="${h.route_path('edit_repo_audit_logs', repo_name=c.repo_name)}">${_('Audit logs')}</a> |
|
90 | 90 | </li> |
|
91 | 91 | |
|
92 | 92 | </ul> |
|
93 | 93 | </div> |
|
94 | 94 | |
|
95 | 95 | <div class="main-content-full-width"> |
|
96 | 96 | ${self.main_content()} |
|
97 | 97 | </div> |
|
98 | 98 | |
|
99 | 99 | </div> |
|
100 | 100 | </div> |
|
101 | 101 | |
|
102 | 102 | </%def> No newline at end of file |
@@ -1,40 +1,69 b'' | |||
|
1 | 1 | <div class="panel panel-default"> |
|
2 | 2 | <div class="panel-heading"> |
|
3 |
<h3 class="panel-title">${_('Remote |
|
|
3 | <h3 class="panel-title">${_('Remote Sync')}</h3> | |
|
4 | 4 | </div> |
|
5 | 5 | <div class="panel-body"> |
|
6 | 6 | |
|
7 |
<h4>${_('Manually pull changes from external |
|
|
7 | <h4>${_('Manually pull/push changes from/to external URLs.')}</h4> | |
|
8 | 8 | |
|
9 | %if c.rhodecode_db_repo.clone_uri: | |
|
9 | %if c.rhodecode_db_repo.clone_uri or c.rhodecode_db_repo.push_uri: | |
|
10 | 10 | |
|
11 | ${_('Remote mirror url')}: | |
|
12 | <a href="${c.rhodecode_db_repo.clone_uri}">${c.rhodecode_db_repo.clone_uri_hidden}</a> | |
|
13 | ||
|
11 | <table> | |
|
12 | % if c.rhodecode_db_repo.clone_uri: | |
|
13 | <tr> | |
|
14 | <td><div style="min-width: 80px"><strong>${_('Pull url')}</strong></div></td> | |
|
15 | <td><a href="${c.rhodecode_db_repo.clone_uri}">${c.rhodecode_db_repo.clone_uri_hidden}</a></td> | |
|
16 | </tr> | |
|
17 | <tr> | |
|
18 | <td></td> | |
|
19 | <td> | |
|
14 | 20 | <p> |
|
15 | 21 | ${_('Pull can be automated by such api call. Can be called periodically in crontab etc.')} |
|
16 | 22 | <br/> |
|
17 | 23 | <code> |
|
18 | 24 | ${h.api_call_example(method='pull', args={"repoid": c.rhodecode_db_repo.repo_name})} |
|
19 | 25 | </code> |
|
20 | 26 | </p> |
|
21 | ||
|
27 | </td> | |
|
28 | </tr> | |
|
29 | <tr> | |
|
30 | <td></td> | |
|
31 | <td> | |
|
22 | 32 | ${h.secure_form(h.route_path('edit_repo_remote_pull', repo_name=c.repo_name), request=request)} |
|
23 | 33 | <div class="form"> |
|
24 | 34 | <div class="fields"> |
|
25 | 35 | ${h.submit('remote_pull_%s' % c.rhodecode_db_repo.repo_name,_('Pull changes from remote location'),class_="btn btn-small",onclick="return confirm('"+_('Confirm to pull changes from remote side')+"');")} |
|
26 | 36 | </div> |
|
27 | 37 | </div> |
|
28 | 38 | ${h.end_form()} |
|
39 | </td> | |
|
40 | </tr> | |
|
41 | % endif | |
|
42 | ||
|
43 | % if c.rhodecode_db_repo.push_uri: | |
|
44 | <tr> | |
|
45 | <td><div style="min-width: 80px"><strong>${_('Push url')}</strong></div></td> | |
|
46 | <td><a href="${c.rhodecode_db_repo.push_uri_hidden}">${c.rhodecode_db_repo.push_uri_hidden}</a></td> | |
|
47 | </tr> | |
|
48 | <tr> | |
|
49 | <td></td> | |
|
50 | <td> | |
|
51 | ${_('This feature is available in RhodeCode EE edition only. Contact {sales_email} to obtain a trial license.').format(sales_email='<a href="mailto:sales@rhodecode.com">sales@rhodecode.com</a>')|n} | |
|
52 | </td> | |
|
53 | </tr> | |
|
54 | % endif | |
|
55 | ||
|
56 | </table> | |
|
57 | ||
|
29 | 58 | %else: |
|
30 | 59 | |
|
31 | 60 | ${_('This repository does not have any remote mirror url set.')} |
|
32 | 61 | <a href="${h.route_path('edit_repo', repo_name=c.rhodecode_db_repo.repo_name)}">${_('Set remote url.')}</a> |
|
33 | 62 | <br/> |
|
34 | 63 | <br/> |
|
35 | 64 | <button class="btn disabled" type="submit" disabled="disabled"> |
|
36 | 65 | ${_('Pull changes from remote location')} |
|
37 | 66 | </button> |
|
38 | 67 | %endif |
|
39 | 68 | </div> |
|
40 | 69 | </div> |
@@ -1,266 +1,329 b'' | |||
|
1 | 1 | ## -*- coding: utf-8 -*- |
|
2 | 2 | <%namespace name="base" file="/base/base.mako"/> |
|
3 | 3 | |
|
4 | 4 | <div class="panel panel-default"> |
|
5 | 5 | <div class="panel-heading"> |
|
6 | 6 | <h3 class="panel-title">${_('Settings for Repository: %s') % c.rhodecode_db_repo.repo_name}</h3> |
|
7 | 7 | </div> |
|
8 | 8 | <div class="panel-body"> |
|
9 | 9 | ${h.secure_form(h.route_path('edit_repo', repo_name=c.rhodecode_db_repo.repo_name), request=request)} |
|
10 | 10 | <div class="form"> |
|
11 | 11 | <!-- fields --> |
|
12 | 12 | <div class="fields"> |
|
13 | 13 | |
|
14 | 14 | <div class="field"> |
|
15 | 15 | <div class="label"> |
|
16 | 16 | <label for="repo_name">${_('Name')}:</label> |
|
17 | 17 | </div> |
|
18 | 18 | <div class="input"> |
|
19 | 19 | ${c.form['repo_name'].render(css_class='medium', oid='repo_name')|n} |
|
20 | 20 | ${c.form.render_error(request, c.form['repo_name'])|n} |
|
21 | 21 | |
|
22 | 22 | <p class="help-block">${_('permalink id')}: `_${c.rhodecode_db_repo.repo_id}` <span><a href="#" onclick="$('#clone_id').toggle();return false">${_('what is that ?')}</a></span></p> |
|
23 | 23 | <p id="clone_id" style="display:none;"> |
|
24 | 24 | ${_('URL by id')}: `${c.rhodecode_db_repo.clone_url(with_id=True)}` <br/> |
|
25 | 25 | ${_('''In case this repository is renamed or moved into another group the repository url changes. |
|
26 | 26 | Using above url guarantees that this repository will always be accessible under such url. |
|
27 | 27 | Useful for CI systems, or any other cases that you need to hardcode the url into 3rd party service.''')}</p> |
|
28 | 28 | </div> |
|
29 | 29 | </div> |
|
30 | 30 | |
|
31 | 31 | <div class="field"> |
|
32 | 32 | <div class="label"> |
|
33 | 33 | <label for="repo_group">${_('Repository group')}:</label> |
|
34 | 34 | </div> |
|
35 | 35 | <div class="select"> |
|
36 | 36 | ${c.form['repo_group'].render(css_class='medium', oid='repo_group')|n} |
|
37 | 37 | ${c.form.render_error(request, c.form['repo_group'])|n} |
|
38 | 38 | |
|
39 | 39 | % if c.personal_repo_group: |
|
40 | 40 | <a class="btn" href="#" data-personal-group-name="${c.personal_repo_group.group_name}" data-personal-group-id="${c.personal_repo_group.group_id}" onclick="selectMyGroup(this); return false"> |
|
41 | 41 | ${_('Select my personal group (`%(repo_group_name)s`)') % {'repo_group_name': c.personal_repo_group.group_name}} |
|
42 | 42 | </a> |
|
43 | 43 | % endif |
|
44 | 44 | <p class="help-block">${_('Optional select a group to put this repository into.')}</p> |
|
45 | 45 | </div> |
|
46 | 46 | </div> |
|
47 | 47 | |
|
48 | 48 | % if c.rhodecode_db_repo.repo_type != 'svn': |
|
49 | <% sync_link = h.literal(h.link_to('remote sync', h.route_path('edit_repo_remote', repo_name=c.repo_name))) %> | |
|
49 | 50 | <div class="field"> |
|
50 | 51 | <div class="label"> |
|
51 | <label for="clone_uri">${_('Remote uri')}:</label> | |
|
52 | <label for="clone_uri">${_('Remote pull uri')}:</label> | |
|
52 | 53 | </div> |
|
53 | 54 | <div class="input"> |
|
54 | 55 | %if c.rhodecode_db_repo.clone_uri: |
|
55 | 56 | ## display, if we don't have any errors |
|
56 | 57 | % if not c.form['repo_clone_uri'].error: |
|
57 | 58 | <div id="clone_uri_hidden" class='text-as-placeholder'> |
|
58 | 59 | <span id="clone_uri_hidden_value">${c.rhodecode_db_repo.clone_uri_hidden}</span> |
|
59 | 60 | <span class="link" id="edit_clone_uri"><i class="icon-edit"></i>${_('edit')}</span> |
|
60 | 61 | </div> |
|
61 | 62 | % endif |
|
62 | 63 | |
|
63 | 64 | ## alter field |
|
64 | 65 | <div id="alter_clone_uri" style="${'' if c.form['repo_clone_uri'].error else 'display: none'}"> |
|
65 | 66 | ${c.form['repo_clone_uri'].render(css_class='medium', oid='clone_uri', placeholder=_('enter new value, or leave empty to remove'))|n} |
|
66 | 67 | ${c.form.render_error(request, c.form['repo_clone_uri'])|n} |
|
67 | 68 | % if c.form['repo_clone_uri'].error: |
|
68 | 69 | ## we got error from form subit, means we modify the url |
|
69 | 70 | ${h.hidden('repo_clone_uri_change', 'MOD')} |
|
70 | 71 | % else: |
|
71 | 72 | ${h.hidden('repo_clone_uri_change', 'OLD')} |
|
72 | 73 | % endif |
|
73 | 74 | |
|
74 | 75 | % if not c.form['repo_clone_uri'].error: |
|
75 | 76 | <span class="link" id="cancel_edit_clone_uri">${_('cancel')}</span> |
|
76 | 77 | % endif |
|
77 | 78 | |
|
78 | 79 | </div> |
|
79 | 80 | %else: |
|
80 | 81 | ## not set yet, display form to set it |
|
81 | 82 | ${c.form['repo_clone_uri'].render(css_class='medium', oid='clone_uri')|n} |
|
82 | 83 | ${c.form.render_error(request, c.form['repo_clone_uri'])|n} |
|
83 | 84 | ${h.hidden('repo_clone_uri_change', 'NEW')} |
|
84 | 85 | %endif |
|
85 | 86 | <p id="alter_clone_uri_help_block" class="help-block"> |
|
86 | <% pull_link = h.literal(h.link_to('remote sync', h.route_path('edit_repo_remote', repo_name=c.repo_name))) %> | |
|
87 | ${_('http[s] url where from repository was imported, this field can used for doing {pull_link}.').format(pull_link=pull_link)|n} <br/> | |
|
87 | ${_('http[s] url where from repository was imported. This field can used for doing {sync_link}.').format(sync_link=sync_link)|n} <br/> | |
|
88 | ${_('This field is stored encrypted inside Database, a format of http://user:password@server.com/repo_name can be used and will be hidden from display.')} | |
|
89 | </p> | |
|
90 | </div> | |
|
91 | </div> | |
|
92 | <div class="field"> | |
|
93 | <div class="label"> | |
|
94 | <label for="push_uri">${_('Remote push uri')}:</label> | |
|
95 | </div> | |
|
96 | <div class="input"> | |
|
97 | %if c.rhodecode_db_repo.push_uri: | |
|
98 | ## display, if we don't have any errors | |
|
99 | % if not c.form['repo_push_uri'].error: | |
|
100 | <div id="push_uri_hidden" class='text-as-placeholder'> | |
|
101 | <span id="push_uri_hidden_value">${c.rhodecode_db_repo.push_uri_hidden}</span> | |
|
102 | <span class="link" id="edit_push_uri"><i class="icon-edit"></i>${_('edit')}</span> | |
|
103 | </div> | |
|
104 | % endif | |
|
105 | ||
|
106 | ## alter field | |
|
107 | <div id="alter_push_uri" style="${'' if c.form['repo_push_uri'].error else 'display: none'}"> | |
|
108 | ${c.form['repo_push_uri'].render(css_class='medium', oid='push_uri', placeholder=_('enter new value, or leave empty to remove'))|n} | |
|
109 | ${c.form.render_error(request, c.form['repo_push_uri'])|n} | |
|
110 | % if c.form['repo_push_uri'].error: | |
|
111 | ## we got error from form subit, means we modify the url | |
|
112 | ${h.hidden('repo_push_uri_change', 'MOD')} | |
|
113 | % else: | |
|
114 | ${h.hidden('repo_push_uri_change', 'OLD')} | |
|
115 | % endif | |
|
116 | ||
|
117 | % if not c.form['repo_push_uri'].error: | |
|
118 | <span class="link" id="cancel_edit_push_uri">${_('cancel')}</span> | |
|
119 | % endif | |
|
120 | ||
|
121 | </div> | |
|
122 | %else: | |
|
123 | ## not set yet, display form to set it | |
|
124 | ${c.form['repo_push_uri'].render(css_class='medium', oid='push_uri')|n} | |
|
125 | ${c.form.render_error(request, c.form['repo_push_uri'])|n} | |
|
126 | ${h.hidden('repo_push_uri_change', 'NEW')} | |
|
127 | %endif | |
|
128 | <p id="alter_push_uri_help_block" class="help-block"> | |
|
129 | ${_('http[s] url to sync data back. This field can used for doing {sync_link}.').format(sync_link=sync_link)|n} <br/> | |
|
88 | 130 | ${_('This field is stored encrypted inside Database, a format of http://user:password@server.com/repo_name can be used and will be hidden from display.')} |
|
89 | 131 | </p> |
|
90 | 132 | </div> |
|
91 | 133 | </div> |
|
92 | 134 | % else: |
|
93 | 135 | ${h.hidden('repo_clone_uri', '')} |
|
136 | ${h.hidden('repo_push_uri', '')} | |
|
94 | 137 | % endif |
|
95 | 138 | |
|
96 | 139 | <div class="field"> |
|
97 | 140 | <div class="label"> |
|
98 | 141 | <label for="repo_landing_commit_ref">${_('Landing commit')}:</label> |
|
99 | 142 | </div> |
|
100 | 143 | <div class="select"> |
|
101 | 144 | ${c.form['repo_landing_commit_ref'].render(css_class='medium', oid='repo_landing_commit_ref')|n} |
|
102 | 145 | ${c.form.render_error(request, c.form['repo_landing_commit_ref'])|n} |
|
103 | 146 | <p class="help-block">${_('Default commit for files page, downloads, full text search index and readme')}</p> |
|
104 | 147 | </div> |
|
105 | 148 | </div> |
|
106 | 149 | |
|
107 | 150 | <div class="field badged-field"> |
|
108 | 151 | <div class="label"> |
|
109 | 152 | <label for="repo_owner">${_('Owner')}:</label> |
|
110 | 153 | </div> |
|
111 | 154 | <div class="input"> |
|
112 | 155 | <div class="badge-input-container"> |
|
113 | 156 | <div class="user-badge"> |
|
114 | 157 | ${base.gravatar_with_user(c.rhodecode_db_repo.user.email or c.rhodecode_db_repo.user.username, show_disabled=not c.rhodecode_db_repo.user.active)} |
|
115 | 158 | </div> |
|
116 | 159 | <div class="badge-input-wrap"> |
|
117 | 160 | ${c.form['repo_owner'].render(css_class='medium', oid='repo_owner')|n} |
|
118 | 161 | </div> |
|
119 | 162 | </div> |
|
120 | 163 | ${c.form.render_error(request, c.form['repo_owner'])|n} |
|
121 | 164 | <p class="help-block">${_('Change owner of this repository.')}</p> |
|
122 | 165 | </div> |
|
123 | 166 | </div> |
|
124 | 167 | |
|
125 | 168 | <div class="field"> |
|
126 | 169 | <div class="label label-textarea"> |
|
127 | 170 | <label for="repo_description">${_('Description')}:</label> |
|
128 | 171 | </div> |
|
129 | 172 | <div class="textarea text-area editor"> |
|
130 | 173 | ${c.form['repo_description'].render(css_class='medium', oid='repo_description')|n} |
|
131 | 174 | ${c.form.render_error(request, c.form['repo_description'])|n} |
|
132 | 175 | |
|
133 | 176 | <% metatags_url = h.literal('''<a href="#metatagsShow" onclick="$('#meta-tags-desc').toggle();return false">meta-tags</a>''') %> |
|
134 | 177 | <span class="help-block">${_('Plain text format with support of {metatags}. Add a README file for longer descriptions').format(metatags=metatags_url)|n}</span> |
|
135 | 178 | <span id="meta-tags-desc" style="display: none"> |
|
136 | 179 | <%namespace name="dt" file="/data_table/_dt_elements.mako"/> |
|
137 | 180 | ${dt.metatags_help()} |
|
138 | 181 | </span> |
|
139 | 182 | </div> |
|
140 | 183 | </div> |
|
141 | 184 | |
|
142 | 185 | <div class="field"> |
|
143 | 186 | <div class="label label-checkbox"> |
|
144 | 187 | <label for="${c.form['repo_private'].oid}">${_('Private repository')}:</label> |
|
145 | 188 | </div> |
|
146 | 189 | <div class="checkboxes"> |
|
147 | 190 | ${c.form['repo_private'].render(css_class='medium')|n} |
|
148 | 191 | ${c.form.render_error(request, c.form['repo_private'])|n} |
|
149 | 192 | <span class="help-block">${_('Private repositories are only visible to people explicitly added as collaborators.')}</span> |
|
150 | 193 | </div> |
|
151 | 194 | </div> |
|
152 | 195 | <div class="field"> |
|
153 | 196 | <div class="label label-checkbox"> |
|
154 | 197 | <label for="${c.form['repo_enable_statistics'].oid}">${_('Enable statistics')}:</label> |
|
155 | 198 | </div> |
|
156 | 199 | <div class="checkboxes"> |
|
157 | 200 | ${c.form['repo_enable_statistics'].render(css_class='medium')|n} |
|
158 | 201 | ${c.form.render_error(request, c.form['repo_enable_statistics'])|n} |
|
159 | 202 | <span class="help-block">${_('Enable statistics window on summary page.')}</span> |
|
160 | 203 | </div> |
|
161 | 204 | </div> |
|
162 | 205 | <div class="field"> |
|
163 | 206 | <div class="label label-checkbox"> |
|
164 | 207 | <label for="${c.form['repo_enable_downloads'].oid}">${_('Enable downloads')}:</label> |
|
165 | 208 | </div> |
|
166 | 209 | <div class="checkboxes"> |
|
167 | 210 | ${c.form['repo_enable_downloads'].render(css_class='medium')|n} |
|
168 | 211 | ${c.form.render_error(request, c.form['repo_enable_downloads'])|n} |
|
169 | 212 | <span class="help-block">${_('Enable download menu on summary page.')}</span> |
|
170 | 213 | </div> |
|
171 | 214 | </div> |
|
172 | 215 | <div class="field"> |
|
173 | 216 | <div class="label label-checkbox"> |
|
174 | 217 | <label for="${c.form['repo_enable_locking'].oid}">${_('Enable automatic locking')}:</label> |
|
175 | 218 | </div> |
|
176 | 219 | <div class="checkboxes"> |
|
177 | 220 | ${c.form['repo_enable_locking'].render(css_class='medium')|n} |
|
178 | 221 | ${c.form.render_error(request, c.form['repo_enable_locking'])|n} |
|
179 | 222 | <span class="help-block">${_('Enable automatic locking on repository. Pulling from this repository creates a lock that can be released by pushing back by the same user')}</span> |
|
180 | 223 | </div> |
|
181 | 224 | </div> |
|
182 | 225 | |
|
183 | 226 | %if c.visual.repository_fields: |
|
184 | 227 | ## EXTRA FIELDS |
|
185 | 228 | %for field in c.repo_fields: |
|
186 | 229 | <div class="field"> |
|
187 | 230 | <div class="label"> |
|
188 | 231 | <label for="${field.field_key_prefixed}">${field.field_label} (${field.field_key}):</label> |
|
189 | 232 | </div> |
|
190 | 233 | <div class="input input-medium"> |
|
191 | 234 | ${h.text(field.field_key_prefixed, field.field_value, class_='medium')} |
|
192 | 235 | %if field.field_desc: |
|
193 | 236 | <span class="help-block">${field.field_desc}</span> |
|
194 | 237 | %endif |
|
195 | 238 | </div> |
|
196 | 239 | </div> |
|
197 | 240 | %endfor |
|
198 | 241 | %endif |
|
199 | 242 | <div class="buttons"> |
|
200 | 243 | ${h.submit('save',_('Save'),class_="btn")} |
|
201 | 244 | ${h.reset('reset',_('Reset'),class_="btn")} |
|
202 | 245 | </div> |
|
203 | 246 | </div> |
|
204 | 247 | </div> |
|
205 | 248 | ${h.end_form()} |
|
206 | 249 | </div> |
|
207 | 250 | </div> |
|
208 | 251 | |
|
209 | 252 | <script> |
|
210 | 253 | $(document).ready(function(){ |
|
211 |
var cloneUrl = function( |
|
|
212 | var alterButton = $('#alter_clone_uri'); | |
|
213 | var editButton = $('#edit_clone_uri'); | |
|
214 | var cancelEditButton = $('#cancel_edit_clone_uri'); | |
|
215 | var hiddenUrl = $('#clone_uri_hidden'); | |
|
216 | var hiddenUrlValue = $('#clone_uri_hidden_value'); | |
|
217 | var input = $('#clone_uri'); | |
|
218 | var helpBlock = $('#alter_clone_uri_help_block'); | |
|
219 | var changedFlag = $('#repo_clone_uri_change'); | |
|
254 | var cloneUrl = function ( | |
|
255 | alterButton, editButton, cancelEditButton, | |
|
256 | hiddenUrl, hiddenUrlValue, input, helpBlock, changedFlag) { | |
|
257 | ||
|
220 | 258 | var originalText = helpBlock.html(); |
|
221 | 259 | var obfuscatedUrl = hiddenUrlValue.html(); |
|
222 | 260 | |
|
223 | 261 | var edit = function(e) { |
|
224 | 262 | alterButton.show(); |
|
225 | 263 | editButton.hide(); |
|
226 | 264 | hiddenUrl.hide(); |
|
227 | 265 | |
|
228 | 266 | //add the old value next to input for verification |
|
229 | 267 | helpBlock.html("(" + obfuscatedUrl + ")" + "<br\>" + originalText); |
|
230 | 268 | changedFlag.val('MOD'); |
|
231 | 269 | }; |
|
232 | 270 | |
|
233 | 271 | var cancelEdit = function(e) { |
|
234 | 272 | alterButton.hide(); |
|
235 | 273 | editButton.show(); |
|
236 | 274 | hiddenUrl.show(); |
|
237 | 275 | |
|
238 | 276 | helpBlock.html(originalText); |
|
239 | 277 | changedFlag.val('OLD'); |
|
240 | 278 | input.val(''); |
|
241 | 279 | }; |
|
242 | 280 | |
|
243 | 281 | var initEvents = function() { |
|
244 | 282 | editButton.on('click', edit); |
|
245 | 283 | cancelEditButton.on('click', cancelEdit); |
|
246 | 284 | }; |
|
247 | 285 | |
|
248 | 286 | var setInitialState = function() { |
|
249 | 287 | if (input.hasClass('error')) { |
|
250 | 288 | alterButton.show(); |
|
251 | 289 | editButton.hide(); |
|
252 | 290 | hiddenUrl.hide(); |
|
253 | 291 | } |
|
254 | 292 | }; |
|
255 | 293 | |
|
256 | 294 | setInitialState(); |
|
257 | 295 | initEvents(); |
|
258 |
} |
|
|
296 | }; | |
|
297 | ||
|
298 | ||
|
299 | var alterButton = $('#alter_clone_uri'); | |
|
300 | var editButton = $('#edit_clone_uri'); | |
|
301 | var cancelEditButton = $('#cancel_edit_clone_uri'); | |
|
302 | var hiddenUrl = $('#clone_uri_hidden'); | |
|
303 | var hiddenUrlValue = $('#clone_uri_hidden_value'); | |
|
304 | var input = $('#clone_uri'); | |
|
305 | var helpBlock = $('#alter_clone_uri_help_block'); | |
|
306 | var changedFlag = $('#repo_clone_uri_change'); | |
|
307 | cloneUrl( | |
|
308 | alterButton, editButton, cancelEditButton, hiddenUrl, | |
|
309 | hiddenUrlValue, input, helpBlock, changedFlag); | |
|
310 | ||
|
311 | var alterButton = $('#alter_push_uri'); | |
|
312 | var editButton = $('#edit_push_uri'); | |
|
313 | var cancelEditButton = $('#cancel_edit_push_uri'); | |
|
314 | var hiddenUrl = $('#push_uri_hidden'); | |
|
315 | var hiddenUrlValue = $('#push_uri_hidden_value'); | |
|
316 | var input = $('#push_uri'); | |
|
317 | var helpBlock = $('#alter_push_uri_help_block'); | |
|
318 | var changedFlag = $('#repo_push_uri_change'); | |
|
319 | cloneUrl( | |
|
320 | alterButton, editButton, cancelEditButton, hiddenUrl, | |
|
321 | hiddenUrlValue, input, helpBlock, changedFlag); | |
|
259 | 322 | |
|
260 | 323 | selectMyGroup = function(element) { |
|
261 | 324 | $("#repo_group").val($(element).data('personalGroupId')).trigger("change"); |
|
262 | 325 | }; |
|
263 | 326 | |
|
264 | 327 | UsersAutoComplete('repo_owner', '${c.rhodecode_user.user_id}'); |
|
265 | 328 | }); |
|
266 | 329 | </script> |
General Comments 0
You need to be logged in to leave comments.
Login now