@@ -1,2534 +1,2533 @@ | |||
|
1 | 1 | # Copyright (C) 2011-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | import logging |
|
20 | 20 | import time |
|
21 | 21 | |
|
22 | 22 | import rhodecode |
|
23 | 23 | from rhodecode.api import ( |
|
24 | 24 | jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError) |
|
25 | 25 | from rhodecode.api.utils import ( |
|
26 | 26 | has_superadmin_permission, Optional, OAttr, get_repo_or_error, |
|
27 | 27 | get_user_group_or_error, get_user_or_error, validate_repo_permissions, |
|
28 | 28 | get_perm_or_error, parse_args, get_origin, build_commit_data, |
|
29 | 29 | validate_set_owner_permissions) |
|
30 | 30 | from rhodecode.lib import audit_logger, rc_cache, channelstream |
|
31 | 31 | from rhodecode.lib import repo_maintenance |
|
32 | 32 | from rhodecode.lib.auth import ( |
|
33 | 33 | HasPermissionAnyApi, HasUserGroupPermissionAnyApi, |
|
34 | 34 | HasRepoPermissionAnyApi) |
|
35 | 35 | from rhodecode.lib.celerylib.utils import get_task_id |
|
36 | 36 | from rhodecode.lib.utils2 import ( |
|
37 | 37 | str2bool, time_to_datetime, safe_str, safe_int) |
|
38 | 38 | from rhodecode.lib.ext_json import json |
|
39 | 39 | from rhodecode.lib.exceptions import ( |
|
40 | 40 | StatusChangeOnClosedPullRequestError, CommentVersionMismatch) |
|
41 | 41 | from rhodecode.lib.vcs import RepositoryError |
|
42 | 42 | from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError |
|
43 | 43 | from rhodecode.model.changeset_status import ChangesetStatusModel |
|
44 | 44 | from rhodecode.model.comment import CommentsModel |
|
45 | 45 | from rhodecode.model.db import ( |
|
46 | 46 | Session, ChangesetStatus, RepositoryField, Repository, RepoGroup, |
|
47 | 47 | ChangesetComment) |
|
48 | 48 | from rhodecode.model.permission import PermissionModel |
|
49 | 49 | from rhodecode.model.pull_request import PullRequestModel |
|
50 | 50 | from rhodecode.model.repo import RepoModel |
|
51 | 51 | from rhodecode.model.scm import ScmModel, RepoList |
|
52 | 52 | from rhodecode.model.settings import SettingsModel, VcsSettingsModel |
|
53 | 53 | from rhodecode.model import validation_schema |
|
54 | 54 | from rhodecode.model.validation_schema.schemas import repo_schema |
|
55 | 55 | |
|
56 | 56 | log = logging.getLogger(__name__) |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | @jsonrpc_method() |
|
60 | 60 | def get_repo(request, apiuser, repoid, cache=Optional(True)): |
|
61 | 61 | """ |
|
62 | 62 | Gets an existing repository by its name or repository_id. |
|
63 | 63 | |
|
64 | 64 | The members section of the output returns the user groups or users |
|
65 | 65 | associated with that repository. |
|
66 | 66 | |
|
67 | 67 | This command can only be run using an |authtoken| with admin rights, |
|
68 | 68 | or users with at least read rights to the |repo|. |
|
69 | 69 | |
|
70 | 70 | :param apiuser: This is filled automatically from the |authtoken|. |
|
71 | 71 | :type apiuser: AuthUser |
|
72 | 72 | :param repoid: The repository name or repository id. |
|
73 | 73 | :type repoid: str or int |
|
74 | 74 | :param cache: use the cached value for last changeset |
|
75 | 75 | :type cache: Optional(bool) |
|
76 | 76 | |
|
77 | 77 | Example output: |
|
78 | 78 | |
|
79 | 79 | .. code-block:: bash |
|
80 | 80 | |
|
81 | 81 | { |
|
82 | 82 | "error": null, |
|
83 | 83 | "id": <repo_id>, |
|
84 | 84 | "result": { |
|
85 | 85 | "clone_uri": null, |
|
86 | 86 | "created_on": "timestamp", |
|
87 | 87 | "description": "repo description", |
|
88 | 88 | "enable_downloads": false, |
|
89 | 89 | "enable_locking": false, |
|
90 | 90 | "enable_statistics": false, |
|
91 | 91 | "followers": [ |
|
92 | 92 | { |
|
93 | 93 | "active": true, |
|
94 | 94 | "admin": false, |
|
95 | 95 | "api_key": "****************************************", |
|
96 | 96 | "api_keys": [ |
|
97 | 97 | "****************************************" |
|
98 | 98 | ], |
|
99 | 99 | "email": "user@example.com", |
|
100 | 100 | "emails": [ |
|
101 | 101 | "user@example.com" |
|
102 | 102 | ], |
|
103 | 103 | "extern_name": "rhodecode", |
|
104 | 104 | "extern_type": "rhodecode", |
|
105 | 105 | "firstname": "username", |
|
106 | 106 | "ip_addresses": [], |
|
107 | 107 | "language": null, |
|
108 | 108 | "last_login": "2015-09-16T17:16:35.854", |
|
109 | 109 | "lastname": "surname", |
|
110 | 110 | "user_id": <user_id>, |
|
111 | 111 | "username": "name" |
|
112 | 112 | } |
|
113 | 113 | ], |
|
114 | 114 | "fork_of": "parent-repo", |
|
115 | 115 | "landing_rev": [ |
|
116 | 116 | "rev", |
|
117 | 117 | "tip" |
|
118 | 118 | ], |
|
119 | 119 | "last_changeset": { |
|
120 | 120 | "author": "User <user@example.com>", |
|
121 | 121 | "branch": "default", |
|
122 | 122 | "date": "timestamp", |
|
123 | 123 | "message": "last commit message", |
|
124 | 124 | "parents": [ |
|
125 | 125 | { |
|
126 | 126 | "raw_id": "commit-id" |
|
127 | 127 | } |
|
128 | 128 | ], |
|
129 | 129 | "raw_id": "commit-id", |
|
130 | 130 | "revision": <revision number>, |
|
131 | 131 | "short_id": "short id" |
|
132 | 132 | }, |
|
133 | 133 | "lock_reason": null, |
|
134 | 134 | "locked_by": null, |
|
135 | 135 | "locked_date": null, |
|
136 | 136 | "owner": "owner-name", |
|
137 | 137 | "permissions": [ |
|
138 | 138 | { |
|
139 | 139 | "name": "super-admin-name", |
|
140 | 140 | "origin": "super-admin", |
|
141 | 141 | "permission": "repository.admin", |
|
142 | 142 | "type": "user" |
|
143 | 143 | }, |
|
144 | 144 | { |
|
145 | 145 | "name": "owner-name", |
|
146 | 146 | "origin": "owner", |
|
147 | 147 | "permission": "repository.admin", |
|
148 | 148 | "type": "user" |
|
149 | 149 | }, |
|
150 | 150 | { |
|
151 | 151 | "name": "user-group-name", |
|
152 | 152 | "origin": "permission", |
|
153 | 153 | "permission": "repository.write", |
|
154 | 154 | "type": "user_group" |
|
155 | 155 | } |
|
156 | 156 | ], |
|
157 | 157 | "private": true, |
|
158 | 158 | "repo_id": 676, |
|
159 | 159 | "repo_name": "user-group/repo-name", |
|
160 | 160 | "repo_type": "hg" |
|
161 | 161 | } |
|
162 | 162 | } |
|
163 | 163 | """ |
|
164 | 164 | |
|
165 | 165 | repo = get_repo_or_error(repoid) |
|
166 | 166 | cache = Optional.extract(cache) |
|
167 | 167 | |
|
168 | 168 | include_secrets = False |
|
169 | 169 | if has_superadmin_permission(apiuser): |
|
170 | 170 | include_secrets = True |
|
171 | 171 | else: |
|
172 | 172 | # check if we have at least read permission for this repo ! |
|
173 | 173 | _perms = ( |
|
174 | 174 | 'repository.admin', 'repository.write', 'repository.read',) |
|
175 | 175 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
176 | 176 | |
|
177 | 177 | permissions = [] |
|
178 | 178 | for _user in repo.permissions(): |
|
179 | 179 | user_data = { |
|
180 | 180 | 'name': _user.username, |
|
181 | 181 | 'permission': _user.permission, |
|
182 | 182 | 'origin': get_origin(_user), |
|
183 | 183 | 'type': "user", |
|
184 | 184 | } |
|
185 | 185 | permissions.append(user_data) |
|
186 | 186 | |
|
187 | 187 | for _user_group in repo.permission_user_groups(): |
|
188 | 188 | user_group_data = { |
|
189 | 189 | 'name': _user_group.users_group_name, |
|
190 | 190 | 'permission': _user_group.permission, |
|
191 | 191 | 'origin': get_origin(_user_group), |
|
192 | 192 | 'type': "user_group", |
|
193 | 193 | } |
|
194 | 194 | permissions.append(user_group_data) |
|
195 | 195 | |
|
196 | 196 | following_users = [ |
|
197 | 197 | user.user.get_api_data(include_secrets=include_secrets) |
|
198 | 198 | for user in repo.followers] |
|
199 | 199 | |
|
200 | 200 | if not cache: |
|
201 | 201 | repo.update_commit_cache() |
|
202 | 202 | data = repo.get_api_data(include_secrets=include_secrets) |
|
203 | 203 | data['permissions'] = permissions |
|
204 | 204 | data['followers'] = following_users |
|
205 | 205 | |
|
206 | 206 | return data |
|
207 | 207 | |
|
208 | 208 | |
|
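For illustration, a minimal client-side call to this method might look like the
following sketch. The endpoint URL and auth token are placeholders, and the
payload follows the JSON-RPC envelope (``id``, ``auth_token``, ``method``,
``args``) used by the API.

.. code-block:: python

    # Hedged sketch: fetch a single repository over JSON-RPC.
    # The URL and token below are placeholders, not real values.
    import requests

    payload = {
        'id': 1,
        'auth_token': '<auth_token>',
        'method': 'get_repo',
        'args': {'repoid': 'user-group/repo-name', 'cache': True},
    }
    data = requests.post('https://rhodecode.example.com/_admin/api',
                         json=payload).json()
    # 'result' carries the structure shown in the docstring above
    print(data['result']['repo_name'], len(data['result']['permissions']))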
209 | 209 | @jsonrpc_method() |
|
210 | 210 | def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)): |
|
211 | 211 | """ |
|
212 | 212 | Lists all existing repositories. |
|
213 | 213 | |
|
214 | 214 | This command can only be run using an |authtoken| with admin rights, |
|
215 | 215 | or users with at least read rights to |repos|. |
|
216 | 216 | |
|
217 | 217 | :param apiuser: This is filled automatically from the |authtoken|. |
|
218 | 218 | :type apiuser: AuthUser |
|
219 | 219 | :param root: specify root repository group to fetch repositories. |
|
220 | 220 | Filters the returned repositories to be members of the given root group. |
|
221 | 221 | :type root: Optional(None) |
|
222 | 222 | :param traverse: traverse given root into subrepositories. With this flag |
|
223 | 223 | set to False, it will only return top-level repositories from `root`. |
|
224 | 224 | If root is empty, it will return just top-level repositories. |
|
225 | 225 | :type traverse: Optional(True) |
|
226 | 226 | |
|
227 | 227 | |
|
228 | 228 | Example output: |
|
229 | 229 | |
|
230 | 230 | .. code-block:: bash |
|
231 | 231 | |
|
232 | 232 | id : <id_given_in_input> |
|
233 | 233 | result: [ |
|
234 | 234 | { |
|
235 | 235 | "repo_id" : "<repo_id>", |
|
236 | 236 | "repo_name" : "<reponame>", |
|
237 | 237 | "repo_type" : "<repo_type>", |
|
238 | 238 | "clone_uri" : "<clone_uri>", |
|
239 | 239 | "private" : "<bool>", |
|
240 | 240 | "created_on" : "<datetimecreated>", |
|
241 | 241 | "description" : "<description>", |
|
242 | 242 | "landing_rev": "<landing_rev>", |
|
243 | 243 | "owner": "<repo_owner>", |
|
244 | 244 | "fork_of": "<name_of_fork_parent>", |
|
245 | 245 | "enable_downloads": "<bool>", |
|
246 | 246 | "enable_locking": "<bool>", |
|
247 | 247 | "enable_statistics": "<bool>", |
|
248 | 248 | }, |
|
249 | 249 | ... |
|
250 | 250 | ] |
|
251 | 251 | error: null |
|
252 | 252 | """ |
|
253 | 253 | |
|
254 | 254 | include_secrets = has_superadmin_permission(apiuser) |
|
255 | 255 | _perms = ('repository.read', 'repository.write', 'repository.admin',) |
|
256 | 256 | extras = {'user': apiuser} |
|
257 | 257 | |
|
258 | 258 | root = Optional.extract(root) |
|
259 | 259 | traverse = Optional.extract(traverse, binary=True) |
|
260 | 260 | |
|
261 | 261 | if root: |
|
262 | 262 | # verify parent existence, if it's empty return an error |
|
263 | 263 | parent = RepoGroup.get_by_group_name(root) |
|
264 | 264 | if not parent: |
|
265 | 265 | raise JSONRPCError( |
|
266 | 266 | f'Root repository group `{root}` does not exist') |
|
267 | 267 | |
|
268 | 268 | if traverse: |
|
269 | 269 | repos = RepoModel().get_repos_for_root(root=root, traverse=traverse) |
|
270 | 270 | else: |
|
271 | 271 | repos = RepoModel().get_repos_for_root(root=parent) |
|
272 | 272 | else: |
|
273 | 273 | if traverse: |
|
274 | 274 | repos = RepoModel().get_all() |
|
275 | 275 | else: |
|
276 | 276 | # return just top-level |
|
277 | 277 | repos = RepoModel().get_repos_for_root(root=None) |
|
278 | 278 | |
|
279 | 279 | repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras) |
|
280 | 280 | return [repo.get_api_data(include_secrets=include_secrets) |
|
281 | 281 | for repo in repo_list] |
|
282 | 282 | |
|
283 | 283 | |
|
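A hedged sketch of how ``root`` and ``traverse`` combine (placeholder endpoint
and token): ``traverse=False`` keeps the listing to the top level of the group.

.. code-block:: python

    # Hedged sketch: list only top-level repositories of group 'foo'.
    import requests

    payload = {
        'id': 2,
        'auth_token': '<auth_token>',
        'method': 'get_repos',
        'args': {'root': 'foo', 'traverse': False},
    }
    repos = requests.post('https://rhodecode.example.com/_admin/api',
                          json=payload).json()['result']
    for repo in repos:
        print(repo['repo_id'], repo['repo_name'])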
284 | 284 | @jsonrpc_method() |
|
285 | 285 | def get_repo_changeset(request, apiuser, repoid, revision, |
|
286 | 286 | details=Optional('basic')): |
|
287 | 287 | """ |
|
288 | 288 | Returns information about a changeset. |
|
289 | 289 | |
|
290 | 290 | Additional parameters define the amount of detail returned by |
|
291 | 291 | this function. |
|
292 | 292 | |
|
293 | 293 | This command can only be run using an |authtoken| with admin rights, |
|
294 | 294 | or users with at least read rights to the |repo|. |
|
295 | 295 | |
|
296 | 296 | :param apiuser: This is filled automatically from the |authtoken|. |
|
297 | 297 | :type apiuser: AuthUser |
|
298 | 298 | :param repoid: The repository name or repository id |
|
299 | 299 | :type repoid: str or int |
|
300 | 300 | :param revision: revision for which listing should be done |
|
301 | 301 | :type revision: str |
|
302 | 302 | :param details: details can be 'basic', 'extended' or 'full'. The 'full' |
|
303 | 303 | option returns diff details such as the diff itself and the number of changed files. |
|
304 | 304 | :type details: Optional(str) |
|
305 | 305 | |
|
306 | 306 | """ |
|
307 | 307 | repo = get_repo_or_error(repoid) |
|
308 | 308 | if not has_superadmin_permission(apiuser): |
|
309 | 309 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
310 | 310 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
311 | 311 | |
|
312 | 312 | changes_details = Optional.extract(details) |
|
313 | 313 | _changes_details_types = ['basic', 'extended', 'full'] |
|
314 | 314 | if changes_details not in _changes_details_types: |
|
315 | 315 | raise JSONRPCError( |
|
316 | 316 | 'ret_type must be one of %s' % ( |
|
317 | 317 | ','.join(_changes_details_types))) |
|
318 | 318 | |
|
319 | 319 | vcs_repo = repo.scm_instance() |
|
320 | 320 | pre_load = ['author', 'branch', 'date', 'message', 'parents', |
|
321 | 321 | 'status', '_commit', '_file_paths'] |
|
322 | 322 | |
|
323 | 323 | try: |
|
324 | 324 | commit = repo.get_commit(commit_id=revision, pre_load=pre_load) |
|
325 | 325 | except TypeError as e: |
|
326 | 326 | raise JSONRPCError(safe_str(e)) |
|
327 | 327 | _cs_json = commit.__json__() |
|
328 | 328 | _cs_json['diff'] = build_commit_data(vcs_repo, commit, changes_details) |
|
329 | 329 | if changes_details == 'full': |
|
330 | 330 | _cs_json['refs'] = commit._get_refs() |
|
331 | 331 | return _cs_json |
|
332 | 332 | |
|
333 | 333 | |
|
334 | 334 | @jsonrpc_method() |
|
335 | 335 | def get_repo_changesets(request, apiuser, repoid, start_rev, limit, |
|
336 | 336 | details=Optional('basic')): |
|
337 | 337 | """ |
|
338 | 338 | Returns a set of commits limited by the number starting |
|
339 | 339 | from the `start_rev` option. |
|
340 | 340 | |
|
341 | 341 | Additional parameters define the amount of details returned by this |
|
342 | 342 | function. |
|
343 | 343 | |
|
344 | 344 | This command can only be run using an |authtoken| with admin rights, |
|
345 | 345 | or users with at least read rights to |repos|. |
|
346 | 346 | |
|
347 | 347 | :param apiuser: This is filled automatically from the |authtoken|. |
|
348 | 348 | :type apiuser: AuthUser |
|
349 | 349 | :param repoid: The repository name or repository ID. |
|
350 | 350 | :type repoid: str or int |
|
351 | 351 | :param start_rev: The starting revision from where to get changesets. |
|
352 | 352 | :type start_rev: str |
|
353 | 353 | :param limit: Limit the number of commits to this amount |
|
354 | 354 | :type limit: str or int |
|
355 | 355 | :param details: Set the level of detail returned. Valid options are: |
|
356 | 356 | ``basic``, ``extended`` and ``full``. |
|
357 | 357 | :type details: Optional(str) |
|
358 | 358 | |
|
359 | 359 | .. note:: |
|
360 | 360 | |
|
361 | 361 | Setting the parameter `details` to the value ``full`` is extensive |
|
362 | 362 | and returns details like the diff itself, and the number |
|
363 | 363 | of changed files. |
|
364 | 364 | |
|
365 | 365 | """ |
|
366 | 366 | repo = get_repo_or_error(repoid) |
|
367 | 367 | if not has_superadmin_permission(apiuser): |
|
368 | 368 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
369 | 369 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
370 | 370 | |
|
371 | 371 | changes_details = Optional.extract(details) |
|
372 | 372 | _changes_details_types = ['basic', 'extended', 'full'] |
|
373 | 373 | if changes_details not in _changes_details_types: |
|
374 | 374 | raise JSONRPCError( |
|
375 | 375 | 'ret_type must be one of %s' % ( |
|
376 | 376 | ','.join(_changes_details_types))) |
|
377 | 377 | |
|
378 | 378 | limit = int(limit) |
|
379 | 379 | pre_load = ['author', 'branch', 'date', 'message', 'parents', |
|
380 | 380 | 'status', '_commit', '_file_paths'] |
|
381 | 381 | |
|
382 | 382 | vcs_repo = repo.scm_instance() |
|
383 | 383 | # SVN needs a special case to distinguish its index and commit id |
|
384 | 384 | if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'): |
|
385 | 385 | start_rev = vcs_repo.commit_ids[0] |
|
386 | 386 | |
|
387 | 387 | try: |
|
388 | 388 | commits = vcs_repo.get_commits( |
|
389 | 389 | start_id=start_rev, pre_load=pre_load, translate_tags=False) |
|
390 | 390 | except TypeError as e: |
|
391 | 391 | raise JSONRPCError(safe_str(e)) |
|
392 | 392 | except Exception: |
|
393 | 393 | log.exception('Fetching of commits failed') |
|
394 | 394 | raise JSONRPCError('Error occurred during commit fetching') |
|
395 | 395 | |
|
396 | 396 | ret = [] |
|
397 | 397 | for cnt, commit in enumerate(commits): |
|
398 | 398 | if cnt >= limit != -1:  # i.e. cnt >= limit and limit != -1 |
|
399 | 399 | break |
|
400 | 400 | _cs_json = commit.__json__() |
|
401 | 401 | _cs_json['diff'] = build_commit_data(vcs_repo, commit, changes_details) |
|
402 | 402 | if changes_details == 'full': |
|
403 | 403 | _cs_json['refs'] = { |
|
404 | 404 | 'branches': [commit.branch], |
|
405 | 405 | 'bookmarks': getattr(commit, 'bookmarks', []), |
|
406 | 406 | 'tags': commit.tags |
|
407 | 407 | } |
|
408 | 408 | ret.append(_cs_json) |
|
409 | 409 | return ret |
|
410 | 410 | |
|
411 | 411 | |
|
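Since the method returns at most ``limit`` commits beginning at ``start_rev``,
history can be paged by feeding the last returned id back in. A hedged sketch,
assuming ``start_rev`` is inclusive and using a placeholder endpoint and token:

.. code-block:: python

    # Hedged sketch: walk a repository's history in pages of 100 commits.
    import requests

    API = 'https://rhodecode.example.com/_admin/api'

    def get_page(start_rev, limit=100):
        payload = {'id': 3, 'auth_token': '<auth_token>',
                   'method': 'get_repo_changesets',
                   'args': {'repoid': 'repo-name', 'start_rev': start_rev,
                            'limit': limit, 'details': 'basic'}}
        return requests.post(API, json=payload).json()['result']

    page = get_page('<oldest-commit-id>')
    while page:
        for commit in page:
            print(commit['raw_id'])
        if len(page) < 100:
            break
        # start_rev is inclusive, so drop the first entry of the next page
        page = get_page(page[-1]['raw_id'])[1:]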
412 | 412 | @jsonrpc_method() |
|
413 | 413 | def get_repo_nodes(request, apiuser, repoid, revision, root_path, |
|
414 | 414 | ret_type=Optional('all'), details=Optional('basic'), |
|
415 | 415 | max_file_bytes=Optional(None)): |
|
416 | 416 | """ |
|
417 | 417 | Returns a list of nodes and children in a flat list for a given |
|
418 | 418 | path at a given revision. |
|
419 | 419 | |
|
420 | 420 | It's possible to specify ret_type to show only `files` or `dirs`. |
|
421 | 421 | |
|
422 | 422 | This command can only be run using an |authtoken| with admin rights, |
|
423 | 423 | or users with at least read rights to |repos|. |
|
424 | 424 | |
|
425 | 425 | :param apiuser: This is filled automatically from the |authtoken|. |
|
426 | 426 | :type apiuser: AuthUser |
|
427 | 427 | :param repoid: The repository name or repository ID. |
|
428 | 428 | :type repoid: str or int |
|
429 | 429 | :param revision: The revision for which listing should be done. |
|
430 | 430 | :type revision: str |
|
431 | 431 | :param root_path: The path from which to start displaying. |
|
432 | 432 | :type root_path: str |
|
433 | 433 | :param ret_type: Set the return type. Valid options are |
|
434 | 434 | ``all`` (default), ``files`` and ``dirs``. |
|
435 | 435 | :type ret_type: Optional(str) |
|
436 | 436 | :param details: Returns extended information about nodes, such as |
|
437 | 437 | md5, binary, and/or content. |
|
438 | 438 | The valid options are ``basic`` and ``full``. |
|
439 | 439 | :type details: Optional(str) |
|
440 | 440 | :param max_file_bytes: Only return file content for files smaller than this size, in bytes |
|
441 | 441 | :type max_file_bytes: Optional(int) |
|
442 | 442 | |
|
443 | 443 | Example output: |
|
444 | 444 | |
|
445 | 445 | .. code-block:: bash |
|
446 | 446 | |
|
447 | 447 | id : <id_given_in_input> |
|
448 | 448 | result: [ |
|
449 | 449 | { |
|
450 | 450 | "binary": false, |
|
451 | 451 | "content": "File line", |
|
452 | 452 | "extension": "md", |
|
453 | 453 | "lines": 2, |
|
454 | 454 | "md5": "059fa5d29b19c0657e384749480f6422", |
|
455 | 455 | "mimetype": "text/x-minidsrc", |
|
456 | 456 | "name": "file.md", |
|
457 | 457 | "size": 580, |
|
458 | 458 | "type": "file" |
|
459 | 459 | }, |
|
460 | 460 | ... |
|
461 | 461 | ] |
|
462 | 462 | error: null |
|
463 | 463 | """ |
|
464 | 464 | |
|
465 | 465 | repo = get_repo_or_error(repoid) |
|
466 | 466 | if not has_superadmin_permission(apiuser): |
|
467 | 467 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
468 | 468 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
469 | 469 | |
|
470 | 470 | ret_type = Optional.extract(ret_type) |
|
471 | 471 | details = Optional.extract(details) |
|
472 | 472 | max_file_bytes = Optional.extract(max_file_bytes) |
|
473 | 473 | |
|
474 | 474 | _extended_types = ['basic', 'full'] |
|
475 | 475 | if details not in _extended_types: |
|
476 | 476 | ret_types = ','.join(_extended_types) |
|
477 | 477 | raise JSONRPCError(f'ret_type must be one of {ret_types}') |
|
478 | 478 | |
|
479 | 479 | extended_info = False |
|
480 | 480 | content = False |
|
481 | 481 | if details == 'basic': |
|
482 | 482 | extended_info = True |
|
483 | 483 | |
|
484 | 484 | if details == 'full': |
|
485 | 485 | extended_info = content = True |
|
486 | 486 | |
|
487 | 487 | _map = {} |
|
488 | 488 | try: |
|
489 | 489 | # check if repo is not empty by any chance, skip quicker if it is. |
|
490 | 490 | _scm = repo.scm_instance() |
|
491 | 491 | if _scm.is_empty(): |
|
492 | 492 | return [] |
|
493 | 493 | |
|
494 | 494 | _d, _f = ScmModel().get_nodes( |
|
495 | 495 | repo, revision, root_path, flat=False, |
|
496 | 496 | extended_info=extended_info, content=content, |
|
497 | 497 | max_file_bytes=max_file_bytes) |
|
498 | 498 | |
|
499 | 499 | _map = { |
|
500 | 500 | 'all': _d + _f, |
|
501 | 501 | 'files': _f, |
|
502 | 502 | 'dirs': _d, |
|
503 | 503 | } |
|
504 | 504 | |
|
505 | 505 | return _map[ret_type] |
|
506 | 506 | except KeyError: |
|
507 | 507 | keys = ','.join(sorted(_map.keys())) |
|
508 | 508 | raise JSONRPCError(f'ret_type must be one of {keys}') |
|
509 | 509 | except Exception: |
|
510 | 510 | log.exception("Exception occurred while trying to get repo nodes") |
|
511 | 511 | raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` nodes') |
|
512 | 512 | |
|
513 | 513 | |
|
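A hedged sketch combining ``ret_type`` and ``details`` (placeholder endpoint
and token); ``details='full'`` adds md5 and content, subject to
``max_file_bytes``:

.. code-block:: python

    # Hedged sketch: list files under docs/ at 'tip' with full details.
    import requests

    payload = {
        'id': 4,
        'auth_token': '<auth_token>',
        'method': 'get_repo_nodes',
        'args': {'repoid': 'repo-name', 'revision': 'tip',
                 'root_path': 'docs', 'ret_type': 'files',
                 'details': 'full', 'max_file_bytes': 1024 * 1024},
    }
    nodes = requests.post('https://rhodecode.example.com/_admin/api',
                          json=payload).json()['result']
    for node in nodes:
        print(node['type'], node['name'], node['size'])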
514 | 514 | @jsonrpc_method() |
|
515 | 515 | def get_repo_file(request, apiuser, repoid, commit_id, file_path, |
|
516 | 516 | max_file_bytes=Optional(0), details=Optional('basic'), |
|
517 | 517 | cache=Optional(True)): |
|
518 | 518 | """ |
|
519 | 519 | Returns a single file from a repository at a given revision. |
|
520 | 520 | |
|
521 | 521 | This command can only be run using an |authtoken| with admin rights, |
|
522 | 522 | or users with at least read rights to |repos|. |
|
523 | 523 | |
|
524 | 524 | :param apiuser: This is filled automatically from the |authtoken|. |
|
525 | 525 | :type apiuser: AuthUser |
|
526 | 526 | :param repoid: The repository name or repository ID. |
|
527 | 527 | :type repoid: str or int |
|
528 | 528 | :param commit_id: The revision for which listing should be done. |
|
529 | 529 | :type commit_id: str |
|
530 | 530 | :param file_path: The path from which to start displaying. |
|
531 | 531 | :type file_path: str |
|
532 | 532 | :param details: Returns different set of information about nodes. |
|
533 | 533 | The valid options are ``minimal``, ``basic`` and ``full``. |
|
534 | 534 | :type details: Optional(str) |
|
535 | 535 | :param max_file_bytes: Only return file content for files smaller than this size, in bytes |
|
536 | 536 | :type max_file_bytes: Optional(int) |
|
537 | 537 | :param cache: Use internal caches for fetching files. If disabled, fetching |
|
538 | 538 | files is slower but more memory efficient. |
|
539 | 539 | :type cache: Optional(bool) |
|
540 | 540 | |
|
541 | 541 | Example output: |
|
542 | 542 | |
|
543 | 543 | .. code-block:: bash |
|
544 | 544 | |
|
545 | 545 | id : <id_given_in_input> |
|
546 | 546 | result: { |
|
547 | 547 | "binary": false, |
|
548 | 548 | "extension": "py", |
|
549 | 549 | "lines": 35, |
|
550 | 550 | "content": "....", |
|
551 | 551 | "md5": "76318336366b0f17ee249e11b0c99c41", |
|
552 | 552 | "mimetype": "text/x-python", |
|
553 | 553 | "name": "python.py", |
|
554 | 554 | "size": 817, |
|
555 | 555 | "type": "file", |
|
556 | 556 | } |
|
557 | 557 | error: null |
|
558 | 558 | """ |
|
559 | 559 | |
|
560 | 560 | repo = get_repo_or_error(repoid) |
|
561 | 561 | if not has_superadmin_permission(apiuser): |
|
562 | 562 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
563 | 563 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
564 | 564 | |
|
565 | 565 | cache = Optional.extract(cache, binary=True) |
|
566 | 566 | details = Optional.extract(details) |
|
567 | 567 | max_file_bytes = Optional.extract(max_file_bytes) |
|
568 | 568 | |
|
569 | 569 | _extended_types = ['minimal', 'minimal+search', 'basic', 'full'] |
|
570 | 570 | if details not in _extended_types: |
|
571 | 571 | ret_types = ','.join(_extended_types) |
|
572 | 572 | raise JSONRPCError(f'ret_type must be one of {ret_types}, got {details}') |
|
573 | 573 | extended_info = False |
|
574 | 574 | content = False |
|
575 | 575 | |
|
576 | 576 | if details == 'minimal': |
|
577 | 577 | extended_info = False |
|
578 | 578 | |
|
579 | 579 | elif details == 'basic': |
|
580 | 580 | extended_info = True |
|
581 | 581 | |
|
582 | 582 | elif details == 'full': |
|
583 | 583 | extended_info = content = True |
|
584 | 584 | |
|
585 | 585 | file_path = safe_str(file_path) |
|
586 | 586 | try: |
|
587 | 587 | # check if repo is not empty by any chance, skip quicker if it is. |
|
588 | 588 | _scm = repo.scm_instance() |
|
589 | 589 | if _scm.is_empty(): |
|
590 | 590 | return None |
|
591 | 591 | |
|
592 | 592 | node = ScmModel().get_node( |
|
593 | 593 | repo, commit_id, file_path, extended_info=extended_info, |
|
594 | 594 | content=content, max_file_bytes=max_file_bytes, cache=cache) |
|
595 | 595 | |
|
596 | 596 | except NodeDoesNotExistError: |
|
597 | 597 | raise JSONRPCError( |
|
598 | 598 | f'There is no file in repo: `{repo.repo_name}` at path `{file_path}` for commit: `{commit_id}`') |
|
599 | 599 | except Exception: |
|
600 | 600 | log.exception("Exception occurred while trying to get repo %s file", |
|
601 | 601 | repo.repo_name) |
|
602 | 602 | raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` file at path {file_path}') |
|
603 | 603 | |
|
604 | 604 | return node |
|
605 | 605 | |
|
606 | 606 | |
|
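The ``details`` levels map directly onto the flags set in the body above:
``minimal`` returns the bare node, ``basic`` adds extended info, and ``full``
also inlines the content. A hedged sketch (placeholder endpoint and token):

.. code-block:: python

    # Hedged sketch: fetch one file's content at a specific commit.
    import requests

    payload = {
        'id': 5,
        'auth_token': '<auth_token>',
        'method': 'get_repo_file',
        'args': {'repoid': 'repo-name', 'commit_id': '<commit-sha>',
                 'file_path': 'setup.py', 'details': 'full'},
    }
    node = requests.post('https://rhodecode.example.com/_admin/api',
                         json=payload).json()['result']
    if node is not None:  # None is returned for empty repositories
        print(node['mimetype'])
        print(node['content'])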
607 | 607 | @jsonrpc_method() |
|
608 | 608 | def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path): |
|
609 | 609 | """ |
|
610 | 610 | Returns a list of tree nodes for a path at a given revision. This API is built |
|
611 | 611 | strictly for usage in full text search building, and shouldn't be consumed for other purposes. |
|
612 | 612 | |
|
613 | 613 | This command can only be run using an |authtoken| with admin rights, |
|
614 | 614 | or users with at least read rights to |repos|. |
|
615 | 615 | |
|
616 | 616 | """ |
|
617 | 617 | |
|
618 | 618 | repo = get_repo_or_error(repoid) |
|
619 | 619 | if not has_superadmin_permission(apiuser): |
|
620 | 620 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
621 | 621 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
622 | 622 | |
|
623 | 623 | repo_id = repo.repo_id |
|
624 | 624 | cache_seconds = rhodecode.ConfigGet().get_int('rc_cache.cache_repo.expiration_time') |
|
625 | 625 | cache_on = cache_seconds > 0 |
|
626 | 626 | |
|
627 | 627 | cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}' |
|
628 | 628 | rc_cache.get_or_create_region('cache_repo', cache_namespace_uid) |
|
629 | 629 | |
|
630 | 630 | def compute_fts_tree(repo_id, commit_id, root_path): |
|
631 | 631 | return ScmModel().get_fts_data(repo_id, commit_id, root_path) |
|
632 | 632 | |
|
633 | 633 | try: |
|
634 | 634 | # check if repo is not empty by any chance, skip quicker if it is. |
|
635 | 635 | _scm = repo.scm_instance() |
|
636 | 636 | if not _scm or _scm.is_empty(): |
|
637 | 637 | return [] |
|
638 | 638 | except RepositoryError: |
|
639 | 639 | log.exception("Exception occurred while trying to get repo nodes") |
|
640 | 640 | raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` nodes') |
|
641 | 641 | |
|
642 | 642 | try: |
|
643 | 643 | # we need to resolve commit_id to a FULL sha for cache to work correctly. |
|
644 | 644 | # sending 'master' is a pointer that needs to be translated to current commit. |
|
645 | 645 | commit_id = _scm.get_commit(commit_id=commit_id).raw_id |
|
646 | 646 | log.debug( |
|
647 | 647 | 'Computing FTS REPO TREE for repo_id %s commit_id `%s` ' |
|
648 | 648 | 'with caching: %s[TTL: %ss]' % ( |
|
649 | 649 | repo_id, commit_id, cache_on, cache_seconds or 0)) |
|
650 | 650 | |
|
651 | 651 | tree_files = compute_fts_tree(repo_id, commit_id, root_path) |
|
652 | 652 | |
|
653 | 653 | return tree_files |
|
654 | 654 | |
|
655 | 655 | except Exception: |
|
656 | 656 | log.exception("Exception occurred while trying to get repo nodes") |
|
657 | 657 | raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name) |
|
658 | 658 | |
|
659 | 659 | |
|
660 | 660 | @jsonrpc_method() |
|
661 | 661 | def get_repo_refs(request, apiuser, repoid): |
|
662 | 662 | """ |
|
663 | 663 | Returns a dictionary of current references. It returns |
|
664 | 664 | bookmarks, branches, closed_branches, and tags for the given repository. |

665 | 665 | |

666 | 666 | |
|
667 | 667 | |
|
668 | 668 | This command can only be run using an |authtoken| with admin rights, |
|
669 | 669 | or users with at least read rights to |repos|. |
|
670 | 670 | |
|
671 | 671 | :param apiuser: This is filled automatically from the |authtoken|. |
|
672 | 672 | :type apiuser: AuthUser |
|
673 | 673 | :param repoid: The repository name or repository ID. |
|
674 | 674 | :type repoid: str or int |
|
675 | 675 | |
|
676 | 676 | Example output: |
|
677 | 677 | |
|
678 | 678 | .. code-block:: bash |
|
679 | 679 | |
|
680 | 680 | id : <id_given_in_input> |
|
681 | 681 | "result": { |
|
682 | 682 | "bookmarks": { |
|
683 | 683 | "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188", |
|
684 | 684 | "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf" |
|
685 | 685 | }, |
|
686 | 686 | "branches": { |
|
687 | 687 | "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188", |
|
688 | 688 | "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf" |
|
689 | 689 | }, |
|
690 | 690 | "branches_closed": {}, |
|
691 | 691 | "tags": { |
|
692 | 692 | "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188", |
|
693 | 693 | "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022", |
|
694 | 694 | "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27", |
|
695 | 695 | "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17", |
|
696 | 696 | } |
|
697 | 697 | } |
|
698 | 698 | error: null |
|
699 | 699 | """ |
|
700 | 700 | |
|
701 | 701 | repo = get_repo_or_error(repoid) |
|
702 | 702 | if not has_superadmin_permission(apiuser): |
|
703 | 703 | _perms = ('repository.admin', 'repository.write', 'repository.read',) |
|
704 | 704 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
705 | 705 | |
|
706 | 706 | try: |
|
707 | 707 | # check if repo is not empty by any chance, skip quicker if it is. |
|
708 | 708 | vcs_instance = repo.scm_instance() |
|
709 | 709 | refs = vcs_instance.refs() |
|
710 | 710 | return refs |
|
711 | 711 | except Exception: |
|
712 | 712 | log.exception("Exception occurred while trying to get repo refs") |
|
713 | 713 | raise JSONRPCError( |
|
714 | 714 | 'failed to get repo: `%s` references' % repo.repo_name |
|
715 | 715 | ) |
|
716 | 716 | |
|
717 | 717 | |
|
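A hedged sketch of resolving a branch head from the returned mapping
(placeholder endpoint and token):

.. code-block:: python

    # Hedged sketch: look up the commit id a branch currently points at.
    import requests

    payload = {'id': 6, 'auth_token': '<auth_token>',
               'method': 'get_repo_refs', 'args': {'repoid': 'repo-name'}}
    refs = requests.post('https://rhodecode.example.com/_admin/api',
                         json=payload).json()['result']
    print(refs['branches'].get('default'))
    print(sorted(refs['tags']))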
718 | 718 | @jsonrpc_method() |
|
719 | 719 | def create_repo( |
|
720 | 720 | request, apiuser, repo_name, repo_type, |
|
721 | 721 | owner=Optional(OAttr('apiuser')), |
|
722 | 722 | description=Optional(''), |
|
723 | 723 | private=Optional(False), |
|
724 | 724 | clone_uri=Optional(None), |
|
725 | 725 | push_uri=Optional(None), |
|
726 | 726 | landing_rev=Optional(None), |
|
727 | 727 | enable_statistics=Optional(False), |
|
728 | 728 | enable_locking=Optional(False), |
|
729 | 729 | enable_downloads=Optional(False), |
|
730 | 730 | copy_permissions=Optional(False)): |
|
731 | 731 | """ |
|
732 | 732 | Creates a repository. |
|
733 | 733 | |
|
734 | 734 | * If the repository name contains "/", repository will be created inside |
|
735 | 735 | a repository group or nested repository groups |
|
736 | 736 | |
|
737 | 737 | For example "foo/bar/repo1" will create |repo| called "repo1" inside |
|
738 | 738 | group "foo/bar". You have to have permissions to access and write to |
|
739 | 739 | the last repository group ("bar" in this example) |
|
740 | 740 | |
|
741 | 741 | This command can only be run using an |authtoken| with at least |
|
742 | 742 | permissions to create repositories, or write permissions to |
|
743 | 743 | parent repository groups. |
|
744 | 744 | |
|
745 | 745 | :param apiuser: This is filled automatically from the |authtoken|. |
|
746 | 746 | :type apiuser: AuthUser |
|
747 | 747 | :param repo_name: Set the repository name. |
|
748 | 748 | :type repo_name: str |
|
749 | 749 | :param repo_type: Set the repository type; 'hg','git', or 'svn'. |
|
750 | 750 | :type repo_type: str |
|
751 | 751 | :param owner: user_id or username |
|
752 | 752 | :type owner: Optional(str) |
|
753 | 753 | :param description: Set the repository description. |
|
754 | 754 | :type description: Optional(str) |
|
755 | 755 | :param private: set repository as private |
|
756 | 756 | :type private: bool |
|
757 | 757 | :param clone_uri: set clone_uri |
|
758 | 758 | :type clone_uri: str |
|
759 | 759 | :param push_uri: set push_uri |
|
760 | 760 | :type push_uri: str |
|
761 | 761 | :param landing_rev: <rev_type>:<rev>, e.g. branch:default, book:dev, rev:abcd |
|
762 | 762 | :type landing_rev: str |
|
763 | 763 | :param enable_locking: |
|
764 | 764 | :type enable_locking: bool |
|
765 | 765 | :param enable_downloads: |
|
766 | 766 | :type enable_downloads: bool |
|
767 | 767 | :param enable_statistics: |
|
768 | 768 | :type enable_statistics: bool |
|
769 | 769 | :param copy_permissions: Copy permission from group in which the |
|
770 | 770 | repository is being created. |
|
771 | 771 | :type copy_permissions: bool |
|
772 | 772 | |
|
773 | 773 | |
|
774 | 774 | Example output: |
|
775 | 775 | |
|
776 | 776 | .. code-block:: bash |
|
777 | 777 | |
|
778 | 778 | id : <id_given_in_input> |
|
779 | 779 | result: { |
|
780 | 780 | "msg": "Created new repository `<reponame>`", |
|
781 | 781 | "success": true, |
|
782 | 782 | "task": "<celery task id or None if done sync>" |
|
783 | 783 | } |
|
784 | 784 | error: null |
|
785 | 785 | |
|
786 | 786 | |
|
787 | 787 | Example error output: |
|
788 | 788 | |
|
789 | 789 | .. code-block:: bash |
|
790 | 790 | |
|
791 | 791 | id : <id_given_in_input> |
|
792 | 792 | result : null |
|
793 | 793 | error : { |
|
794 | 794 | 'failed to create repository `<repo_name>`' |
|
795 | 795 | } |
|
796 | 796 | |
|
797 | 797 | """ |
|
798 | 798 | |
|
799 | 799 | owner = validate_set_owner_permissions(apiuser, owner) |
|
800 | 800 | |
|
801 | 801 | description = Optional.extract(description) |
|
802 | 802 | copy_permissions = Optional.extract(copy_permissions) |
|
803 | 803 | clone_uri = Optional.extract(clone_uri) |
|
804 | 804 | push_uri = Optional.extract(push_uri) |
|
805 | 805 | |
|
806 | 806 | defs = SettingsModel().get_default_repo_settings(strip_prefix=True) |
|
807 | 807 | if isinstance(private, Optional): |
|
808 | 808 | private = defs.get('repo_private') or Optional.extract(private) |
|
809 | 809 | if isinstance(repo_type, Optional): |
|
810 | 810 | repo_type = defs.get('repo_type') |
|
811 | 811 | if isinstance(enable_statistics, Optional): |
|
812 | 812 | enable_statistics = defs.get('repo_enable_statistics') |
|
813 | 813 | if isinstance(enable_locking, Optional): |
|
814 | 814 | enable_locking = defs.get('repo_enable_locking') |
|
815 | 815 | if isinstance(enable_downloads, Optional): |
|
816 | 816 | enable_downloads = defs.get('repo_enable_downloads') |
|
817 | 817 | |
|
818 | 818 | landing_ref, _label = ScmModel.backend_landing_ref(repo_type) |
|
819 | 819 | ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate) |
|
820 | 820 | ref_choices = list(set(ref_choices + [landing_ref])) |
|
821 | 821 | |
|
822 | 822 | landing_commit_ref = Optional.extract(landing_rev) or landing_ref |
|
823 | 823 | |
|
824 | 824 | schema = repo_schema.RepoSchema().bind( |
|
825 | 825 | repo_type_options=rhodecode.BACKENDS.keys(), |
|
826 | 826 | repo_ref_options=ref_choices, |
|
827 | 827 | repo_type=repo_type, |
|
828 | 828 | # user caller |
|
829 | 829 | user=apiuser) |
|
830 | 830 | |
|
831 | 831 | try: |
|
832 | 832 | schema_data = schema.deserialize(dict( |
|
833 | 833 | repo_name=repo_name, |
|
834 | 834 | repo_type=repo_type, |
|
835 | 835 | repo_owner=owner.username, |
|
836 | 836 | repo_description=description, |
|
837 | 837 | repo_landing_commit_ref=landing_commit_ref, |
|
838 | 838 | repo_clone_uri=clone_uri, |
|
839 | 839 | repo_push_uri=push_uri, |
|
840 | 840 | repo_private=private, |
|
841 | 841 | repo_copy_permissions=copy_permissions, |
|
842 | 842 | repo_enable_statistics=enable_statistics, |
|
843 | 843 | repo_enable_downloads=enable_downloads, |
|
844 | 844 | repo_enable_locking=enable_locking)) |
|
845 | 845 | except validation_schema.Invalid as err: |
|
846 | 846 | raise JSONRPCValidationError(colander_exc=err) |
|
847 | 847 | |
|
848 | 848 | try: |
|
849 | 849 | data = { |
|
850 | 850 | 'owner': owner, |
|
851 | 851 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], |
|
852 | 852 | 'repo_name_full': schema_data['repo_name'], |
|
853 | 853 | 'repo_group': schema_data['repo_group']['repo_group_id'], |
|
854 | 854 | 'repo_type': schema_data['repo_type'], |
|
855 | 855 | 'repo_description': schema_data['repo_description'], |
|
856 | 856 | 'repo_private': schema_data['repo_private'], |
|
857 | 857 | 'clone_uri': schema_data['repo_clone_uri'], |
|
858 | 858 | 'push_uri': schema_data['repo_push_uri'], |
|
859 | 859 | 'repo_landing_rev': schema_data['repo_landing_commit_ref'], |
|
860 | 860 | 'enable_statistics': schema_data['repo_enable_statistics'], |
|
861 | 861 | 'enable_locking': schema_data['repo_enable_locking'], |
|
862 | 862 | 'enable_downloads': schema_data['repo_enable_downloads'], |
|
863 | 863 | 'repo_copy_permissions': schema_data['repo_copy_permissions'], |
|
864 | 864 | } |
|
865 | 865 | |
|
866 | 866 | task = RepoModel().create(form_data=data, cur_user=owner.user_id) |
|
867 | 867 | task_id = get_task_id(task) |
|
868 | 868 | # no commit, it's done in RepoModel, or async via celery |
|
869 | 869 | return { |
|
870 | 870 | 'msg': "Created new repository `{}`".format(schema_data['repo_name']), |
|
871 | 871 | 'success': True, # cannot return the repo data here since fork |
|
872 | 872 | # can be done async |
|
873 | 873 | 'task': task_id |
|
874 | 874 | } |
|
875 | 875 | except Exception: |
|
876 | 876 | log.exception( |
|
877 | 877 | "Exception while trying to create the repository %s", |
|
878 | 878 | schema_data['repo_name']) |
|
879 | 879 | raise JSONRPCError( |
|
880 | 880 | 'failed to create repository `{}`'.format(schema_data['repo_name'])) |
|
881 | 881 | |
|
882 | 882 | |
|
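Because creation may run asynchronously via celery, the response carries a
task id rather than the repo data itself. A hedged sketch (placeholder
endpoint and token; the group ``foo/bar`` is assumed to exist and be
writable):

.. code-block:: python

    # Hedged sketch: create a repository inside a nested group.
    import requests

    payload = {
        'id': 7,
        'auth_token': '<auth_token>',
        'method': 'create_repo',
        'args': {'repo_name': 'foo/bar/repo1', 'repo_type': 'git',
                 'description': 'example repo', 'private': True,
                 'landing_rev': 'branch:master'},
    }
    result = requests.post('https://rhodecode.example.com/_admin/api',
                           json=payload).json()['result']
    print(result['msg'])
    print(result['task'])  # celery task id, or None when done synchronously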
883 | 883 | @jsonrpc_method() |
|
884 | 884 | def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''), |
|
885 | 885 | description=Optional('')): |
|
886 | 886 | """ |
|
887 | 887 | Adds an extra field to a repository. |
|
888 | 888 | |
|
889 | 889 | This command can only be run using an |authtoken| with at least |
|
890 | 890 | write permissions to the |repo|. |
|
891 | 891 | |
|
892 | 892 | :param apiuser: This is filled automatically from the |authtoken|. |
|
893 | 893 | :type apiuser: AuthUser |
|
894 | 894 | :param repoid: Set the repository name or repository id. |
|
895 | 895 | :type repoid: str or int |
|
896 | 896 | :param key: Create a unique field key for this repository. |
|
897 | 897 | :type key: str |
|
898 | 898 | :param label: |
|
899 | 899 | :type label: Optional(str) |
|
900 | 900 | :param description: |
|
901 | 901 | :type description: Optional(str) |
|
902 | 902 | """ |
|
903 | 903 | repo = get_repo_or_error(repoid) |
|
904 | 904 | if not has_superadmin_permission(apiuser): |
|
905 | 905 | _perms = ('repository.admin',) |
|
906 | 906 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
907 | 907 | |
|
908 | 908 | label = Optional.extract(label) or key |
|
909 | 909 | description = Optional.extract(description) |
|
910 | 910 | |
|
911 | 911 | field = RepositoryField.get_by_key_name(key, repo) |
|
912 | 912 | if field: |
|
913 | raise JSONRPCError('Field with key ' | |
|
914 | '`%s` exists for repo `%s`' % (key, repoid)) | |
|
913 | raise JSONRPCError(f'Field with key `{key}` exists for repo `{repoid}`') | |
|
915 | 914 | |
|
916 | 915 | try: |
|
917 | 916 | RepoModel().add_repo_field(repo, key, field_label=label, |
|
918 | 917 | field_desc=description) |
|
919 | 918 | Session().commit() |
|
920 | 919 | return { |
|
921 | 920 | 'msg': f"Added new repository field `{key}`", |
|
922 | 921 | 'success': True, |
|
923 | 922 | } |
|
924 | 923 | except Exception: |
|
925 | 924 | log.exception("Exception occurred while trying to add field to repo") |
|
926 | 925 | raise JSONRPCError( |
|
927 | 926 | f'failed to create new field for repository `{repoid}`') |
|
928 | 927 | |
|
929 | 928 | |
|
930 | 929 | @jsonrpc_method() |
|
931 | 930 | def remove_field_from_repo(request, apiuser, repoid, key): |
|
932 | 931 | """ |
|
933 | 932 | Removes an extra field from a repository. |
|
934 | 933 | |
|
935 | 934 | This command can only be run using an |authtoken| with at least |
|
936 | 935 | write permissions to the |repo|. |
|
937 | 936 | |
|
938 | 937 | :param apiuser: This is filled automatically from the |authtoken|. |
|
939 | 938 | :type apiuser: AuthUser |
|
940 | 939 | :param repoid: Set the repository name or repository ID. |
|
941 | 940 | :type repoid: str or int |
|
942 | 941 | :param key: Set the unique field key for this repository. |
|
943 | 942 | :type key: str |
|
944 | 943 | """ |
|
945 | 944 | |
|
946 | 945 | repo = get_repo_or_error(repoid) |
|
947 | 946 | if not has_superadmin_permission(apiuser): |
|
948 | 947 | _perms = ('repository.admin',) |
|
949 | 948 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
950 | 949 | |
|
951 | 950 | field = RepositoryField.get_by_key_name(key, repo) |
|
952 | 951 | if not field: |
|
953 | 952 | raise JSONRPCError('Field with key `%s` does not ' |
|
954 | 953 | 'exist for repo `%s`' % (key, repoid)) |
|
955 | 954 | |
|
956 | 955 | try: |
|
957 | 956 | RepoModel().delete_repo_field(repo, field_key=key) |
|
958 | 957 | Session().commit() |
|
959 | 958 | return { |
|
960 | 959 | 'msg': f"Deleted repository field `{key}`", |
|
961 | 960 | 'success': True, |
|
962 | 961 | } |
|
963 | 962 | except Exception: |
|
964 | 963 | log.exception( |
|
965 | 964 | "Exception occurred while trying to delete field from repo") |
|
966 | 965 | raise JSONRPCError( |
|
967 | 966 | f'failed to delete field for repository `{repoid}`') |
|
968 | 967 | |
|
969 | 968 | |
|
970 | 969 | @jsonrpc_method() |
|
971 | 970 | def update_repo( |
|
972 | 971 | request, apiuser, repoid, repo_name=Optional(None), |
|
973 | 972 | owner=Optional(OAttr('apiuser')), description=Optional(''), |
|
974 | 973 | private=Optional(False), |
|
975 | 974 | clone_uri=Optional(None), push_uri=Optional(None), |
|
976 | 975 | landing_rev=Optional(None), fork_of=Optional(None), |
|
977 | 976 | enable_statistics=Optional(False), |
|
978 | 977 | enable_locking=Optional(False), |
|
979 | 978 | enable_downloads=Optional(False), fields=Optional('')): |
|
980 | 979 | r""" |
|
981 | 980 | Updates a repository with the given information. |
|
982 | 981 | |
|
983 | 982 | This command can only be run using an |authtoken| with at least |
|
984 | 983 | admin permissions to the |repo|. |
|
985 | 984 | |
|
986 | 985 | * If the repository name contains "/", repository will be updated |
|
987 | 986 | accordingly with a repository group or nested repository groups |
|
988 | 987 | |
|
989 | 988 | For example repoid=repo-test repo_name="foo/bar/repo-test" will update |repo| |
|
990 | 989 | called "repo-test" and place it inside group "foo/bar". |
|
991 | 990 | You have to have permissions to access and write to the last repository |
|
992 | 991 | group ("bar" in this example) |
|
993 | 992 | |
|
994 | 993 | :param apiuser: This is filled automatically from the |authtoken|. |
|
995 | 994 | :type apiuser: AuthUser |
|
996 | 995 | :param repoid: repository name or repository ID. |
|
997 | 996 | :type repoid: str or int |
|
998 | 997 | :param repo_name: Update the |repo| name, including the |
|
999 | 998 | repository group it's in. |
|
1000 | 999 | :type repo_name: str |
|
1001 | 1000 | :param owner: Set the |repo| owner. |
|
1002 | 1001 | :type owner: str |
|
1003 | 1002 | :param fork_of: Set the |repo| as fork of another |repo|. |
|
1004 | 1003 | :type fork_of: str |
|
1005 | 1004 | :param description: Update the |repo| description. |
|
1006 | 1005 | :type description: str |
|
1007 | 1006 | :param private: Set the |repo| as private. (True | False) |
|
1008 | 1007 | :type private: bool |
|
1009 | 1008 | :param clone_uri: Update the |repo| clone URI. |
|
1010 | 1009 | :type clone_uri: str |
|
1011 | 1010 | :param landing_rev: Set the |repo| landing revision. e.g. branch:default, book:dev, rev:abcd |
|
1012 | 1011 | :type landing_rev: str |
|
1013 | 1012 | :param enable_statistics: Enable statistics on the |repo|, (True | False). |
|
1014 | 1013 | :type enable_statistics: bool |
|
1015 | 1014 | :param enable_locking: Enable |repo| locking. |
|
1016 | 1015 | :type enable_locking: bool |
|
1017 | 1016 | :param enable_downloads: Enable downloads from the |repo|, (True | False). |
|
1018 | 1017 | :type enable_downloads: bool |
|
1019 | 1018 | :param fields: Add extra fields to the |repo|. Use the following |
|
1020 | 1019 | example format: ``field_key=field_val,field_key2=fieldval2``. |
|
1021 | 1020 | Escape ', ' with \, |
|
1022 | 1021 | :type fields: str |
|
1023 | 1022 | """ |
|
1024 | 1023 | |
|
1025 | 1024 | repo = get_repo_or_error(repoid) |
|
1026 | 1025 | |
|
1027 | 1026 | include_secrets = False |
|
1028 | 1027 | if not has_superadmin_permission(apiuser): |
|
1029 | 1028 | _perms = ('repository.admin',) |
|
1030 | 1029 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1031 | 1030 | else: |
|
1032 | 1031 | include_secrets = True |
|
1033 | 1032 | |
|
1034 | 1033 | updates = dict( |
|
1035 | 1034 | repo_name=repo_name |
|
1036 | 1035 | if not isinstance(repo_name, Optional) else repo.repo_name, |
|
1037 | 1036 | |
|
1038 | 1037 | fork_id=fork_of |
|
1039 | 1038 | if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None, |
|
1040 | 1039 | |
|
1041 | 1040 | user=owner |
|
1042 | 1041 | if not isinstance(owner, Optional) else repo.user.username, |
|
1043 | 1042 | |
|
1044 | 1043 | repo_description=description |
|
1045 | 1044 | if not isinstance(description, Optional) else repo.description, |
|
1046 | 1045 | |
|
1047 | 1046 | repo_private=private |
|
1048 | 1047 | if not isinstance(private, Optional) else repo.private, |
|
1049 | 1048 | |
|
1050 | 1049 | clone_uri=clone_uri |
|
1051 | 1050 | if not isinstance(clone_uri, Optional) else repo.clone_uri, |
|
1052 | 1051 | |
|
1053 | 1052 | push_uri=push_uri |
|
1054 | 1053 | if not isinstance(push_uri, Optional) else repo.push_uri, |
|
1055 | 1054 | |
|
1056 | 1055 | repo_landing_rev=landing_rev |
|
1057 | 1056 | if not isinstance(landing_rev, Optional) else repo._landing_revision, |
|
1058 | 1057 | |
|
1059 | 1058 | repo_enable_statistics=enable_statistics |
|
1060 | 1059 | if not isinstance(enable_statistics, Optional) else repo.enable_statistics, |
|
1061 | 1060 | |
|
1062 | 1061 | repo_enable_locking=enable_locking |
|
1063 | 1062 | if not isinstance(enable_locking, Optional) else repo.enable_locking, |
|
1064 | 1063 | |
|
1065 | 1064 | repo_enable_downloads=enable_downloads |
|
1066 | 1065 | if not isinstance(enable_downloads, Optional) else repo.enable_downloads) |
|
1067 | 1066 | |
|
1068 | 1067 | landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type) |
|
1069 | 1068 | ref_choices, _labels = ScmModel().get_repo_landing_revs( |
|
1070 | 1069 | request.translate, repo=repo) |
|
1071 | 1070 | ref_choices = list(set(ref_choices + [landing_ref])) |
|
1072 | 1071 | |
|
1073 | 1072 | old_values = repo.get_api_data() |
|
1074 | 1073 | repo_type = repo.repo_type |
|
1075 | 1074 | schema = repo_schema.RepoSchema().bind( |
|
1076 | 1075 | repo_type_options=rhodecode.BACKENDS.keys(), |
|
1077 | 1076 | repo_ref_options=ref_choices, |
|
1078 | 1077 | repo_type=repo_type, |
|
1079 | 1078 | # user caller |
|
1080 | 1079 | user=apiuser, |
|
1081 | 1080 | old_values=old_values) |
|
1082 | 1081 | try: |
|
1083 | 1082 | schema_data = schema.deserialize(dict( |
|
1084 | 1083 | # we save old value, users cannot change type |
|
1085 | 1084 | repo_type=repo_type, |
|
1086 | 1085 | |
|
1087 | 1086 | repo_name=updates['repo_name'], |
|
1088 | 1087 | repo_owner=updates['user'], |
|
1089 | 1088 | repo_description=updates['repo_description'], |
|
1090 | 1089 | repo_clone_uri=updates['clone_uri'], |
|
1091 | 1090 | repo_push_uri=updates['push_uri'], |
|
1092 | 1091 | repo_fork_of=updates['fork_id'], |
|
1093 | 1092 | repo_private=updates['repo_private'], |
|
1094 | 1093 | repo_landing_commit_ref=updates['repo_landing_rev'], |
|
1095 | 1094 | repo_enable_statistics=updates['repo_enable_statistics'], |
|
1096 | 1095 | repo_enable_downloads=updates['repo_enable_downloads'], |
|
1097 | 1096 | repo_enable_locking=updates['repo_enable_locking'])) |
|
1098 | 1097 | except validation_schema.Invalid as err: |
|
1099 | 1098 | raise JSONRPCValidationError(colander_exc=err) |
|
1100 | 1099 | |
|
1101 | 1100 | # save validated data back into the updates dict |
|
1102 | 1101 | validated_updates = dict( |
|
1103 | 1102 | repo_name=schema_data['repo_group']['repo_name_without_group'], |
|
1104 | 1103 | repo_group=schema_data['repo_group']['repo_group_id'], |
|
1105 | 1104 | |
|
1106 | 1105 | user=schema_data['repo_owner'], |
|
1107 | 1106 | repo_description=schema_data['repo_description'], |
|
1108 | 1107 | repo_private=schema_data['repo_private'], |
|
1109 | 1108 | clone_uri=schema_data['repo_clone_uri'], |
|
1110 | 1109 | push_uri=schema_data['repo_push_uri'], |
|
1111 | 1110 | repo_landing_rev=schema_data['repo_landing_commit_ref'], |
|
1112 | 1111 | repo_enable_statistics=schema_data['repo_enable_statistics'], |
|
1113 | 1112 | repo_enable_locking=schema_data['repo_enable_locking'], |
|
1114 | 1113 | repo_enable_downloads=schema_data['repo_enable_downloads'], |
|
1115 | 1114 | ) |
|
1116 | 1115 | |
|
1117 | 1116 | if schema_data['repo_fork_of']: |
|
1118 | 1117 | fork_repo = get_repo_or_error(schema_data['repo_fork_of']) |
|
1119 | 1118 | validated_updates['fork_id'] = fork_repo.repo_id |
|
1120 | 1119 | |
|
1121 | 1120 | # extra fields |
|
1122 | 1121 | fields = parse_args(Optional.extract(fields), key_prefix='ex_') |
|
1123 | 1122 | if fields: |
|
1124 | 1123 | validated_updates.update(fields) |
|
1125 | 1124 | |
|
1126 | 1125 | try: |
|
1127 | 1126 | RepoModel().update(repo, **validated_updates) |
|
1128 | 1127 | audit_logger.store_api( |
|
1129 | 1128 | 'repo.edit', action_data={'old_data': old_values}, |
|
1130 | 1129 | user=apiuser, repo=repo) |
|
1131 | 1130 | Session().commit() |
|
1132 | 1131 | return { |
|
1133 | 1132 | 'msg': f'updated repo ID:{repo.repo_id} {repo.repo_name}', |
|
1134 | 1133 | 'repository': repo.get_api_data(include_secrets=include_secrets) |
|
1135 | 1134 | } |
|
1136 | 1135 | except Exception: |
|
1137 | 1136 | log.exception( |
|
1138 | 1137 | "Exception while trying to update the repository %s", |
|
1139 | 1138 | repoid) |
|
1140 | 1139 | raise JSONRPCError('failed to update repo `%s`' % repoid) |
|
1141 | 1140 | |
|
1142 | 1141 | |
|
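The ``updates`` block above leans on ``Optional`` acting as a sentinel: a
parameter the caller never sent is still an ``Optional`` instance, so the
value currently stored on the repo wins. A minimal standalone sketch of that
pattern (the class here is a stand-in, not RhodeCode's actual ``Optional``):

.. code-block:: python

    # Hedged sketch of the sentinel pattern used by update_repo.
    class Optional:
        def __init__(self, default):
            self.default = default

    def resolve(param, current_value):
        # an untouched Optional default means "keep the stored value";
        # anything else is an explicit caller-supplied replacement
        return current_value if isinstance(param, Optional) else param

    stored = 'old description'
    assert resolve(Optional(''), stored) == 'old description'
    assert resolve('new description', stored) == 'new description'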
1143 | 1142 | @jsonrpc_method() |
|
1144 | 1143 | def fork_repo(request, apiuser, repoid, fork_name, |
|
1145 | 1144 | owner=Optional(OAttr('apiuser')), |
|
1146 | 1145 | description=Optional(''), |
|
1147 | 1146 | private=Optional(False), |
|
1148 | 1147 | clone_uri=Optional(None), |
|
1149 | 1148 | landing_rev=Optional(None), |
|
1150 | 1149 | copy_permissions=Optional(False)): |
|
1151 | 1150 | """ |
|
1152 | 1151 | Creates a fork of the specified |repo|. |
|
1153 | 1152 | |
|
1154 | 1153 | * If the fork_name contains "/", fork will be created inside |
|
1155 | 1154 | a repository group or nested repository groups |
|
1156 | 1155 | |
|
1157 | 1156 | For example "foo/bar/fork-repo" will create fork called "fork-repo" |
|
1158 | 1157 | inside group "foo/bar". You have to have permissions to access and |
|
1159 | 1158 | write to the last repository group ("bar" in this example) |
|
1160 | 1159 | |
|
1161 | 1160 | This command can only be run using an |authtoken| with at least |
|
1162 | 1161 | read permissions on the forked repo, and fork creation permission for the user. |
|
1163 | 1162 | |
|
1164 | 1163 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1165 | 1164 | :type apiuser: AuthUser |
|
1166 | 1165 | :param repoid: Set repository name or repository ID. |
|
1167 | 1166 | :type repoid: str or int |
|
1168 | 1167 | :param fork_name: Set the fork name, including its repository group membership. |
|
1169 | 1168 | :type fork_name: str |
|
1170 | 1169 | :param owner: Set the fork owner. |
|
1171 | 1170 | :type owner: str |
|
1172 | 1171 | :param description: Set the fork description. |
|
1173 | 1172 | :type description: str |
|
1174 | 1173 | :param copy_permissions: Copy permissions from parent |repo|. The |
|
1175 | 1174 | default is False. |
|
1176 | 1175 | :type copy_permissions: bool |
|
1177 | 1176 | :param private: Make the fork private. The default is False. |
|
1178 | 1177 | :type private: bool |
|
1179 | 1178 | :param landing_rev: Set the landing revision. E.g. branch:default, book:dev, rev:abcd |
|
1180 | 1179 | |
|
1181 | 1180 | Example input: |
|
1182 | 1181 | |
|
1183 | 1182 | .. code-block:: bash |
|
1184 | 1183 | |
|
1185 | 1184 | id : <id_for_response> |
|
1186 | 1185 | api_key : "<api_key>" |
|
1187 | 1186 | args: { |
|
1188 | 1187 | "repoid" : "<reponame or repo_id>", |
|
1189 | 1188 | "fork_name": "<forkname>", |
|
1190 | 1189 | "owner": "<username or user_id = Optional(=apiuser)>", |
|
1191 | 1190 | "description": "<description>", |
|
1192 | 1191 | "copy_permissions": "<bool>", |
|
1193 | 1192 | "private": "<bool>", |
|
1194 | 1193 | "landing_rev": "<landing_rev>" |
|
1195 | 1194 | } |
|
1196 | 1195 | |
|
1197 | 1196 | Example output: |
|
1198 | 1197 | |
|
1199 | 1198 | .. code-block:: bash |
|
1200 | 1199 | |
|
1201 | 1200 | id : <id_given_in_input> |
|
1202 | 1201 | result: { |
|
1203 | 1202 | "msg": "Created fork of `<reponame>` as `<forkname>`", |
|
1204 | 1203 | "success": true, |
|
1205 | 1204 | "task": "<celery task id or None if done sync>" |
|
1206 | 1205 | } |
|
1207 | 1206 | error: null |
|
1208 | 1207 | |
|
1209 | 1208 | """ |
|
1210 | 1209 | |
|
1211 | 1210 | repo = get_repo_or_error(repoid) |
|
1212 | 1211 | repo_name = repo.repo_name |
|
1213 | 1212 | |
|
1214 | 1213 | if not has_superadmin_permission(apiuser): |
|
1215 | 1214 | # check if we have at least read permission for |
|
1216 | 1215 | # this repo that we fork ! |
|
1217 | 1216 | _perms = ('repository.admin', 'repository.write', 'repository.read') |
|
1218 | 1217 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1219 | 1218 | |
|
1220 | 1219 | # check if the regular user has at least fork permissions as well |
|
1221 | 1220 | if not HasPermissionAnyApi(PermissionModel.FORKING_ENABLED)(user=apiuser): |
|
1222 | 1221 | raise JSONRPCForbidden() |
|
1223 | 1222 | |
|
1224 | 1223 | # check if user can set owner parameter |
|
1225 | 1224 | owner = validate_set_owner_permissions(apiuser, owner) |
|
1226 | 1225 | |
|
1227 | 1226 | description = Optional.extract(description) |
|
1228 | 1227 | copy_permissions = Optional.extract(copy_permissions) |
|
1229 | 1228 | clone_uri = Optional.extract(clone_uri) |
|
1230 | 1229 | |
|
1231 | 1230 | landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type) |
|
1232 | 1231 | ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate) |
|
1233 | 1232 | ref_choices = list(set(ref_choices + [landing_ref])) |
|
1234 | 1233 | landing_commit_ref = Optional.extract(landing_rev) or landing_ref |
|
1235 | 1234 | |
|
1236 | 1235 | private = Optional.extract(private) |
|
1237 | 1236 | |
|
1238 | 1237 | schema = repo_schema.RepoSchema().bind( |
|
1239 | 1238 | repo_type_options=rhodecode.BACKENDS.keys(), |
|
1240 | 1239 | repo_ref_options=ref_choices, |
|
1241 | 1240 | repo_type=repo.repo_type, |
|
1242 | 1241 | # user caller |
|
1243 | 1242 | user=apiuser) |
|
1244 | 1243 | |
|
1245 | 1244 | try: |
|
1246 | 1245 | schema_data = schema.deserialize(dict( |
|
1247 | 1246 | repo_name=fork_name, |
|
1248 | 1247 | repo_type=repo.repo_type, |
|
1249 | 1248 | repo_owner=owner.username, |
|
1250 | 1249 | repo_description=description, |
|
1251 | 1250 | repo_landing_commit_ref=landing_commit_ref, |
|
1252 | 1251 | repo_clone_uri=clone_uri, |
|
1253 | 1252 | repo_private=private, |
|
1254 | 1253 | repo_copy_permissions=copy_permissions)) |
|
1255 | 1254 | except validation_schema.Invalid as err: |
|
1256 | 1255 | raise JSONRPCValidationError(colander_exc=err) |
|
1257 | 1256 | |
|
1258 | 1257 | try: |
|
1259 | 1258 | data = { |
|
1260 | 1259 | 'fork_parent_id': repo.repo_id, |
|
1261 | 1260 | |
|
1262 | 1261 | 'repo_name': schema_data['repo_group']['repo_name_without_group'], |
|
1263 | 1262 | 'repo_name_full': schema_data['repo_name'], |
|
1264 | 1263 | 'repo_group': schema_data['repo_group']['repo_group_id'], |
|
1265 | 1264 | 'repo_type': schema_data['repo_type'], |
|
1266 | 1265 | 'description': schema_data['repo_description'], |
|
1267 | 1266 | 'private': schema_data['repo_private'], |
|
1268 | 1267 | 'copy_permissions': schema_data['repo_copy_permissions'], |
|
1269 | 1268 | 'landing_rev': schema_data['repo_landing_commit_ref'], |
|
1270 | 1269 | } |
|
1271 | 1270 | |
|
1272 | 1271 | task = RepoModel().create_fork(data, cur_user=owner.user_id) |
|
1273 | 1272 | # no commit, it's done in RepoModel, or async via celery |
|
1274 | 1273 | task_id = get_task_id(task) |
|
1275 | 1274 | |
|
1276 | 1275 | return { |
|
1277 | 1276 | 'msg': 'Created fork of `{}` as `{}`'.format( |
|
1278 | 1277 | repo.repo_name, schema_data['repo_name']), |
|
1279 | 1278 | 'success': True, # cannot return the repo data here since fork |
|
1280 | 1279 | # can be done async |
|
1281 | 1280 | 'task': task_id |
|
1282 | 1281 | } |
|
1283 | 1282 | except Exception: |
|
1284 | 1283 | log.exception( |
|
1285 | 1284 | "Exception while trying to create fork %s", |
|
1286 | 1285 | schema_data['repo_name']) |
|
1287 | 1286 | raise JSONRPCError( |
|
1288 | 1287 | 'failed to fork repository `{}` as `{}`'.format( |
|
1289 | 1288 | repo_name, schema_data['repo_name'])) |
|
1290 | 1289 | |
|
1291 | 1290 | |
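A minimal client-side sketch of calling this method, assuming a reachable instance at a hypothetical URL, the standard `/_admin/api` JSON-RPC endpoint, and the third-party `requests` library; newer releases accept `auth_token` in the payload where the example above shows `api_key`. The host, token, and repository names are placeholders, and the `api_call` helper defined here is reused in the sketches after the other methods below.

.. code-block:: python

    # a minimal sketch, assuming a running instance and a valid auth token
    import requests

    API_URL = 'https://rhodecode.example.com/_admin/api'  # hypothetical host
    AUTH_TOKEN = '<auth_token>'  # placeholder; never hardcode real tokens

    def api_call(method, args):
        # every call is a POST carrying a JSON-RPC style payload
        payload = {'id': 1, 'auth_token': AUTH_TOKEN,
                   'method': method, 'args': args}
        response = requests.post(API_URL, json=payload, timeout=30)
        response.raise_for_status()
        return response.json()

    # fork 'foo/blog' into the 'foo/bar' group: the caller needs at least
    # read access to the source repo and write access to the target group
    result = api_call('fork_repo', {
        'repoid': 'foo/blog',
        'fork_name': 'foo/bar/blog-fork',
        'description': 'experimental fork',
        'private': True,
    })
    # 'task' is a celery task id, or None when the fork was created synchronously
    print(result['result']['task'])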
|
1292 | 1291 | @jsonrpc_method() |
|
1293 | 1292 | def delete_repo(request, apiuser, repoid, forks=Optional('')): |
|
1294 | 1293 | """ |
|
1295 | 1294 | Deletes a repository. |
|
1296 | 1295 | |
|
1297 | 1296 | * When the `forks` parameter is set, it is possible to detach or delete |
|
1298 | 1297 | the forks of the deleted repository. |
|
1299 | 1298 | |
|
1300 | 1299 | This command can only be run using an |authtoken| with admin |
|
1301 | 1300 | permissions on the |repo|. |
|
1302 | 1301 | |
|
1303 | 1302 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1304 | 1303 | :type apiuser: AuthUser |
|
1305 | 1304 | :param repoid: Set the repository name or repository ID. |
|
1306 | 1305 | :type repoid: str or int |
|
1307 | 1306 | :param forks: Set to `detach` or `delete` forks from the |repo|. |
|
1308 | 1307 | :type forks: Optional(str) |
|
1309 | 1308 | |
|
1310 | 1309 | Example output: |
|
1311 | 1310 | |
|
1312 | 1311 | .. code-block:: bash |
|
1313 | 1312 | |
|
1314 | 1313 | id : <id_given_in_input> |
|
1315 | 1314 | result: { |
|
1316 | 1315 | "msg": "Deleted repository `<reponame>`", |
|
1317 | 1316 | "success": true |
|
1318 | 1317 | } |
|
1319 | 1318 | error: null |
|
1320 | 1319 | """ |
|
1321 | 1320 | |
|
1322 | 1321 | repo = get_repo_or_error(repoid) |
|
1323 | 1322 | repo_name = repo.repo_name |
|
1324 | 1323 | if not has_superadmin_permission(apiuser): |
|
1325 | 1324 | _perms = ('repository.admin',) |
|
1326 | 1325 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1327 | 1326 | |
|
1328 | 1327 | try: |
|
1329 | 1328 | handle_forks = Optional.extract(forks) |
|
1330 | 1329 | _forks_msg = '' |
|
1331 | 1330 | _forks = [f for f in repo.forks] |
|
1332 | 1331 | if handle_forks == 'detach': |
|
1333 | 1332 | _forks_msg = ' ' + 'Detached %s forks' % len(_forks) |
|
1334 | 1333 | elif handle_forks == 'delete': |
|
1335 | 1334 | _forks_msg = ' ' + 'Deleted %s forks' % len(_forks) |
|
1336 | 1335 | elif _forks: |
|
1337 | 1336 | raise JSONRPCError( |
|
1338 | 1337 | 'Cannot delete `%s`, it still contains attached forks' % |
|
1339 | 1338 | (repo.repo_name,) |
|
1340 | 1339 | ) |
|
1341 | 1340 | old_data = repo.get_api_data() |
|
1342 | 1341 | RepoModel().delete(repo, forks=handle_forks)  # use the extracted value |
|
1343 | 1342 | |
|
1344 | 1343 | repo = audit_logger.RepoWrap(repo_id=None, |
|
1345 | 1344 | repo_name=repo.repo_name) |
|
1346 | 1345 | |
|
1347 | 1346 | audit_logger.store_api( |
|
1348 | 1347 | 'repo.delete', action_data={'old_data': old_data}, |
|
1349 | 1348 | user=apiuser, repo=repo) |
|
1350 | 1349 | |
|
1351 | 1350 | ScmModel().mark_for_invalidation(repo_name, delete=True) |
|
1352 | 1351 | Session().commit() |
|
1353 | 1352 | return { |
|
1354 | 1353 | 'msg': f'Deleted repository `{repo_name}`{_forks_msg}', |
|
1355 | 1354 | 'success': True |
|
1356 | 1355 | } |
|
1357 | 1356 | except Exception: |
|
1358 | 1357 | log.exception("Exception occurred while trying to delete repo") |
|
1359 | 1358 | raise JSONRPCError( |
|
1360 | 1359 | f'failed to delete repository `{repo_name}`' |
|
1361 | 1360 | ) |
|
1362 | 1361 | |
|
1363 | 1362 | |
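A short sketch of the `forks` handling described above, reusing the hypothetical `api_call` helper from the fork_repo example; the repository name is a placeholder.

.. code-block:: python

    # detach all forks first, then delete the repository itself; without
    # the 'forks' argument the call fails while attached forks still exist
    result = api_call('delete_repo', {'repoid': 'foo/blog', 'forks': 'detach'})
    print(result['result']['msg'])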
|
1364 | 1363 | #TODO: marcink, change name ? |
|
1365 | 1364 | @jsonrpc_method() |
|
1366 | 1365 | def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)): |
|
1367 | 1366 | """ |
|
1368 | 1367 | Invalidates the cache for the specified repository. |
|
1369 | 1368 | |
|
1370 | 1369 | This command can only be run using an |authtoken| with admin rights to |
|
1371 | 1370 | the specified repository. |
|
1372 | 1371 | |
|
1373 | 1372 | This command takes the following options: |
|
1374 | 1373 | |
|
1375 | 1374 | :param apiuser: This is filled automatically from |authtoken|. |
|
1376 | 1375 | :type apiuser: AuthUser |
|
1377 | 1376 | :param repoid: Sets the repository name or repository ID. |
|
1378 | 1377 | :type repoid: str or int |
|
1379 | 1378 | :param delete_keys: This deletes the invalidated keys instead of |
|
1380 | 1379 | just flagging them. |
|
1381 | 1380 | :type delete_keys: Optional(``True`` | ``False``) |
|
1382 | 1381 | |
|
1383 | 1382 | Example output: |
|
1384 | 1383 | |
|
1385 | 1384 | .. code-block:: bash |
|
1386 | 1385 | |
|
1387 | 1386 | id : <id_given_in_input> |
|
1388 | 1387 | result : { |
|
1389 | 1388 | 'msg': 'Cache for repository `<repository name>` was invalidated', |
|
1390 | 1389 | 'repository': <repository name> |
|
1391 | 1390 | } |
|
1392 | 1391 | error : null |
|
1393 | 1392 | |
|
1394 | 1393 | Example error output: |
|
1395 | 1394 | |
|
1396 | 1395 | .. code-block:: bash |
|
1397 | 1396 | |
|
1398 | 1397 | id : <id_given_in_input> |
|
1399 | 1398 | result : null |
|
1400 | 1399 | error : { |
|
1401 | 1400 | 'Error occurred during cache invalidation action' |
|
1402 | 1401 | } |
|
1403 | 1402 | |
|
1404 | 1403 | """ |
|
1405 | 1404 | |
|
1406 | 1405 | repo = get_repo_or_error(repoid) |
|
1407 | 1406 | if not has_superadmin_permission(apiuser): |
|
1408 | 1407 | _perms = ('repository.admin', 'repository.write',) |
|
1409 | 1408 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1410 | 1409 | |
|
1411 | 1410 | delete = Optional.extract(delete_keys) |
|
1412 | 1411 | try: |
|
1413 | 1412 | ScmModel().mark_for_invalidation(repo.repo_name, delete=delete) |
|
1414 | 1413 | return { |
|
1415 | 1414 | 'msg': f'Cache for repository `{repo.repo_name}` was invalidated', |
|
1416 | 1415 | 'repository': repo.repo_name |
|
1417 | 1416 | } |
|
1418 | 1417 | except Exception: |
|
1419 | 1418 | log.exception( |
|
1420 | 1419 | "Exception occurred while trying to invalidate repo cache") |
|
1421 | 1420 | raise JSONRPCError( |
|
1422 | 1421 | 'Error occurred during cache invalidation action' |
|
1423 | 1422 | ) |
|
1424 | 1423 | |
|
1425 | 1424 | |
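A sketch of both invalidation modes, reusing the hypothetical `api_call` helper from the fork_repo example.

.. code-block:: python

    # flag cached keys as invalid (default), or remove them outright
    api_call('invalidate_cache', {'repoid': 'foo/blog'})
    api_call('invalidate_cache', {'repoid': 'foo/blog', 'delete_keys': True})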
|
1426 | 1425 | #TODO: marcink, change name ? |
|
1427 | 1426 | @jsonrpc_method() |
|
1428 | 1427 | def lock(request, apiuser, repoid, locked=Optional(None), |
|
1429 | 1428 | userid=Optional(OAttr('apiuser'))): |
|
1430 | 1429 | """ |
|
1431 | 1430 | Sets the lock state of the specified |repo| by the given user. |
|
1432 | 1431 | For more information, see :ref:`repo-locking`. |
|
1433 | 1432 | |
|
1434 | 1433 | * If the ``userid`` option is not set, the repository is locked to the |
|
1435 | 1434 | user who called the method. |
|
1436 | 1435 | * If the ``locked`` parameter is not set, the current lock state of the |
|
1437 | 1436 | repository is displayed. |
|
1438 | 1437 | |
|
1439 | 1438 | This command can only be run using an |authtoken| with admin rights to |
|
1440 | 1439 | the specified repository. |
|
1441 | 1440 | |
|
1442 | 1441 | This command takes the following options: |
|
1443 | 1442 | |
|
1444 | 1443 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1445 | 1444 | :type apiuser: AuthUser |
|
1446 | 1445 | :param repoid: Sets the repository name or repository ID. |
|
1447 | 1446 | :type repoid: str or int |
|
1448 | 1447 | :param locked: Sets the lock state. |
|
1449 | 1448 | :type locked: Optional(``True`` | ``False``) |
|
1450 | 1449 | :param userid: Set the repository lock to this user. |
|
1451 | 1450 | :type userid: Optional(str or int) |
|
1452 | 1451 | |
|
1453 | 1452 | Example output: |
|
1454 | 1453 | |
|
1455 | 1454 | .. code-block:: bash |
|
1456 | 1455 | |
|
1457 | 1456 | id : <id_given_in_input> |
|
1458 | 1457 | result : { |
|
1459 | 1458 | 'repo': '<reponame>', |
|
1460 | 1459 | 'locked': <bool: lock state>, |
|
1461 | 1460 | 'locked_since': <int: lock timestamp>, |
|
1462 | 1461 | 'locked_by': <username of person who made the lock>, |
|
1463 | 1462 | 'lock_reason': <str: reason for locking>, |
|
1464 | 1463 | 'lock_state_changed': <bool: True if lock state has been changed in this request>, |
|
1465 | 1464 | 'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.' |
|
1466 | 1465 | or |
|
1467 | 1466 | 'msg': 'Repo `<repository name>` not locked.' |
|
1468 | 1467 | or |
|
1469 | 1468 | 'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`' |
|
1470 | 1469 | } |
|
1471 | 1470 | error : null |
|
1472 | 1471 | |
|
1473 | 1472 | Example error output: |
|
1474 | 1473 | |
|
1475 | 1474 | .. code-block:: bash |
|
1476 | 1475 | |
|
1477 | 1476 | id : <id_given_in_input> |
|
1478 | 1477 | result : null |
|
1479 | 1478 | error : { |
|
1480 | 1479 | 'Error occurred locking repository `<reponame>`' |
|
1481 | 1480 | } |
|
1482 | 1481 | """ |
|
1483 | 1482 | |
|
1484 | 1483 | repo = get_repo_or_error(repoid) |
|
1485 | 1484 | if not has_superadmin_permission(apiuser): |
|
1486 | 1485 | # check if we have at least write permission for this repo ! |
|
1487 | 1486 | _perms = ('repository.admin', 'repository.write',) |
|
1488 | 1487 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1489 | 1488 | |
|
1490 | 1489 | # make sure normal user does not pass someone else userid, |
|
1491 | 1490 | # he is not allowed to do that |
|
1492 | 1491 | if not isinstance(userid, Optional) and userid != apiuser.user_id: |
|
1493 | 1492 | raise JSONRPCError('userid is not the same as your user') |
|
1494 | 1493 | |
|
1495 | 1494 | if isinstance(userid, Optional): |
|
1496 | 1495 | userid = apiuser.user_id |
|
1497 | 1496 | |
|
1498 | 1497 | user = get_user_or_error(userid) |
|
1499 | 1498 | |
|
1500 | 1499 | if isinstance(locked, Optional): |
|
1501 | 1500 | lockobj = repo.locked |
|
1502 | 1501 | |
|
1503 | 1502 | if lockobj[0] is None: |
|
1504 | 1503 | _d = { |
|
1505 | 1504 | 'repo': repo.repo_name, |
|
1506 | 1505 | 'locked': False, |
|
1507 | 1506 | 'locked_since': None, |
|
1508 | 1507 | 'locked_by': None, |
|
1509 | 1508 | 'lock_reason': None, |
|
1510 | 1509 | 'lock_state_changed': False, |
|
1511 | 1510 | 'msg': 'Repo `%s` not locked.' % repo.repo_name |
|
1512 | 1511 | } |
|
1513 | 1512 | return _d |
|
1514 | 1513 | else: |
|
1515 | 1514 | _user_id, _time, _reason = lockobj |
|
1516 | 1515 | lock_user = get_user_or_error(_user_id)  # the user actually holding the lock |
|
1517 | 1516 | _d = { |
|
1518 | 1517 | 'repo': repo.repo_name, |
|
1519 | 1518 | 'locked': True, |
|
1520 | 1519 | 'locked_since': _time, |
|
1521 | 1520 | 'locked_by': lock_user.username, |
|
1522 | 1521 | 'lock_reason': _reason, |
|
1523 | 1522 | 'lock_state_changed': False, |
|
1524 | 1523 | 'msg': ('Repo `%s` locked by `%s` on `%s`.' |
|
1525 | 1524 | % (repo.repo_name, lock_user.username, |
|
1526 | 1525 | json.dumps(time_to_datetime(_time)))) |
|
1527 | 1526 | } |
|
1528 | 1527 | return _d |
|
1529 | 1528 | |
|
1530 | 1529 | # force locked state through a flag |
|
1531 | 1530 | else: |
|
1532 | 1531 | locked = str2bool(locked) |
|
1533 | 1532 | lock_reason = Repository.LOCK_API |
|
1534 | 1533 | try: |
|
1535 | 1534 | if locked: |
|
1536 | 1535 | lock_time = time.time() |
|
1537 | 1536 | Repository.lock(repo, user.user_id, lock_time, lock_reason) |
|
1538 | 1537 | else: |
|
1539 | 1538 | lock_time = None |
|
1540 | 1539 | Repository.unlock(repo) |
|
1541 | 1540 | _d = { |
|
1542 | 1541 | 'repo': repo.repo_name, |
|
1543 | 1542 | 'locked': locked, |
|
1544 | 1543 | 'locked_since': lock_time, |
|
1545 | 1544 | 'locked_by': user.username, |
|
1546 | 1545 | 'lock_reason': lock_reason, |
|
1547 | 1546 | 'lock_state_changed': True, |
|
1548 | 1547 | 'msg': ('User `%s` set lock state for repo `%s` to `%s`' |
|
1549 | 1548 | % (user.username, repo.repo_name, locked)) |
|
1550 | 1549 | } |
|
1551 | 1550 | return _d |
|
1552 | 1551 | except Exception: |
|
1553 | 1552 | log.exception( |
|
1554 | 1553 | "Exception occurred while trying to lock repository") |
|
1555 | 1554 | raise JSONRPCError( |
|
1556 | 1555 | 'Error occurred locking repository `%s`' % repo.repo_name |
|
1557 | 1556 | ) |
|
1558 | 1557 | |
|
1559 | 1558 | |
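A sketch of the three behaviours described in the docstring: querying, locking, and unlocking. It reuses the hypothetical `api_call` helper from the fork_repo example.

.. code-block:: python

    # omitting 'locked' only reports the current lock state
    state = api_call('lock', {'repoid': 'foo/blog'})['result']
    print(state['locked'], state['locked_by'])

    # acquire the lock for the calling user, then release it again
    api_call('lock', {'repoid': 'foo/blog', 'locked': True})
    api_call('lock', {'repoid': 'foo/blog', 'locked': False})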
|
1560 | 1559 | @jsonrpc_method() |
|
1561 | 1560 | def comment_commit( |
|
1562 | 1561 | request, apiuser, repoid, commit_id, message, status=Optional(None), |
|
1563 | 1562 | comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE), |
|
1564 | 1563 | resolves_comment_id=Optional(None), extra_recipients=Optional([]), |
|
1565 | 1564 | userid=Optional(OAttr('apiuser')), send_email=Optional(True)): |
|
1566 | 1565 | """ |
|
1567 | 1566 | Set a commit comment, and optionally change the status of the commit. |
|
1568 | 1567 | |
|
1569 | 1568 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1570 | 1569 | :type apiuser: AuthUser |
|
1571 | 1570 | :param repoid: Set the repository name or repository ID. |
|
1572 | 1571 | :type repoid: str or int |
|
1573 | 1572 | :param commit_id: Specify the commit_id for which to set a comment. |
|
1574 | 1573 | :type commit_id: str |
|
1575 | 1574 | :param message: The comment text. |
|
1576 | 1575 | :type message: str |
|
1577 | 1576 | :param status: (**Optional**) status of commit, one of: 'not_reviewed', |
|
1578 | 1577 | 'approved', 'rejected', 'under_review' |
|
1579 | 1578 | :type status: str |
|
1580 | 1579 | :param comment_type: Comment type, one of: 'note', 'todo' |
|
1581 | 1580 | :type comment_type: Optional(str), default: 'note' |
|
1582 | 1581 | :param resolves_comment_id: id of comment which this one will resolve |
|
1583 | 1582 | :type resolves_comment_id: Optional(int) |
|
1584 | 1583 | :param extra_recipients: list of user ids or usernames to add |
|
1585 | 1584 | notifications for this comment. Acts like a CC for notification |
|
1586 | 1585 | :type extra_recipients: Optional(list) |
|
1587 | 1586 | :param userid: Set the user name of the comment creator. |
|
1588 | 1587 | :type userid: Optional(str or int) |
|
1589 | 1588 | :param send_email: Define whether this comment should also send an email notification |
|
1590 | 1589 | :type send_email: Optional(bool) |
|
1591 | 1590 | |
|
1592 | 1591 | Example output: |
|
1593 | 1592 | |
|
1594 | 1593 | .. code-block:: bash |
|
1595 | 1594 | |
|
1596 | 1595 | { |
|
1597 | 1596 | "id" : <id_given_in_input>, |
|
1598 | 1597 | "result" : { |
|
1599 | 1598 | "msg": "Commented on commit `<commit_id>` for repository `<repoid>`", |
|
1600 | 1599 | "status_change": null or <status>, |
|
1601 | 1600 | "success": true |
|
1602 | 1601 | }, |
|
1603 | 1602 | "error" : null |
|
1604 | 1603 | } |
|
1605 | 1604 | |
|
1606 | 1605 | """ |
|
1607 | 1606 | _ = request.translate |
|
1608 | 1607 | |
|
1609 | 1608 | repo = get_repo_or_error(repoid) |
|
1610 | 1609 | if not has_superadmin_permission(apiuser): |
|
1611 | 1610 | _perms = ('repository.read', 'repository.write', 'repository.admin') |
|
1612 | 1611 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1613 | 1612 | db_repo_name = repo.repo_name |
|
1614 | 1613 | |
|
1615 | 1614 | try: |
|
1616 | 1615 | commit = repo.scm_instance().get_commit(commit_id=commit_id) |
|
1617 | 1616 | commit_id = commit.raw_id |
|
1618 | 1617 | except Exception as e: |
|
1619 | 1618 | log.exception('Failed to fetch commit') |
|
1620 | 1619 | raise JSONRPCError(safe_str(e)) |
|
1621 | 1620 | |
|
1622 | 1621 | if isinstance(userid, Optional): |
|
1623 | 1622 | userid = apiuser.user_id |
|
1624 | 1623 | |
|
1625 | 1624 | user = get_user_or_error(userid) |
|
1626 | 1625 | status = Optional.extract(status) |
|
1627 | 1626 | comment_type = Optional.extract(comment_type) |
|
1628 | 1627 | resolves_comment_id = Optional.extract(resolves_comment_id) |
|
1629 | 1628 | extra_recipients = Optional.extract(extra_recipients) |
|
1630 | 1629 | send_email = Optional.extract(send_email, binary=True) |
|
1631 | 1630 | |
|
1632 | 1631 | allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES] |
|
1633 | 1632 | if status and status not in allowed_statuses: |
|
1634 | 1633 | raise JSONRPCError('Bad status, must be one ' |
|
1635 | 1634 | 'of %s, got %s' % (allowed_statuses, status,)) |
|
1636 | 1635 | |
|
1637 | 1636 | if resolves_comment_id: |
|
1638 | 1637 | comment = ChangesetComment.get(resolves_comment_id) |
|
1639 | 1638 | if not comment: |
|
1640 | 1639 | raise JSONRPCError( |
|
1641 | 1640 | 'Invalid resolves_comment_id `%s` for this commit.' |
|
1642 | 1641 | % resolves_comment_id) |
|
1643 | 1642 | if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO: |
|
1644 | 1643 | raise JSONRPCError( |
|
1645 | 1644 | 'Comment `%s` is wrong type for setting status to resolved.' |
|
1646 | 1645 | % resolves_comment_id) |
|
1647 | 1646 | |
|
1648 | 1647 | try: |
|
1649 | 1648 | rc_config = SettingsModel().get_all_settings() |
|
1650 | 1649 | renderer = rc_config.get('rhodecode_markup_renderer', 'rst') |
|
1651 | 1650 | status_change_label = ChangesetStatus.get_status_lbl(status) |
|
1652 | 1651 | comment = CommentsModel().create( |
|
1653 | 1652 | message, repo, user, commit_id=commit_id, |
|
1654 | 1653 | status_change=status_change_label, |
|
1655 | 1654 | status_change_type=status, |
|
1656 | 1655 | renderer=renderer, |
|
1657 | 1656 | comment_type=comment_type, |
|
1658 | 1657 | resolves_comment_id=resolves_comment_id, |
|
1659 | 1658 | auth_user=apiuser, |
|
1660 | 1659 | extra_recipients=extra_recipients, |
|
1661 | 1660 | send_email=send_email |
|
1662 | 1661 | ) |
|
1663 | 1662 | is_inline = comment.is_inline |
|
1664 | 1663 | |
|
1665 | 1664 | if status: |
|
1666 | 1665 | # also do a status change |
|
1667 | 1666 | try: |
|
1668 | 1667 | ChangesetStatusModel().set_status( |
|
1669 | 1668 | repo, status, user, comment, revision=commit_id, |
|
1670 | 1669 | dont_allow_on_closed_pull_request=True |
|
1671 | 1670 | ) |
|
1672 | 1671 | except StatusChangeOnClosedPullRequestError: |
|
1673 | 1672 | log.exception( |
|
1674 | 1673 | "Exception occurred while trying to change repo commit status") |
|
1675 | 1674 | msg = ('Changing status on a commit associated with ' |
|
1676 | 1675 | 'a closed pull request is not allowed') |
|
1677 | 1676 | raise JSONRPCError(msg) |
|
1678 | 1677 | |
|
1679 | 1678 | CommentsModel().trigger_commit_comment_hook( |
|
1680 | 1679 | repo, apiuser, 'create', |
|
1681 | 1680 | data={'comment': comment, 'commit': commit}) |
|
1682 | 1681 | |
|
1683 | 1682 | Session().commit() |
|
1684 | 1683 | |
|
1685 | 1684 | comment_broadcast_channel = channelstream.comment_channel( |
|
1686 | 1685 | db_repo_name, commit_obj=commit) |
|
1687 | 1686 | |
|
1688 | 1687 | comment_data = {'comment': comment, 'comment_id': comment.comment_id} |
|
1689 | 1688 | comment_type = 'inline' if is_inline else 'general' |
|
1690 | 1689 | channelstream.comment_channelstream_push( |
|
1691 | 1690 | request, comment_broadcast_channel, apiuser, |
|
1692 | 1691 | _('posted a new {} comment').format(comment_type), |
|
1693 | 1692 | comment_data=comment_data) |
|
1694 | 1693 | |
|
1695 | 1694 | return { |
|
1696 | 1695 | 'msg': ( |
|
1697 | 1696 | 'Commented on commit `{}` for repository `{}`'.format( |
|
1698 | 1697 | comment.revision, repo.repo_name)), |
|
1699 | 1698 | 'status_change': status, |
|
1700 | 1699 | 'success': True, |
|
1701 | 1700 | } |
|
1702 | 1701 | except JSONRPCError: |
|
1703 | 1702 | # catch any inside errors, and re-raise them to prevent from |
|
1704 | 1703 | # below global catch to silence them |
|
1705 | 1704 | raise |
|
1706 | 1705 | except Exception: |
|
1707 | 1706 | log.exception("Exception occurred while trying to comment on commit") |
|
1708 | 1707 | raise JSONRPCError( |
|
1709 | 1708 | f'failed to set comment on repository `{repo.repo_name}`' |
|
1710 | 1709 | ) |
|
1711 | 1710 | |
|
1712 | 1711 | |
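A sketch of combining a comment with a status change, reusing the hypothetical `api_call` helper from the fork_repo example; the commit hash is a placeholder.

.. code-block:: python

    # leave a note and mark the commit approved in a single call; the
    # status must be one of the values listed in the docstring above
    result = api_call('comment_commit', {
        'repoid': 'foo/blog',
        'commit_id': 'abcdef0123456789abcdef0123456789abcdef01',  # placeholder
        'message': 'Looks good overall.',
        'status': 'approved',
    })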
|
1713 | 1712 | @jsonrpc_method() |
|
1714 | 1713 | def get_repo_comments(request, apiuser, repoid, |
|
1715 | 1714 | commit_id=Optional(None), comment_type=Optional(None), |
|
1716 | 1715 | userid=Optional(None)): |
|
1717 | 1716 | """ |
|
1718 | 1717 | Get all comments for a repository |
|
1719 | 1718 | |
|
1720 | 1719 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1721 | 1720 | :type apiuser: AuthUser |
|
1722 | 1721 | :param repoid: Set the repository name or repository ID. |
|
1723 | 1722 | :type repoid: str or int |
|
1724 | 1723 | :param commit_id: Optionally filter the comments by the commit_id |
|
1725 | 1724 | :type commit_id: Optional(str), default: None |
|
1726 | 1725 | :param comment_type: Optionally filter the comments by the comment_type |
|
1727 | 1726 | one of: 'note', 'todo' |
|
1728 | 1727 | :type comment_type: Optional(str), default: None |
|
1729 | 1728 | :param userid: Optionally filter the comments by the author of comment |
|
1730 | 1729 | :type userid: Optional(str or int), Default: None |
|
1731 | 1730 | |
|
1732 | 1731 | Example output: |
|
1733 | 1732 | |
|
1734 | 1733 | .. code-block:: bash |
|
1735 | 1734 | |
|
1736 | 1735 | { |
|
1737 | 1736 | "id" : <id_given_in_input>, |
|
1738 | 1737 | "result" : [ |
|
1739 | 1738 | { |
|
1740 | 1739 | "comment_author": <USER_DETAILS>, |
|
1741 | 1740 | "comment_created_on": "2017-02-01T14:38:16.309", |
|
1742 | 1741 | "comment_f_path": "file.txt", |
|
1743 | 1742 | "comment_id": 282, |
|
1744 | 1743 | "comment_lineno": "n1", |
|
1745 | 1744 | "comment_resolved_by": null, |
|
1746 | 1745 | "comment_status": [], |
|
1747 | 1746 | "comment_text": "This file needs a header", |
|
1748 | 1747 | "comment_type": "todo", |
|
1749 | 1748 | "comment_last_version": 0 |
|
1750 | 1749 | } |
|
1751 | 1750 | ], |
|
1752 | 1751 | "error" : null |
|
1753 | 1752 | } |
|
1754 | 1753 | |
|
1755 | 1754 | """ |
|
1756 | 1755 | repo = get_repo_or_error(repoid) |
|
1757 | 1756 | if not has_superadmin_permission(apiuser): |
|
1758 | 1757 | _perms = ('repository.read', 'repository.write', 'repository.admin') |
|
1759 | 1758 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1760 | 1759 | |
|
1761 | 1760 | commit_id = Optional.extract(commit_id) |
|
1762 | 1761 | |
|
1763 | 1762 | userid = Optional.extract(userid) |
|
1764 | 1763 | if userid: |
|
1765 | 1764 | user = get_user_or_error(userid) |
|
1766 | 1765 | else: |
|
1767 | 1766 | user = None |
|
1768 | 1767 | |
|
1769 | 1768 | comment_type = Optional.extract(comment_type) |
|
1770 | 1769 | if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES: |
|
1771 | 1770 | raise JSONRPCError( |
|
1772 | 1771 | 'comment_type must be one of `{}` got {}'.format( |
|
1773 | 1772 | ChangesetComment.COMMENT_TYPES, comment_type) |
|
1774 | 1773 | ) |
|
1775 | 1774 | |
|
1776 | 1775 | comments = CommentsModel().get_repository_comments( |
|
1777 | 1776 | repo=repo, comment_type=comment_type, user=user, commit_id=commit_id) |
|
1778 | 1777 | return comments |
|
1779 | 1778 | |
|
1780 | 1779 | |
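A sketch of the filtering options, reusing the hypothetical `api_call` helper from the fork_repo example; the username is a placeholder.

.. code-block:: python

    # list all 'todo' comments a given reviewer left anywhere in the repo
    todos = api_call('get_repo_comments', {
        'repoid': 'foo/blog',
        'comment_type': 'todo',
        'userid': 'reviewer1',  # placeholder username
    })['result']
    for comment in todos:
        print(comment['comment_id'], comment['comment_text'])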
|
1781 | 1780 | @jsonrpc_method() |
|
1782 | 1781 | def get_comment(request, apiuser, comment_id): |
|
1783 | 1782 | """ |
|
1784 | 1783 | Get single comment from repository or pull_request |
|
1785 | 1784 | |
|
1786 | 1785 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1787 | 1786 | :type apiuser: AuthUser |
|
1788 | 1787 | :param comment_id: comment id found in the URL of comment |
|
1789 | 1788 | :type comment_id: str or int |
|
1790 | 1789 | |
|
1791 | 1790 | Example output: |
|
1792 | 1791 | |
|
1793 | 1792 | .. code-block:: bash |
|
1794 | 1793 | |
|
1795 | 1794 | { |
|
1796 | 1795 | "id" : <id_given_in_input>, |
|
1797 | 1796 | "result" : { |
|
1798 | 1797 | "comment_author": <USER_DETAILS>, |
|
1799 | 1798 | "comment_created_on": "2017-02-01T14:38:16.309", |
|
1800 | 1799 | "comment_f_path": "file.txt", |
|
1801 | 1800 | "comment_id": 282, |
|
1802 | 1801 | "comment_lineno": "n1", |
|
1803 | 1802 | "comment_resolved_by": null, |
|
1804 | 1803 | "comment_status": [], |
|
1805 | 1804 | "comment_text": "This file needs a header", |
|
1806 | 1805 | "comment_type": "todo", |
|
1807 | 1806 | "comment_last_version": 0 |
|
1808 | 1807 | }, |
|
1809 | 1808 | "error" : null |
|
1810 | 1809 | } |
|
1811 | 1810 | |
|
1812 | 1811 | """ |
|
1813 | 1812 | |
|
1814 | 1813 | comment = ChangesetComment.get(comment_id) |
|
1815 | 1814 | if not comment: |
|
1816 | 1815 | raise JSONRPCError(f'comment `{comment_id}` does not exist') |
|
1817 | 1816 | |
|
1818 | 1817 | perms = ('repository.read', 'repository.write', 'repository.admin') |
|
1819 | 1818 | has_comment_perm = HasRepoPermissionAnyApi(*perms)\ |
|
1820 | 1819 | (user=apiuser, repo_name=comment.repo.repo_name) |
|
1821 | 1820 | |
|
1822 | 1821 | if not has_comment_perm: |
|
1823 | 1822 | raise JSONRPCError(f'comment `{comment_id}` does not exist') |
|
1824 | 1823 | |
|
1825 | 1824 | return comment |
|
1826 | 1825 | |
|
1827 | 1826 | |
|
1828 | 1827 | @jsonrpc_method() |
|
1829 | 1828 | def edit_comment(request, apiuser, message, comment_id, version, |
|
1830 | 1829 | userid=Optional(OAttr('apiuser'))): |
|
1831 | 1830 | """ |
|
1832 | 1831 | Edit a comment on a pull request or commit, |
|
1833 | 1832 | specified by `comment_id` and version. Initially, the version should be 0. |
|
1834 | 1833 | |
|
1835 | 1834 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1836 | 1835 | :type apiuser: AuthUser |
|
1837 | 1836 | :param comment_id: Specify the comment_id for editing |
|
1838 | 1837 | :type comment_id: int |
|
1839 | 1838 | :param version: version of the comment that will be created, starts from 0 |
|
1840 | 1839 | :type version: int |
|
1841 | 1840 | :param message: The text content of the comment. |
|
1842 | 1841 | :type message: str |
|
1843 | 1842 | :param userid: Comment on the pull request as this user |
|
1844 | 1843 | :type userid: Optional(str or int) |
|
1845 | 1844 | |
|
1846 | 1845 | Example output: |
|
1847 | 1846 | |
|
1848 | 1847 | .. code-block:: bash |
|
1849 | 1848 | |
|
1850 | 1849 | id : <id_given_in_input> |
|
1851 | 1850 | result : { |
|
1852 | 1851 | "comment": "<comment data>", |
|
1853 | 1852 | "version": "<Integer>", |
|
1854 | 1853 | }, |
|
1855 | 1854 | error : null |
|
1856 | 1855 | """ |
|
1857 | 1856 | |
|
1858 | 1857 | auth_user = apiuser |
|
1859 | 1858 | comment = ChangesetComment.get(comment_id) |
|
1860 | 1859 | if not comment: |
|
1861 | 1860 | raise JSONRPCError(f'comment `{comment_id}` does not exist') |
|
1862 | 1861 | |
|
1863 | 1862 | is_super_admin = has_superadmin_permission(apiuser) |
|
1864 | 1863 | is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\ |
|
1865 | 1864 | (user=apiuser, repo_name=comment.repo.repo_name) |
|
1866 | 1865 | |
|
1867 | 1866 | if not isinstance(userid, Optional): |
|
1868 | 1867 | if is_super_admin or is_repo_admin: |
|
1869 | 1868 | apiuser = get_user_or_error(userid) |
|
1870 | 1869 | auth_user = apiuser.AuthUser() |
|
1871 | 1870 | else: |
|
1872 | 1871 | raise JSONRPCError('userid is not the same as your user') |
|
1873 | 1872 | |
|
1874 | 1873 | comment_author = comment.author.user_id == auth_user.user_id |
|
1875 | 1874 | |
|
1876 | 1875 | if comment.immutable: |
|
1877 | 1876 | raise JSONRPCError("Immutable comment cannot be edited") |
|
1878 | 1877 | |
|
1879 | 1878 | if not (is_super_admin or is_repo_admin or comment_author): |
|
1880 | 1879 | raise JSONRPCError("you don't have access to edit this comment") |
|
1881 | 1880 | |
|
1882 | 1881 | try: |
|
1883 | 1882 | comment_history = CommentsModel().edit( |
|
1884 | 1883 | comment_id=comment_id, |
|
1885 | 1884 | text=message, |
|
1886 | 1885 | auth_user=auth_user, |
|
1887 | 1886 | version=version, |
|
1888 | 1887 | ) |
|
1889 | 1888 | Session().commit() |
|
1890 | 1889 | except CommentVersionMismatch: |
|
1891 | 1890 | raise JSONRPCError( |
|
1892 | 1891 | f'comment ({comment_id}) version ({version}) mismatch' |
|
1893 | 1892 | ) |
|
1894 | 1893 | if not comment_history and not message: |
|
1895 | 1894 | raise JSONRPCError( |
|
1896 | 1895 | f"comment ({comment_id}) can't be changed with empty string" |
|
1897 | 1896 | ) |
|
1898 | 1897 | |
|
1899 | 1898 | if comment.pull_request: |
|
1900 | 1899 | pull_request = comment.pull_request |
|
1901 | 1900 | PullRequestModel().trigger_pull_request_hook( |
|
1902 | 1901 | pull_request, apiuser, 'comment_edit', |
|
1903 | 1902 | data={'comment': comment}) |
|
1904 | 1903 | else: |
|
1905 | 1904 | db_repo = comment.repo |
|
1906 | 1905 | commit_id = comment.revision |
|
1907 | 1906 | commit = db_repo.get_commit(commit_id) |
|
1908 | 1907 | CommentsModel().trigger_commit_comment_hook( |
|
1909 | 1908 | db_repo, apiuser, 'edit', |
|
1910 | 1909 | data={'comment': comment, 'commit': commit}) |
|
1911 | 1910 | |
|
1912 | 1911 | data = { |
|
1913 | 1912 | 'comment': comment, |
|
1914 | 1913 | 'version': comment_history.version if comment_history else None, |
|
1915 | 1914 | } |
|
1916 | 1915 | return data |
|
1917 | 1916 | |
|
1918 | 1917 | |
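A sketch of the optimistic-versioning contract, reusing the hypothetical `api_call` helper from the fork_repo example: `version` must match the comment's current version, which starts at 0 and increments on every successful edit.

.. code-block:: python

    # first edit of a fresh comment uses version 0; a stale version
    # raises the version-mismatch error handled above
    edited = api_call('edit_comment', {
        'comment_id': 282,  # placeholder id
        'version': 0,
        'message': 'updated wording',
    })['result']
    print(edited['version'])  # the next edit must pass this value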
|
1919 | 1918 | # TODO(marcink): write this with all required logic for deleting comments in PRs or commits |
|
1920 | 1919 | # @jsonrpc_method() |
|
1921 | 1920 | # def delete_comment(request, apiuser, comment_id): |
|
1922 | 1921 | # auth_user = apiuser |
|
1923 | 1922 | # |
|
1924 | 1923 | # comment = ChangesetComment.get(comment_id) |
|
1925 | 1924 | # if not comment: |
|
1926 | 1925 | # raise JSONRPCError('comment `%s` does not exist' % (comment_id,)) |
|
1927 | 1926 | # |
|
1928 | 1927 | # is_super_admin = has_superadmin_permission(apiuser) |
|
1929 | 1928 | # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\ |
|
1930 | 1929 | # (user=apiuser, repo_name=comment.repo.repo_name) |
|
1931 | 1930 | # |
|
1932 | 1931 | # comment_author = comment.author.user_id == auth_user.user_id |
|
1933 | 1932 | # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author): |
|
1934 | 1933 | # raise JSONRPCError("you don't have access to edit this comment") |
|
1935 | 1934 | |
|
1936 | 1935 | @jsonrpc_method() |
|
1937 | 1936 | def grant_user_permission(request, apiuser, repoid, userid, perm): |
|
1938 | 1937 | """ |
|
1939 | 1938 | Grant permissions for the specified user on the given repository, |
|
1940 | 1939 | or update existing permissions if found. |
|
1941 | 1940 | |
|
1942 | 1941 | This command can only be run using an |authtoken| with admin |
|
1943 | 1942 | permissions on the |repo|. |
|
1944 | 1943 | |
|
1945 | 1944 | :param apiuser: This is filled automatically from the |authtoken|. |
|
1946 | 1945 | :type apiuser: AuthUser |
|
1947 | 1946 | :param repoid: Set the repository name or repository ID. |
|
1948 | 1947 | :type repoid: str or int |
|
1949 | 1948 | :param userid: Set the user name. |
|
1950 | 1949 | :type userid: str |
|
1951 | 1950 | :param perm: Set the user permissions, using the following format |
|
1952 | 1951 | ``(repository.(none|read|write|admin))`` |
|
1953 | 1952 | :type perm: str |
|
1954 | 1953 | |
|
1955 | 1954 | Example output: |
|
1956 | 1955 | |
|
1957 | 1956 | .. code-block:: bash |
|
1958 | 1957 | |
|
1959 | 1958 | id : <id_given_in_input> |
|
1960 | 1959 | result: { |
|
1961 | 1960 | "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`", |
|
1962 | 1961 | "success": true |
|
1963 | 1962 | } |
|
1964 | 1963 | error: null |
|
1965 | 1964 | """ |
|
1966 | 1965 | |
|
1967 | 1966 | repo = get_repo_or_error(repoid) |
|
1968 | 1967 | user = get_user_or_error(userid) |
|
1969 | 1968 | perm = get_perm_or_error(perm) |
|
1970 | 1969 | if not has_superadmin_permission(apiuser): |
|
1971 | 1970 | _perms = ('repository.admin',) |
|
1972 | 1971 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
1973 | 1972 | |
|
1974 | 1973 | perm_additions = [[user.user_id, perm.permission_name, "user"]] |
|
1975 | 1974 | try: |
|
1976 | 1975 | changes = RepoModel().update_permissions( |
|
1977 | 1976 | repo=repo, perm_additions=perm_additions, cur_user=apiuser) |
|
1978 | 1977 | |
|
1979 | 1978 | action_data = { |
|
1980 | 1979 | 'added': changes['added'], |
|
1981 | 1980 | 'updated': changes['updated'], |
|
1982 | 1981 | 'deleted': changes['deleted'], |
|
1983 | 1982 | } |
|
1984 | 1983 | audit_logger.store_api( |
|
1985 | 1984 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) |
|
1986 | 1985 | Session().commit() |
|
1987 | 1986 | PermissionModel().flush_user_permission_caches(changes) |
|
1988 | 1987 | |
|
1989 | 1988 | return { |
|
1990 | 1989 | 'msg': 'Granted perm: `{}` for user: `{}` in repo: `{}`'.format( |
|
1991 | 1990 | perm.permission_name, user.username, repo.repo_name |
|
1992 | 1991 | ), |
|
1993 | 1992 | 'success': True |
|
1994 | 1993 | } |
|
1995 | 1994 | except Exception: |
|
1996 | 1995 | log.exception("Exception occurred while trying to edit permissions for repo") |
|
1997 | 1996 | raise JSONRPCError( |
|
1998 | 1997 | 'failed to edit permission for user: `{}` in repo: `{}`'.format( |
|
1999 | 1998 | userid, repoid |
|
2000 | 1999 | ) |
|
2001 | 2000 | ) |
|
2002 | 2001 | |
|
2003 | 2002 | |
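A sketch of granting and then tightening a permission, reusing the hypothetical `api_call` helper from the fork_repo example; calling the method again for the same user simply updates the existing entry.

.. code-block:: python

    # grant write access, then upgrade the same user to admin
    api_call('grant_user_permission',
             {'repoid': 'foo/blog', 'userid': 'dev1', 'perm': 'repository.write'})
    api_call('grant_user_permission',
             {'repoid': 'foo/blog', 'userid': 'dev1', 'perm': 'repository.admin'})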
|
2004 | 2003 | @jsonrpc_method() |
|
2005 | 2004 | def revoke_user_permission(request, apiuser, repoid, userid): |
|
2006 | 2005 | """ |
|
2007 | 2006 | Revoke permission for a user on the specified repository. |
|
2008 | 2007 | |
|
2009 | 2008 | This command can only be run using an |authtoken| with admin |
|
2010 | 2009 | permissions on the |repo|. |
|
2011 | 2010 | |
|
2012 | 2011 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2013 | 2012 | :type apiuser: AuthUser |
|
2014 | 2013 | :param repoid: Set the repository name or repository ID. |
|
2015 | 2014 | :type repoid: str or int |
|
2016 | 2015 | :param userid: Set the user name of revoked user. |
|
2017 | 2016 | :type userid: str or int |
|
2018 | 2017 | |
|
2019 | 2018 | Example output: |
|
2020 | 2019 | |
|
2021 | 2020 | .. code-block:: bash |
|
2022 | 2021 | |
|
2023 | 2022 | id : <id_given_in_input> |
|
2024 | 2023 | result: { |
|
2025 | 2024 | "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`", |
|
2026 | 2025 | "success": true |
|
2027 | 2026 | } |
|
2028 | 2027 | error: null |
|
2029 | 2028 | """ |
|
2030 | 2029 | |
|
2031 | 2030 | repo = get_repo_or_error(repoid) |
|
2032 | 2031 | user = get_user_or_error(userid) |
|
2033 | 2032 | if not has_superadmin_permission(apiuser): |
|
2034 | 2033 | _perms = ('repository.admin',) |
|
2035 | 2034 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2036 | 2035 | |
|
2037 | 2036 | perm_deletions = [[user.user_id, None, "user"]] |
|
2038 | 2037 | try: |
|
2039 | 2038 | changes = RepoModel().update_permissions( |
|
2040 | 2039 | repo=repo, perm_deletions=perm_deletions, cur_user=apiuser) |
|
2041 | 2040 | |
|
2042 | 2041 | action_data = { |
|
2043 | 2042 | 'added': changes['added'], |
|
2044 | 2043 | 'updated': changes['updated'], |
|
2045 | 2044 | 'deleted': changes['deleted'], |
|
2046 | 2045 | } |
|
2047 | 2046 | audit_logger.store_api( |
|
2048 | 2047 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) |
|
2049 | 2048 | Session().commit() |
|
2050 | 2049 | PermissionModel().flush_user_permission_caches(changes) |
|
2051 | 2050 | |
|
2052 | 2051 | return { |
|
2053 | 2052 | 'msg': 'Revoked perm for user: `{}` in repo: `{}`'.format( |
|
2054 | 2053 | user.username, repo.repo_name |
|
2055 | 2054 | ), |
|
2056 | 2055 | 'success': True |
|
2057 | 2056 | } |
|
2058 | 2057 | except Exception: |
|
2059 | 2058 | log.exception("Exception occurred while trying to revoke permissions on repo") |
|
2060 | 2059 | raise JSONRPCError( |
|
2061 | 2060 | 'failed to edit permission for user: `{}` in repo: `{}`'.format( |
|
2062 | 2061 | userid, repoid |
|
2063 | 2062 | ) |
|
2064 | 2063 | ) |
|
2065 | 2064 | |
|
2066 | 2065 | |
|
2067 | 2066 | @jsonrpc_method() |
|
2068 | 2067 | def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm): |
|
2069 | 2068 | """ |
|
2070 | 2069 | Grant permission for a user group on the specified repository, |
|
2071 | 2070 | or update existing permissions. |
|
2072 | 2071 | |
|
2073 | 2072 | This command can only be run using an |authtoken| with admin |
|
2074 | 2073 | permissions on the |repo|. |
|
2075 | 2074 | |
|
2076 | 2075 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2077 | 2076 | :type apiuser: AuthUser |
|
2078 | 2077 | :param repoid: Set the repository name or repository ID. |
|
2079 | 2078 | :type repoid: str or int |
|
2080 | 2079 | :param usergroupid: Specify the ID of the user group. |
|
2081 | 2080 | :type usergroupid: str or int |
|
2082 | 2081 | :param perm: Set the user group permissions using the following |
|
2083 | 2082 | format: (repository.(none|read|write|admin)) |
|
2084 | 2083 | :type perm: str |
|
2085 | 2084 | |
|
2086 | 2085 | Example output: |
|
2087 | 2086 | |
|
2088 | 2087 | .. code-block:: bash |
|
2089 | 2088 | |
|
2090 | 2089 | id : <id_given_in_input> |
|
2091 | 2090 | result : { |
|
2092 | 2091 | "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`", |
|
2093 | 2092 | "success": true |
|
2094 | 2093 | |
|
2095 | 2094 | } |
|
2096 | 2095 | error : null |
|
2097 | 2096 | |
|
2098 | 2097 | Example error output: |
|
2099 | 2098 | |
|
2100 | 2099 | .. code-block:: bash |
|
2101 | 2100 | |
|
2102 | 2101 | id : <id_given_in_input> |
|
2103 | 2102 | result : null |
|
2104 | 2103 | error : { |
|
2105 | 2104 | "failed to edit permission for user group: `<usergroup>` in repo `<repo>`' |
|
2106 | 2105 | } |
|
2107 | 2106 | |
|
2108 | 2107 | """ |
|
2109 | 2108 | |
|
2110 | 2109 | repo = get_repo_or_error(repoid) |
|
2111 | 2110 | perm = get_perm_or_error(perm) |
|
2112 | 2111 | if not has_superadmin_permission(apiuser): |
|
2113 | 2112 | _perms = ('repository.admin',) |
|
2114 | 2113 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2115 | 2114 | |
|
2116 | 2115 | user_group = get_user_group_or_error(usergroupid) |
|
2117 | 2116 | if not has_superadmin_permission(apiuser): |
|
2118 | 2117 | # check if we have at least read permission for this user group ! |
|
2119 | 2118 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
2120 | 2119 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
2121 | 2120 | user=apiuser, user_group_name=user_group.users_group_name): |
|
2122 | 2121 | raise JSONRPCError( |
|
2123 | 2122 | f'user group `{usergroupid}` does not exist') |
|
2124 | 2123 | |
|
2125 | 2124 | perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]] |
|
2126 | 2125 | try: |
|
2127 | 2126 | changes = RepoModel().update_permissions( |
|
2128 | 2127 | repo=repo, perm_additions=perm_additions, cur_user=apiuser) |
|
2129 | 2128 | action_data = { |
|
2130 | 2129 | 'added': changes['added'], |
|
2131 | 2130 | 'updated': changes['updated'], |
|
2132 | 2131 | 'deleted': changes['deleted'], |
|
2133 | 2132 | } |
|
2134 | 2133 | audit_logger.store_api( |
|
2135 | 2134 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) |
|
2136 | 2135 | Session().commit() |
|
2137 | 2136 | PermissionModel().flush_user_permission_caches(changes) |
|
2138 | 2137 | |
|
2139 | 2138 | return { |
|
2140 | 2139 | 'msg': 'Granted perm: `%s` for user group: `%s` in ' |
|
2141 | 2140 | 'repo: `%s`' % ( |
|
2142 | 2141 | perm.permission_name, user_group.users_group_name, |
|
2143 | 2142 | repo.repo_name |
|
2144 | 2143 | ), |
|
2145 | 2144 | 'success': True |
|
2146 | 2145 | } |
|
2147 | 2146 | except Exception: |
|
2148 | 2147 | log.exception( |
|
2149 | 2148 | "Exception occurred while trying to change permissions on repo") |
|
2150 | 2149 | raise JSONRPCError( |
|
2151 | 2150 | 'failed to edit permission for user group: `%s` in ' |
|
2152 | 2151 | 'repo: `%s`' % ( |
|
2153 | 2152 | usergroupid, repo.repo_name |
|
2154 | 2153 | ) |
|
2155 | 2154 | ) |
|
2156 | 2155 | |
|
2157 | 2156 | |
|
2158 | 2157 | @jsonrpc_method() |
|
2159 | 2158 | def revoke_user_group_permission(request, apiuser, repoid, usergroupid): |
|
2160 | 2159 | """ |
|
2161 | 2160 | Revoke the permissions of a user group on a given repository. |
|
2162 | 2161 | |
|
2163 | 2162 | This command can only be run using an |authtoken| with admin |
|
2164 | 2163 | permissions on the |repo|. |
|
2165 | 2164 | |
|
2166 | 2165 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2167 | 2166 | :type apiuser: AuthUser |
|
2168 | 2167 | :param repoid: Set the repository name or repository ID. |
|
2169 | 2168 | :type repoid: str or int |
|
2170 | 2169 | :param usergroupid: Specify the user group ID. |
|
2171 | 2170 | :type usergroupid: str or int |
|
2172 | 2171 | |
|
2173 | 2172 | Example output: |
|
2174 | 2173 | |
|
2175 | 2174 | .. code-block:: bash |
|
2176 | 2175 | |
|
2177 | 2176 | id : <id_given_in_input> |
|
2178 | 2177 | result: { |
|
2179 | 2178 | "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`", |
|
2180 | 2179 | "success": true |
|
2181 | 2180 | } |
|
2182 | 2181 | error: null |
|
2183 | 2182 | """ |
|
2184 | 2183 | |
|
2185 | 2184 | repo = get_repo_or_error(repoid) |
|
2186 | 2185 | if not has_superadmin_permission(apiuser): |
|
2187 | 2186 | _perms = ('repository.admin',) |
|
2188 | 2187 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2189 | 2188 | |
|
2190 | 2189 | user_group = get_user_group_or_error(usergroupid) |
|
2191 | 2190 | if not has_superadmin_permission(apiuser): |
|
2192 | 2191 | # check if we have at least read permission for this user group ! |
|
2193 | 2192 | _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',) |
|
2194 | 2193 | if not HasUserGroupPermissionAnyApi(*_perms)( |
|
2195 | 2194 | user=apiuser, user_group_name=user_group.users_group_name): |
|
2196 | 2195 | raise JSONRPCError( |
|
2197 | 2196 | f'user group `{usergroupid}` does not exist') |
|
2198 | 2197 | |
|
2199 | 2198 | perm_deletions = [[user_group.users_group_id, None, "user_group"]] |
|
2200 | 2199 | try: |
|
2201 | 2200 | changes = RepoModel().update_permissions( |
|
2202 | 2201 | repo=repo, perm_deletions=perm_deletions, cur_user=apiuser) |
|
2203 | 2202 | action_data = { |
|
2204 | 2203 | 'added': changes['added'], |
|
2205 | 2204 | 'updated': changes['updated'], |
|
2206 | 2205 | 'deleted': changes['deleted'], |
|
2207 | 2206 | } |
|
2208 | 2207 | audit_logger.store_api( |
|
2209 | 2208 | 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo) |
|
2210 | 2209 | Session().commit() |
|
2211 | 2210 | PermissionModel().flush_user_permission_caches(changes) |
|
2212 | 2211 | |
|
2213 | 2212 | return { |
|
2214 | 2213 | 'msg': 'Revoked perm for user group: `{}` in repo: `{}`'.format( |
|
2215 | 2214 | user_group.users_group_name, repo.repo_name |
|
2216 | 2215 | ), |
|
2217 | 2216 | 'success': True |
|
2218 | 2217 | } |
|
2219 | 2218 | except Exception: |
|
2220 | 2219 | log.exception("Exception occurred while trying to revoke " |
|
2221 | 2220 | "user group permission on repo") |
|
2222 | 2221 | raise JSONRPCError( |
|
2223 | 2222 | 'failed to edit permission for user group: `%s` in ' |
|
2224 | 2223 | 'repo: `%s`' % ( |
|
2225 | 2224 | user_group.users_group_name, repo.repo_name |
|
2226 | 2225 | ) |
|
2227 | 2226 | ) |
|
2228 | 2227 | |
|
2229 | 2228 | |
|
2230 | 2229 | @jsonrpc_method() |
|
2231 | 2230 | def pull(request, apiuser, repoid, remote_uri=Optional(None)): |
|
2232 | 2231 | """ |
|
2233 | 2232 | Triggers a pull on the given repository from a remote location. You |
|
2234 | 2233 | can use this to keep remote repositories up-to-date. |
|
2235 | 2234 | |
|
2236 | 2235 | This command can only be run using an |authtoken| with admin |
|
2237 | 2236 | rights to the specified repository. For more information, |
|
2238 | 2237 | see :ref:`config-token-ref`. |
|
2239 | 2238 | |
|
2240 | 2239 | This command takes the following options: |
|
2241 | 2240 | |
|
2242 | 2241 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2243 | 2242 | :type apiuser: AuthUser |
|
2244 | 2243 | :param repoid: The repository name or repository ID. |
|
2245 | 2244 | :type repoid: str or int |
|
2246 | 2245 | :param remote_uri: Optional remote URI to pass in for pull |
|
2247 | 2246 | :type remote_uri: str |
|
2248 | 2247 | |
|
2249 | 2248 | Example output: |
|
2250 | 2249 | |
|
2251 | 2250 | .. code-block:: bash |
|
2252 | 2251 | |
|
2253 | 2252 | id : <id_given_in_input> |
|
2254 | 2253 | result : { |
|
2255 | 2254 | "msg": "Pulled from url `<remote_url>` on repo `<repository name>`" |
|
2256 | 2255 | "repository": "<repository name>" |
|
2257 | 2256 | } |
|
2258 | 2257 | error : null |
|
2259 | 2258 | |
|
2260 | 2259 | Example error output: |
|
2261 | 2260 | |
|
2262 | 2261 | .. code-block:: bash |
|
2263 | 2262 | |
|
2264 | 2263 | id : <id_given_in_input> |
|
2265 | 2264 | result : null |
|
2266 | 2265 | error : { |
|
2267 | 2266 | "Unable to pull changes from `<remote_url>`" |
|
2268 | 2267 | } |
|
2269 | 2268 | |
|
2270 | 2269 | """ |
|
2271 | 2270 | |
|
2272 | 2271 | repo = get_repo_or_error(repoid) |
|
2273 | 2272 | remote_uri = Optional.extract(remote_uri) |
|
2274 | 2273 | remote_uri_display = remote_uri or repo.clone_uri_hidden |
|
2275 | 2274 | if not has_superadmin_permission(apiuser): |
|
2276 | 2275 | _perms = ('repository.admin',) |
|
2277 | 2276 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2278 | 2277 | |
|
2279 | 2278 | try: |
|
2280 | 2279 | ScmModel().pull_changes( |
|
2281 | 2280 | repo.repo_name, apiuser.username, remote_uri=remote_uri) |
|
2282 | 2281 | return { |
|
2283 | 2282 | 'msg': 'Pulled from url `{}` on repo `{}`'.format( |
|
2284 | 2283 | remote_uri_display, repo.repo_name), |
|
2285 | 2284 | 'repository': repo.repo_name |
|
2286 | 2285 | } |
|
2287 | 2286 | except Exception: |
|
2288 | 2287 | log.exception("Exception occurred while trying to " |
|
2289 | 2288 | "pull changes from remote location") |
|
2290 | 2289 | raise JSONRPCError( |
|
2291 | 2290 | 'Unable to pull changes from `%s`' % remote_uri_display |
|
2292 | 2291 | ) |
|
2293 | 2292 | |
|
2294 | 2293 | |
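A sketch of both pull variants, reusing the hypothetical `api_call` helper from the fork_repo example; the remote URL is a placeholder.

.. code-block:: python

    # pull from the stored clone URI, or override it per call
    api_call('pull', {'repoid': 'foo/blog'})
    api_call('pull', {'repoid': 'foo/blog',
                      'remote_uri': 'https://hg.example.com/blog'})  # placeholder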
|
2295 | 2294 | @jsonrpc_method() |
|
2296 | 2295 | def strip(request, apiuser, repoid, revision, branch): |
|
2297 | 2296 | """ |
|
2298 | 2297 | Strips the given revision from the specified repository. |
|
2299 | 2298 | |
|
2300 | 2299 | * This will remove the revision and all of its descendants. |
|
2301 | 2300 | |
|
2302 | 2301 | This command can only be run using an |authtoken| with admin rights to |
|
2303 | 2302 | the specified repository. |
|
2304 | 2303 | |
|
2305 | 2304 | This command takes the following options: |
|
2306 | 2305 | |
|
2307 | 2306 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2308 | 2307 | :type apiuser: AuthUser |
|
2309 | 2308 | :param repoid: The repository name or repository ID. |
|
2310 | 2309 | :type repoid: str or int |
|
2311 | 2310 | :param revision: The revision you wish to strip. |
|
2312 | 2311 | :type revision: str |
|
2313 | 2312 | :param branch: The branch from which to strip the revision. |
|
2314 | 2313 | :type branch: str |
|
2315 | 2314 | |
|
2316 | 2315 | Example output: |
|
2317 | 2316 | |
|
2318 | 2317 | .. code-block:: bash |
|
2319 | 2318 | |
|
2320 | 2319 | id : <id_given_in_input> |
|
2321 | 2320 | result : { |
|
2322 | 2321 | "msg": "Stripped commit <commit_hash> from repo `<repository name>`" |
|
2323 | 2322 | "repository": "<repository name>" |
|
2324 | 2323 | } |
|
2325 | 2324 | error : null |
|
2326 | 2325 | |
|
2327 | 2326 | Example error output: |
|
2328 | 2327 | |
|
2329 | 2328 | .. code-block:: bash |
|
2330 | 2329 | |
|
2331 | 2330 | id : <id_given_in_input> |
|
2332 | 2331 | result : null |
|
2333 | 2332 | error : { |
|
2334 | 2333 | "Unable to strip commit <commit_hash> from repo `<repository name>`" |
|
2335 | 2334 | } |
|
2336 | 2335 | |
|
2337 | 2336 | """ |
|
2338 | 2337 | |
|
2339 | 2338 | repo = get_repo_or_error(repoid) |
|
2340 | 2339 | if not has_superadmin_permission(apiuser): |
|
2341 | 2340 | _perms = ('repository.admin',) |
|
2342 | 2341 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2343 | 2342 | |
|
2344 | 2343 | try: |
|
2345 | 2344 | ScmModel().strip(repo, revision, branch) |
|
2346 | 2345 | audit_logger.store_api( |
|
2347 | 2346 | 'repo.commit.strip', action_data={'commit_id': revision}, |
|
2348 | 2347 | repo=repo, |
|
2349 | 2348 | user=apiuser, commit=True) |
|
2350 | 2349 | |
|
2351 | 2350 | return { |
|
2352 | 2351 | 'msg': 'Stripped commit {} from repo `{}`'.format( |
|
2353 | 2352 | revision, repo.repo_name), |
|
2354 | 2353 | 'repository': repo.repo_name |
|
2355 | 2354 | } |
|
2356 | 2355 | except Exception: |
|
2357 | 2356 | log.exception("Exception while trying to strip") |
|
2358 | 2357 | raise JSONRPCError( |
|
2359 | 2358 | 'Unable to strip commit {} from repo `{}`'.format( |
|
2360 | 2359 | revision, repo.repo_name) |
|
2361 | 2360 | ) |
|
2362 | 2361 | |
|
2363 | 2362 | |
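A sketch of a strip call, reusing the hypothetical `api_call` helper from the fork_repo example; note that strip is destructive and removes the commit together with all of its descendants.

.. code-block:: python

    api_call('strip', {
        'repoid': 'foo/blog',
        'revision': 'abcdef0123456789abcdef0123456789abcdef01',  # placeholder
        'branch': 'default',
    })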
|
2364 | 2363 | @jsonrpc_method() |
|
2365 | 2364 | def get_repo_settings(request, apiuser, repoid, key=Optional(None)): |
|
2366 | 2365 | """ |
|
2367 | 2366 | Returns all settings for a repository. If a key is given, it returns only |
|
2368 | 2367 | the setting identified by that key, or null. |
|
2369 | 2368 | |
|
2370 | 2369 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2371 | 2370 | :type apiuser: AuthUser |
|
2372 | 2371 | :param repoid: The repository name or repository id. |
|
2373 | 2372 | :type repoid: str or int |
|
2374 | 2373 | :param key: Key of the setting to return. |
|
2375 | 2374 | :type key: Optional(str) |
|
2376 | 2375 | |
|
2377 | 2376 | Example output: |
|
2378 | 2377 | |
|
2379 | 2378 | .. code-block:: bash |
|
2380 | 2379 | |
|
2381 | 2380 | { |
|
2382 | 2381 | "error": null, |
|
2383 | 2382 | "id": 237, |
|
2384 | 2383 | "result": { |
|
2385 | 2384 | "extensions_largefiles": true, |
|
2386 | 2385 | "extensions_evolve": true, |
|
2387 | 2386 | "hooks_changegroup_push_logger": true, |
|
2388 | 2387 | "hooks_changegroup_repo_size": false, |
|
2389 | 2388 | "hooks_outgoing_pull_logger": true, |
|
2390 | 2389 | "phases_publish": "True", |
|
2391 | 2390 | "rhodecode_hg_use_rebase_for_merging": true, |
|
2392 | 2391 | "rhodecode_pr_merge_enabled": true, |
|
2393 | 2392 | "rhodecode_use_outdated_comments": true |
|
2394 | 2393 | } |
|
2395 | 2394 | } |
|
2396 | 2395 | """ |
|
2397 | 2396 | |
|
2398 | 2397 | # Restrict access to this api method to super-admins, and repo admins only. |
|
2399 | 2398 | repo = get_repo_or_error(repoid) |
|
2400 | 2399 | if not has_superadmin_permission(apiuser): |
|
2401 | 2400 | _perms = ('repository.admin',) |
|
2402 | 2401 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2403 | 2402 | |
|
2404 | 2403 | try: |
|
2405 | 2404 | settings_model = VcsSettingsModel(repo=repo) |
|
2406 | 2405 | settings = settings_model.get_global_settings() |
|
2407 | 2406 | settings.update(settings_model.get_repo_settings()) |
|
2408 | 2407 | |
|
2409 | 2408 | # If only a single setting is requested fetch it from all settings. |
|
2410 | 2409 | key = Optional.extract(key) |
|
2411 | 2410 | if key is not None: |
|
2412 | 2411 | settings = settings.get(key, None) |
|
2413 | 2412 | except Exception: |
|
2414 | 2413 | msg = f'Failed to fetch settings for repository `{repoid}`' |
|
2415 | 2414 | log.exception(msg) |
|
2416 | 2415 | raise JSONRPCError(msg) |
|
2417 | 2416 | |
|
2418 | 2417 | return settings |
|
2419 | 2418 | |
|
2420 | 2419 | |
|
2421 | 2420 | @jsonrpc_method() |
|
2422 | 2421 | def set_repo_settings(request, apiuser, repoid, settings): |
|
2423 | 2422 | """ |
|
2424 | 2423 | Update repository settings. Returns true on success. |
|
2425 | 2424 | |
|
2426 | 2425 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2427 | 2426 | :type apiuser: AuthUser |
|
2428 | 2427 | :param repoid: The repository name or repository id. |
|
2429 | 2428 | :type repoid: str or int |
|
2430 | 2429 | :param settings: The new settings for the repository. |
|
2431 | 2430 | :type settings: dict |
|
2432 | 2431 | |
|
2433 | 2432 | Example output: |
|
2434 | 2433 | |
|
2435 | 2434 | .. code-block:: bash |
|
2436 | 2435 | |
|
2437 | 2436 | { |
|
2438 | 2437 | "error": null, |
|
2439 | 2438 | "id": 237, |
|
2440 | 2439 | "result": true |
|
2441 | 2440 | } |
|
2442 | 2441 | """ |
|
2443 | 2442 | # Restrict access to this api method to super-admins, and repo admins only. |
|
2444 | 2443 | repo = get_repo_or_error(repoid) |
|
2445 | 2444 | if not has_superadmin_permission(apiuser): |
|
2446 | 2445 | _perms = ('repository.admin',) |
|
2447 | 2446 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2448 | 2447 | |
|
2449 | 2448 | if type(settings) is not dict: |
|
2450 | 2449 | raise JSONRPCError('Settings have to be a JSON Object.') |
|
2451 | 2450 | |
|
2452 | 2451 | try: |
|
2453 | 2452 | settings_model = VcsSettingsModel(repo=repoid) |
|
2454 | 2453 | |
|
2455 | 2454 | # Merge global, repo and incoming settings. |
|
2456 | 2455 | new_settings = settings_model.get_global_settings() |
|
2457 | 2456 | new_settings.update(settings_model.get_repo_settings()) |
|
2458 | 2457 | new_settings.update(settings) |
|
2459 | 2458 | |
|
2460 | 2459 | # Update the settings. |
|
2461 | 2460 | inherit_global_settings = new_settings.get( |
|
2462 | 2461 | 'inherit_global_settings', False) |
|
2463 | 2462 | settings_model.create_or_update_repo_settings( |
|
2464 | 2463 | new_settings, inherit_global_settings=inherit_global_settings) |
|
2465 | 2464 | Session().commit() |
|
2466 | 2465 | except Exception: |
|
2467 | 2466 | msg = f'Failed to update settings for repository `{repoid}`' |
|
2468 | 2467 | log.exception(msg) |
|
2469 | 2468 | raise JSONRPCError(msg) |
|
2470 | 2469 | |
|
2471 | 2470 | # Indicate success. |
|
2472 | 2471 | return True |
|
2473 | 2472 | |
|
2474 | 2473 | |
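A read-modify-write sketch combining the two settings methods, reusing the hypothetical `api_call` helper from the fork_repo example; since the method merges global, repo, and incoming settings itself, sending only the changed keys also works.

.. code-block:: python

    # fetch the effective settings, flip one flag, and write them back
    settings = api_call('get_repo_settings', {'repoid': 'foo/blog'})['result']
    settings['hooks_changegroup_repo_size'] = True
    api_call('set_repo_settings', {'repoid': 'foo/blog', 'settings': settings})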
|
2475 | 2474 | @jsonrpc_method() |
|
2476 | 2475 | def maintenance(request, apiuser, repoid): |
|
2477 | 2476 | """ |
|
2478 | 2477 | Triggers maintenance on the given repository. |
|
2479 | 2478 | |
|
2480 | 2479 | This command can only be run using an |authtoken| with admin |
|
2481 | 2480 | rights to the specified repository. For more information, |
|
2482 | 2481 | see :ref:`config-token-ref`. |
|
2483 | 2482 | |
|
2484 | 2483 | This command takes the following options: |
|
2485 | 2484 | |
|
2486 | 2485 | :param apiuser: This is filled automatically from the |authtoken|. |
|
2487 | 2486 | :type apiuser: AuthUser |
|
2488 | 2487 | :param repoid: The repository name or repository ID. |
|
2489 | 2488 | :type repoid: str or int |
|
2490 | 2489 | |
|
2491 | 2490 | Example output: |
|
2492 | 2491 | |
|
2493 | 2492 | .. code-block:: bash |
|
2494 | 2493 | |
|
2495 | 2494 | id : <id_given_in_input> |
|
2496 | 2495 | result : { |
|
2497 | 2496 | "msg": "executed maintenance command", |
|
2498 | 2497 | "executed_actions": [ |
|
2499 | 2498 | <action_message>, <action_message2>... |
|
2500 | 2499 | ], |
|
2501 | 2500 | "repository": "<repository name>" |
|
2502 | 2501 | } |
|
2503 | 2502 | error : null |
|
2504 | 2503 | |
|
2505 | 2504 | Example error output: |
|
2506 | 2505 | |
|
2507 | 2506 | .. code-block:: bash |
|
2508 | 2507 | |
|
2509 | 2508 | id : <id_given_in_input> |
|
2510 | 2509 | result : null |
|
2511 | 2510 | error : { |
|
2512 | 2511 | "Unable to execute maintenance on `<reponame>`" |
|
2513 | 2512 | } |
|
2514 | 2513 | |
|
2515 | 2514 | """ |
|
2516 | 2515 | |
|
2517 | 2516 | repo = get_repo_or_error(repoid) |
|
2518 | 2517 | if not has_superadmin_permission(apiuser): |
|
2519 | 2518 | _perms = ('repository.admin',) |
|
2520 | 2519 | validate_repo_permissions(apiuser, repoid, repo, _perms) |
|
2521 | 2520 | |
|
2522 | 2521 | try: |
|
2523 | 2522 | maintenance = repo_maintenance.RepoMaintenance() |
|
2524 | 2523 | executed_actions = maintenance.execute(repo) |
|
2525 | 2524 | |
|
2526 | 2525 | return { |
|
2527 | 2526 | 'msg': 'executed maintenance command', |
|
2528 | 2527 | 'executed_actions': executed_actions, |
|
2529 | 2528 | 'repository': repo.repo_name |
|
2530 | 2529 | } |
|
2531 | 2530 | except Exception: |
|
2532 | 2531 | log.exception("Exception occurred while trying to run maintenance") |
|
2533 | 2532 | raise JSONRPCError( |
|
2534 | 2533 | 'Unable to execute maintenance on `%s`' % repo.repo_name) |
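
A caller can distinguish the success and error shapes documented above by checking the `error` key of the reply; a minimal sketch with a placeholder URL and token:

.. code-block:: python

    import requests

    payload = {
        'id': 1,
        'auth_token': '<auth_token>',   # placeholder
        'method': 'maintenance',
        'args': {'repoid': 'my-repo'},
    }
    reply = requests.post('https://code.example.com/_admin/api', json=payload).json()
    if reply['error'] is None:
        print(reply['result']['executed_actions'])
    else:
        print('maintenance failed:', reply['error'])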
@@ -1,134 +1,132 b'' | |||
|
1 | 1 | |
|
2 | 2 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
3 | 3 | # |
|
4 | 4 | # This program is free software: you can redistribute it and/or modify |
|
5 | 5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | 6 | # (only), as published by the Free Software Foundation. |
|
7 | 7 | # |
|
8 | 8 | # This program is distributed in the hope that it will be useful, |
|
9 | 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | 11 | # GNU General Public License for more details. |
|
12 | 12 | # |
|
13 | 13 | # You should have received a copy of the GNU Affero General Public License |
|
14 | 14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | 15 | # |
|
16 | 16 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | 19 | |
|
20 | 20 | import pytest |
|
21 | 21 | import mock |
|
22 | 22 | |
|
23 | from rhodecode.apps._base import ADMIN_PREFIX | |
|
24 | from rhodecode.lib import helpers as h | |
|
25 | 23 | from rhodecode.lib.auth import check_password |
|
26 | 24 | from rhodecode.model.meta import Session |
|
27 | 25 | from rhodecode.model.user import UserModel |
|
28 | from rhodecode.tests import assert_session_flash | |
|
29 | from rhodecode.tests.fixture import Fixture, TestController, error_function | 

26 | from rhodecode.tests import assert_session_flash, TestController | |
|
27 | from rhodecode.tests.fixture import Fixture, error_function | |
|
30 | 28 | from rhodecode.tests.routes import route_path |
|
31 | 29 | |
|
32 | 30 | fixture = Fixture() |
|
33 | 31 | |
|
34 | 32 | |
|
35 | 33 | test_user_1 = 'testme' |
|
36 | 34 | test_user_1_password = '0jd83nHNS/d23n' |
|
37 | 35 | |
|
38 | 36 | |
|
39 | 37 | class TestMyAccountPassword(TestController): |
|
40 | 38 | def test_valid_change_password(self, user_util): |
|
41 | 39 | new_password = 'my_new_valid_password' |
|
42 | 40 | user = user_util.create_user(password=test_user_1_password) |
|
43 | 41 | self.log_user(user.username, test_user_1_password) |
|
44 | 42 | |
|
45 | 43 | form_data = [ |
|
46 | 44 | ('current_password', test_user_1_password), |
|
47 | 45 | ('__start__', 'new_password:mapping'), |
|
48 | 46 | ('new_password', new_password), |
|
49 | 47 | ('new_password-confirm', new_password), |
|
50 | 48 | ('__end__', 'new_password:mapping'), |
|
51 | 49 | ('csrf_token', self.csrf_token), |
|
52 | 50 | ] |
|
53 | 51 | response = self.app.post( |
|
54 | 52 | route_path('my_account_password_update'), form_data).follow() |
|
55 | 53 | assert 'Successfully updated password' in response |
|
56 | 54 | |
|
57 | 55 | # check_password depends on user being in session |
|
58 | 56 | Session().add(user) |
|
59 | 57 | try: |
|
60 | 58 | assert check_password(new_password, user.password) |
|
61 | 59 | finally: |
|
62 | 60 | Session().expunge(user) |
|
63 | 61 | |
|
64 | 62 | @pytest.mark.parametrize('current_pw, new_pw, confirm_pw', [ |
|
65 | 63 | ('', 'abcdef123', 'abcdef123'), |
|
66 | 64 | ('wrong_pw', 'abcdef123', 'abcdef123'), |
|
67 | 65 | (test_user_1_password, test_user_1_password, test_user_1_password), |
|
68 | 66 | (test_user_1_password, '', ''), |
|
69 | 67 | (test_user_1_password, 'abcdef123', ''), |
|
70 | 68 | (test_user_1_password, '', 'abcdef123'), |
|
71 | 69 | (test_user_1_password, 'not_the', 'same_pw'), |
|
72 | 70 | (test_user_1_password, 'short', 'short'), |
|
73 | 71 | ]) |
|
74 | 72 | def test_invalid_change_password(self, current_pw, new_pw, confirm_pw, |
|
75 | 73 | user_util): |
|
76 | 74 | user = user_util.create_user(password=test_user_1_password) |
|
77 | 75 | self.log_user(user.username, test_user_1_password) |
|
78 | 76 | |
|
79 | 77 | form_data = [ |
|
80 | 78 | ('current_password', current_pw), |
|
81 | 79 | ('__start__', 'new_password:mapping'), |
|
82 | 80 | ('new_password', new_pw), |
|
83 | 81 | ('new_password-confirm', confirm_pw), |
|
84 | 82 | ('__end__', 'new_password:mapping'), |
|
85 | 83 | ('csrf_token', self.csrf_token), |
|
86 | 84 | ] |
|
87 | 85 | response = self.app.post( |
|
88 | 86 | route_path('my_account_password_update'), form_data) |
|
89 | 87 | |
|
90 | 88 | assert_response = response.assert_response() |
|
91 | 89 | assert assert_response.get_elements('.error-block') |
|
92 | 90 | |
|
93 | 91 | @mock.patch.object(UserModel, 'update_user', error_function) |
|
94 | 92 | def test_invalid_change_password_exception(self, user_util): |
|
95 | 93 | user = user_util.create_user(password=test_user_1_password) |
|
96 | 94 | self.log_user(user.username, test_user_1_password) |
|
97 | 95 | |
|
98 | 96 | form_data = [ |
|
99 | 97 | ('current_password', test_user_1_password), |
|
100 | 98 | ('__start__', 'new_password:mapping'), |
|
101 | 99 | ('new_password', '123456'), |
|
102 | 100 | ('new_password-confirm', '123456'), |
|
103 | 101 | ('__end__', 'new_password:mapping'), |
|
104 | 102 | ('csrf_token', self.csrf_token), |
|
105 | 103 | ] |
|
106 | 104 | response = self.app.post( |
|
107 | 105 | route_path('my_account_password_update'), form_data) |
|
108 | 106 | assert_session_flash( |
|
109 | 107 | response, 'Error occurred during update of user password') |
|
110 | 108 | |
|
111 | 109 | def test_password_is_updated_in_session_on_password_change(self, user_util): |
|
112 | 110 | old_password = 'abcdef123' |
|
113 | 111 | new_password = 'abcdef124' |
|
114 | 112 | |
|
115 | 113 | user = user_util.create_user(password=old_password) |
|
116 | 114 | session = self.log_user(user.username, old_password) |
|
117 | 115 | old_password_hash = session['password'] |
|
118 | 116 | |
|
119 | 117 | form_data = [ |
|
120 | 118 | ('current_password', old_password), |
|
121 | 119 | ('__start__', 'new_password:mapping'), |
|
122 | 120 | ('new_password', new_password), |
|
123 | 121 | ('new_password-confirm', new_password), |
|
124 | 122 | ('__end__', 'new_password:mapping'), |
|
125 | 123 | ('csrf_token', self.csrf_token), |
|
126 | 124 | ] |
|
127 | 125 | self.app.post( |
|
128 | 126 | route_path('my_account_password_update'), form_data) |
|
129 | 127 | |
|
130 | 128 | response = self.app.get(route_path('home')) |
|
131 | 129 | session = response.get_session_from_response() |
|
132 | 130 | new_password_hash = session['rhodecode_user']['password'] |
|
133 | 131 | |
|
134 | 132 | assert old_password_hash != new_password_hash 
\ No newline at end of file 
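
The `__start__`/`__end__` pairs in the `form_data` lists above are deform/peppercorn structure markers: they group flat form fields into a nested mapping before schema validation. A standalone sketch of how such a POST body is parsed, using the `peppercorn` library that deform builds on:

.. code-block:: python

    import peppercorn

    fields = [
        ('current_password', 'old-secret'),
        ('__start__', 'new_password:mapping'),
        ('new_password', 'new-secret'),
        ('new_password-confirm', 'new-secret'),
        ('__end__', 'new_password:mapping'),
    ]
    print(peppercorn.parse(fields))
    # {'current_password': 'old-secret',
    #  'new_password': {'new_password': 'new-secret',
    #                   'new_password-confirm': 'new-secret'}}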
@@ -1,847 +1,845 b'' | |||
|
1 | 1 | # Copyright (C) 2017-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | |
|
20 | 20 | import os |
|
21 | 21 | import sys |
|
22 | 22 | import time |
|
23 | 23 | import platform |
|
24 | 24 | import collections |
|
25 | 25 | import psutil |
|
26 | 26 | from functools import wraps |
|
27 | 27 | |
|
28 | 28 | import pkg_resources |
|
29 | 29 | import logging |
|
30 | 30 | import resource |
|
31 | 31 | |
|
32 | 32 | import configparser |
|
33 | 33 | |
|
34 | 34 | from rc_license.models import LicenseModel |
|
35 | 35 | from rhodecode.lib.str_utils import safe_str |
|
36 | 36 | |
|
37 | 37 | log = logging.getLogger(__name__) |
|
38 | 38 | |
|
39 | 39 | |
|
40 | 40 | _NA = 'NOT AVAILABLE' |
|
41 | 41 | _NA_FLOAT = 0.0 |
|
42 | 42 | |
|
43 | 43 | STATE_OK = 'ok' |
|
44 | 44 | STATE_ERR = 'error' |
|
45 | 45 | STATE_WARN = 'warning' |
|
46 | 46 | |
|
47 | 47 | STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK} |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | registered_helpers = {} |
|
51 | 51 | |
|
52 | 52 | |
|
53 | 53 | def register_sysinfo(func): |
|
54 | 54 | """ |
|
55 | 55 | @register_helper |
|
56 | 56 | def db_check(): |
|
57 | 57 | pass |
|
58 | 58 | |
|
59 | 59 | db_check == registered_helpers['db_check'] |
|
60 | 60 | """ |
|
61 | 61 | global registered_helpers |
|
62 | 62 | registered_helpers[func.__name__] = func |
|
63 | 63 | |
|
64 | 64 | @wraps(func) |
|
65 | 65 | def _wrapper(*args, **kwargs): |
|
66 | 66 | return func(*args, **kwargs) |
|
67 | 67 | return _wrapper |
|
68 | 68 | |
|
69 | 69 | |
|
70 | 70 | # HELPERS |
|
71 | 71 | def percentage(part: (int, float), whole: (int, float)): |
|
72 | 72 | whole = float(whole) |
|
73 | 73 | if whole > 0: |
|
74 | 74 | return round(100 * float(part) / whole, 1) |
|
75 | 75 | return 0.0 |
|
76 | 76 | |
|
77 | 77 | |
|
78 | 78 | def get_storage_size(storage_path): |
|
79 | 79 | sizes = [] |
|
80 | 80 | for file_ in os.listdir(storage_path): |
|
81 | 81 | storage_file = os.path.join(storage_path, file_) |
|
82 | 82 | if os.path.isfile(storage_file): |
|
83 | 83 | try: |
|
84 | 84 | sizes.append(os.path.getsize(storage_file)) |
|
85 | 85 | except OSError: |
|
86 | 86 | log.exception('Failed to get size of storage file %s', storage_file) |
|
87 | 87 | pass |
|
88 | 88 | |
|
89 | 89 | return sum(sizes) |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | def get_resource(resource_type): |
|
93 | 93 | try: |
|
94 | 94 | return resource.getrlimit(resource_type) |
|
95 | 95 | except Exception: |
|
96 | 96 | return 'NOT_SUPPORTED' |
|
97 | 97 | |
|
98 | 98 | |
|
99 | 99 | def get_cert_path(ini_path): |
|
100 | 100 | default = '/etc/ssl/certs/ca-certificates.crt' |
|
101 | 101 | control_ca_bundle = os.path.join( |
|
102 | 102 | os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))), |
|
103 | 103 | '.rccontrol-profile/etc/ca-bundle.crt') |
|
104 | 104 | if os.path.isfile(control_ca_bundle): |
|
105 | 105 | default = control_ca_bundle |
|
106 | 106 | |
|
107 | 107 | return default |
|
108 | 108 | |
|
109 | 109 | |
|
110 | 110 | class SysInfoRes(object): |
|
111 | 111 | def __init__(self, value, state=None, human_value=None): |
|
112 | 112 | self.value = value |
|
113 | 113 | self.state = state or STATE_OK_DEFAULT |
|
114 | 114 | self.human_value = human_value or value |
|
115 | 115 | |
|
116 | 116 | def __json__(self): |
|
117 | 117 | return { |
|
118 | 118 | 'value': self.value, |
|
119 | 119 | 'state': self.state, |
|
120 | 120 | 'human_value': self.human_value, |
|
121 | 121 | } |
|
122 | 122 | |
|
123 | 123 | def get_value(self): |
|
124 | 124 | return self.__json__() |
|
125 | 125 | |
|
126 | 126 | def __str__(self): |
|
127 | 127 | return f'<SysInfoRes({self.__json__()})>' |
|
128 | 128 | |
|
129 | 129 | |
|
130 | 130 | class SysInfo(object): |
|
131 | 131 | |
|
132 | 132 | def __init__(self, func_name, **kwargs): |
|
133 | 133 | self.function_name = func_name |
|
134 | 134 | self.value = _NA |
|
135 | 135 | self.state = None |
|
136 | 136 | self.kwargs = kwargs or {} |
|
137 | 137 | |
|
138 | 138 | def __call__(self): |
|
139 | 139 | computed = self.compute(**self.kwargs) |
|
140 | 140 | if not isinstance(computed, SysInfoRes): |
|
141 | 141 | raise ValueError( |
|
142 | 142 | 'computed value for {} is not instance of ' |
|
143 | 143 | '{}, got {} instead'.format( |
|
144 | 144 | self.function_name, SysInfoRes, type(computed))) |
|
145 | 145 | return computed.__json__() |
|
146 | 146 | |
|
147 | 147 | def __str__(self): |
|
148 | 148 | return f'<SysInfo({self.function_name})>' |
|
149 | 149 | |
|
150 | 150 | def compute(self, **kwargs): |
|
151 | 151 | return self.function_name(**kwargs) |
|
152 | 152 | |
|
153 | 153 | |
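
Taken together, the registry decorator, `SysInfoRes` and `SysInfo` form a small pipeline: a helper returns a `SysInfoRes`, and `SysInfo` validates and serializes it. A minimal sketch within this module, using a hypothetical helper:

.. code-block:: python

    @register_sysinfo
    def example_check():
        # hypothetical helper; any sysinfo function must return a SysInfoRes
        return SysInfoRes(value={'answer': 42}, human_value='answer: 42')

    assert 'example_check' in registered_helpers
    data = SysInfo(registered_helpers['example_check'])()
    # -> {'value': {'answer': 42},
    #     'state': {'message': '', 'type': 'ok'},
    #     'human_value': 'answer: 42'}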
|
154 | 154 | # SysInfo functions |
|
155 | 155 | @register_sysinfo |
|
156 | 156 | def python_info(): |
|
157 | 157 | value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}', |
|
158 | 158 | executable=sys.executable) |
|
159 | 159 | return SysInfoRes(value=value) |
|
160 | 160 | |
|
161 | 161 | |
|
162 | 162 | @register_sysinfo |
|
163 | 163 | def py_modules(): |
|
164 | 164 | mods = dict([(p.project_name, {'version': p.version, 'location': p.location}) |
|
165 | 165 | for p in pkg_resources.working_set]) |
|
166 | 166 | |
|
167 | 167 | value = sorted(mods.items(), key=lambda k: k[0].lower()) |
|
168 | 168 | return SysInfoRes(value=value) |
|
169 | 169 | |
|
170 | 170 | |
|
171 | 171 | @register_sysinfo |
|
172 | 172 | def platform_type(): |
|
173 | 173 | from rhodecode.lib.utils import generate_platform_uuid |
|
174 | 174 | |
|
175 | 175 | value = dict( |
|
176 | 176 | name=safe_str(platform.platform()), |
|
177 | 177 | uuid=generate_platform_uuid() |
|
178 | 178 | ) |
|
179 | 179 | return SysInfoRes(value=value) |
|
180 | 180 | |
|
181 | 181 | |
|
182 | 182 | @register_sysinfo |
|
183 | 183 | def locale_info(): |
|
184 | 184 | import locale |
|
185 | 185 | |
|
186 | 186 | def safe_get_locale(locale_name): |
|
187 | 187 | try: |
|
188 | 188 | locale.getlocale(locale_name) |
|
189 | 189 | except TypeError: |
|
190 | 190 | return f'FAILED_LOCALE_GET:{locale_name}' |
|
191 | 191 | |
|
192 | 192 | value = dict( |
|
193 | 193 | locale_default=locale.getlocale(), |
|
194 | 194 | locale_lc_all=safe_get_locale(locale.LC_ALL), |
|
195 | 195 | locale_lc_ctype=safe_get_locale(locale.LC_CTYPE), |
|
196 | 196 | lang_env=os.environ.get('LANG'), |
|
197 | 197 | lc_all_env=os.environ.get('LC_ALL'), |
|
198 | 198 | local_archive_env=os.environ.get('LOCALE_ARCHIVE'), |
|
199 | 199 | ) |
|
200 | 200 | human_value = \ |
|
201 | 201 | f"LANG: {value['lang_env']}, \ |
|
202 | 202 | locale LC_ALL: {value['locale_lc_all']}, \ |
|
203 | 203 | locale LC_CTYPE: {value['locale_lc_ctype']}, \ |
|
204 | 204 | Default locales: {value['locale_default']}" |
|
205 | 205 | |
|
206 | 206 | return SysInfoRes(value=value, human_value=human_value) |
|
207 | 207 | |
|
208 | 208 | |
|
209 | 209 | @register_sysinfo |
|
210 | 210 | def ulimit_info(): |
|
211 | 211 | data = collections.OrderedDict([ |
|
212 | 212 | ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)), |
|
213 | 213 | ('file size', get_resource(resource.RLIMIT_FSIZE)), |
|
214 | 214 | ('stack size', get_resource(resource.RLIMIT_STACK)), |
|
215 | 215 | ('core file size', get_resource(resource.RLIMIT_CORE)), |
|
216 | 216 | ('address space size', get_resource(resource.RLIMIT_AS)), |
|
217 | 217 | ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)), |
|
218 | 218 | ('heap size', get_resource(resource.RLIMIT_DATA)), |
|
219 | 219 | ('rss size', get_resource(resource.RLIMIT_RSS)), |
|
220 | 220 | ('number of processes', get_resource(resource.RLIMIT_NPROC)), |
|
221 | 221 | ('open files', get_resource(resource.RLIMIT_NOFILE)), |
|
222 | 222 | ]) |
|
223 | 223 | |
|
224 | 224 | text = ', '.join(f'{k}:{v}' for k, v in data.items()) |
|
225 | 225 | |
|
226 | 226 | value = { |
|
227 | 227 | 'limits': data, |
|
228 | 228 | 'text': text, |
|
229 | 229 | } |
|
230 | 230 | return SysInfoRes(value=value) |
|
231 | 231 | |
|
232 | 232 | |
|
233 | 233 | @register_sysinfo |
|
234 | 234 | def uptime(): |
|
235 | 235 | from rhodecode.lib.helpers import age, time_to_datetime |
|
236 | 236 | from rhodecode.translation import TranslationString |
|
237 | 237 | |
|
238 | 238 | value = dict(boot_time=0, uptime=0, text='') |
|
239 | 239 | state = STATE_OK_DEFAULT |
|
240 | 240 | |
|
241 | 241 | boot_time = psutil.boot_time() |
|
242 | 242 | value['boot_time'] = boot_time |
|
243 | 243 | value['uptime'] = time.time() - boot_time |
|
244 | 244 | |
|
245 | 245 | date_or_age = age(time_to_datetime(boot_time)) |
|
246 | 246 | if isinstance(date_or_age, TranslationString): |
|
247 | 247 | date_or_age = date_or_age.interpolate() |
|
248 | 248 | |
|
249 | 249 | human_value = value.copy() |
|
250 | 250 | human_value['boot_time'] = time_to_datetime(boot_time) |
|
251 | 251 | human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False) |
|
252 | 252 | |
|
253 | 253 | human_value['text'] = f'Server started {date_or_age}' |
|
254 | 254 | return SysInfoRes(value=value, human_value=human_value) |
|
255 | 255 | |
|
256 | 256 | |
|
257 | 257 | @register_sysinfo |
|
258 | 258 | def memory(): |
|
259 | 259 | from rhodecode.lib.helpers import format_byte_size_binary |
|
260 | 260 | value = dict(available=0, used=0, used_real=0, cached=0, percent=0, |
|
261 | 261 | percent_used=0, free=0, inactive=0, active=0, shared=0, |
|
262 | 262 | total=0, buffers=0, text='') |
|
263 | 263 | |
|
264 | 264 | state = STATE_OK_DEFAULT |
|
265 | 265 | |
|
266 | 266 | value.update(dict(psutil.virtual_memory()._asdict())) |
|
267 | 267 | value['used_real'] = value['total'] - value['available'] |
|
268 | value['percent_used'] = psutil._common.usage_percent( | |
|
269 | value['used_real'], value['total'], 1) | |
|
268 | value['percent_used'] = psutil._common.usage_percent(value['used_real'], value['total'], 1) | |
|
270 | 269 | |
|
271 | 270 | human_value = value.copy() |
|
272 | 271 | human_value['text'] = '{}/{}, {}% used'.format( |
|
273 | 272 | format_byte_size_binary(value['used_real']), |
|
274 | 273 | format_byte_size_binary(value['total']), |
|
275 | 274 | value['percent_used']) |
|
276 | 275 | |
|
277 | 276 | keys = list(value.keys())[::] |
|
278 | 277 | keys.pop(keys.index('percent')) |
|
279 | 278 | keys.pop(keys.index('percent_used')) |
|
280 | 279 | keys.pop(keys.index('text')) |
|
281 | 280 | for k in keys: |
|
282 | 281 | human_value[k] = format_byte_size_binary(value[k]) |
|
283 | 282 | |
|
284 | 283 | if state['type'] == STATE_OK and value['percent_used'] > 90: |
|
285 | 284 | msg = 'Critical: your available RAM memory is very low.' |
|
286 | 285 | state = {'message': msg, 'type': STATE_ERR} |
|
287 | 286 | |
|
288 | 287 | elif state['type'] == STATE_OK and value['percent_used'] > 70: |
|
289 | 288 | msg = 'Warning: your available RAM memory is running low.' |
|
290 | 289 | state = {'message': msg, 'type': STATE_WARN} |
|
291 | 290 | |
|
292 | 291 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
293 | 292 | |
|
294 | 293 | |
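
The `used_real` figure reported above is `total - available`, which ignores reclaimable caches and buffers; psutil's plain `used` can look alarmingly high on Linux. A standalone sketch of the same computation:

.. code-block:: python

    import psutil

    vm = dict(psutil.virtual_memory()._asdict())
    used_real = vm['total'] - vm['available']  # memory that is genuinely occupied
    print(f"{round(100 * used_real / vm['total'], 1)}% really used")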
|
295 | 294 | @register_sysinfo |
|
296 | 295 | def machine_load(): |
|
297 | 296 | value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''} |
|
298 | 297 | state = STATE_OK_DEFAULT |
|
299 | 298 | |
|
300 | 299 | # load averages |
|
301 | 300 | if hasattr(psutil.os, 'getloadavg'): |
|
302 | 301 | value.update(dict( |
|
303 | 302 | list(zip(['1_min', '5_min', '15_min'], psutil.os.getloadavg())) |
|
304 | 303 | )) |
|
305 | 304 | |
|
306 | 305 | human_value = value.copy() |
|
307 | 306 | human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format( |
|
308 | 307 | value['1_min'], value['5_min'], value['15_min']) |
|
309 | 308 | |
|
310 | 309 | if state['type'] == STATE_OK and value['15_min'] > 5.0: |
|
311 | 310 | msg = 'Warning: your machine load is very high.' |
|
312 | 311 | state = {'message': msg, 'type': STATE_WARN} |
|
313 | 312 | |
|
314 | 313 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
315 | 314 | |
|
316 | 315 | |
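
Note the fixed `15_min > 5.0` threshold above is independent of core count; on a 32-core host a load of 5 is unremarkable. A sketch of a per-core reading (Unix-only, like the code above):

.. code-block:: python

    import os
    import psutil

    one_min, five_min, fifteen_min = os.getloadavg()  # raises OSError where unsupported
    cores = psutil.cpu_count() or 1
    print(f'15-min load per core: {fifteen_min / cores:.2f}')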
|
317 | 316 | @register_sysinfo |
|
318 | 317 | def cpu(): |
|
319 | 318 | value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []} |
|
320 | 319 | state = STATE_OK_DEFAULT |
|
321 | 320 | |
|
322 | 321 | value['cpu'] = psutil.cpu_percent(0.5) |
|
323 | 322 | value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True) |
|
324 | 323 | value['cpu_count'] = psutil.cpu_count() |
|
325 | 324 | |
|
326 | 325 | human_value = value.copy() |
|
327 | human_value['text'] = '{} cores at {} %'.format( | |
|
328 | value['cpu_count'], value['cpu']) | |
|
326 | human_value['text'] = '{} cores at {} %'.format(value['cpu_count'], value['cpu']) | |
|
329 | 327 | |
|
330 | 328 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
331 | 329 | |
|
332 | 330 | |
|
333 | 331 | @register_sysinfo |
|
334 | 332 | def storage(): |
|
335 | 333 | from rhodecode.lib.helpers import format_byte_size_binary |
|
336 | 334 | from rhodecode.model.settings import VcsSettingsModel |
|
337 | 335 | path = VcsSettingsModel().get_repos_location() |
|
338 | 336 | |
|
339 | 337 | value = dict(percent=0, used=0, total=0, path=path, text='') |
|
340 | 338 | state = STATE_OK_DEFAULT |
|
341 | 339 | |
|
342 | 340 | try: |
|
343 | 341 | value.update(dict(psutil.disk_usage(path)._asdict())) |
|
344 | 342 | except Exception as e: |
|
345 | 343 | log.exception('Failed to fetch disk info') |
|
346 | 344 | state = {'message': str(e), 'type': STATE_ERR} |
|
347 | 345 | |
|
348 | 346 | human_value = value.copy() |
|
349 | 347 | human_value['used'] = format_byte_size_binary(value['used']) |
|
350 | 348 | human_value['total'] = format_byte_size_binary(value['total']) |
|
351 | 349 | human_value['text'] = "{}/{}, {}% used".format( |
|
352 | 350 | format_byte_size_binary(value['used']), |
|
353 | 351 | format_byte_size_binary(value['total']), |
|
354 | 352 | value['percent']) |
|
355 | 353 | |
|
356 | 354 | if state['type'] == STATE_OK and value['percent'] > 90: |
|
357 | 355 | msg = 'Critical: your disk space is very low.' |
|
358 | 356 | state = {'message': msg, 'type': STATE_ERR} |
|
359 | 357 | |
|
360 | 358 | elif state['type'] == STATE_OK and value['percent'] > 70: |
|
361 | 359 | msg = 'Warning: your disk space is running low.' |
|
362 | 360 | state = {'message': msg, 'type': STATE_WARN} |
|
363 | 361 | |
|
364 | 362 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
365 | 363 | |
|
366 | 364 | |
|
367 | 365 | @register_sysinfo |
|
368 | 366 | def storage_inodes(): |
|
369 | 367 | from rhodecode.model.settings import VcsSettingsModel |
|
370 | 368 | path = VcsSettingsModel().get_repos_location() |
|
371 | 369 | |
|
372 | 370 | value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='') |
|
373 | 371 | state = STATE_OK_DEFAULT |
|
374 | 372 | |
|
375 | 373 | try: |
|
376 | 374 | i_stat = os.statvfs(path) |
|
377 | 375 | value['free'] = i_stat.f_ffree |
|
378 | 376 | value['used'] = i_stat.f_files-i_stat.f_favail |
|
379 | 377 | value['total'] = i_stat.f_files |
|
380 | 378 | value['percent'] = percentage(value['used'], value['total']) |
|
381 | 379 | except Exception as e: |
|
382 | 380 | log.exception('Failed to fetch disk inodes info') |
|
383 | 381 | state = {'message': str(e), 'type': STATE_ERR} |
|
384 | 382 | |
|
385 | 383 | human_value = value.copy() |
|
386 | 384 | human_value['text'] = "{}/{}, {}% used".format( |
|
387 | 385 | value['used'], value['total'], value['percent']) |
|
388 | 386 | |
|
389 | 387 | if state['type'] == STATE_OK and value['percent'] > 90: |
|
390 | 388 | msg = 'Critical: your disk free inodes are very low.' |
|
391 | 389 | state = {'message': msg, 'type': STATE_ERR} |
|
392 | 390 | |
|
393 | 391 | elif state['type'] == STATE_OK and value['percent'] > 70: |
|
394 | 392 | msg = 'Warning: your disk free inodes are running low.' |
|
395 | 393 | state = {'message': msg, 'type': STATE_WARN} |
|
396 | 394 | |
|
397 | 395 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
398 | 396 | |
|
399 | 397 | |
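
The inode numbers come straight from `os.statvfs`: `f_files` is the total inode count and `f_favail` the number still available to unprivileged users. The same fields in isolation:

.. code-block:: python

    import os

    st = os.statvfs('/')             # Unix-only, same call the function makes
    used = st.f_files - st.f_favail  # mirrors value['used'] above
    print(f'{used} of {st.f_files} inodes used')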
|
400 | 398 | @register_sysinfo |
|
401 | 399 | def storage_archives(): |
|
402 | 400 | import rhodecode |
|
403 | 401 | from rhodecode.lib.utils import safe_str |
|
404 | 402 | from rhodecode.lib.helpers import format_byte_size_binary |
|
405 | 403 | |
|
406 | 404 | msg = 'Archive cache storage is controlled by ' \ |
|
407 | 405 | 'archive_cache.store_dir=/path/to/cache option in the .ini file' |
|
408 | 406 | path = safe_str(rhodecode.CONFIG.get('archive_cache.store_dir', msg)) |
|
409 | 407 | |
|
410 | 408 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
411 | 409 | state = STATE_OK_DEFAULT |
|
412 | 410 | try: |
|
413 | 411 | items_count = 0 |
|
414 | 412 | used = 0 |
|
415 | 413 | for root, dirs, files in os.walk(path): |
|
416 | 414 | if root == path: |
|
417 | 415 | items_count = len(dirs) |
|
418 | 416 | |
|
419 | 417 | for f in files: |
|
420 | 418 | try: |
|
421 | 419 | used += os.path.getsize(os.path.join(root, f)) |
|
422 | 420 | except OSError: |
|
423 | 421 | pass |
|
424 | 422 | value.update({ |
|
425 | 423 | 'percent': 100, |
|
426 | 424 | 'used': used, |
|
427 | 425 | 'total': used, |
|
428 | 426 | 'items': items_count |
|
429 | 427 | }) |
|
430 | 428 | |
|
431 | 429 | except Exception as e: |
|
432 | 430 | log.exception('failed to fetch archive cache storage') |
|
433 | 431 | state = {'message': str(e), 'type': STATE_ERR} |
|
434 | 432 | |
|
435 | 433 | human_value = value.copy() |
|
436 | 434 | human_value['used'] = format_byte_size_binary(value['used']) |
|
437 | 435 | human_value['total'] = format_byte_size_binary(value['total']) |
|
438 | 436 | human_value['text'] = "{} ({} items)".format( |
|
439 | 437 | human_value['used'], value['items']) |
|
440 | 438 | |
|
441 | 439 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
442 | 440 | |
|
443 | 441 | |
|
444 | 442 | @register_sysinfo |
|
445 | 443 | def storage_gist(): |
|
446 | 444 | from rhodecode.model.gist import GIST_STORE_LOC |
|
447 | 445 | from rhodecode.model.settings import VcsSettingsModel |
|
448 | 446 | from rhodecode.lib.utils import safe_str |
|
449 | 447 | from rhodecode.lib.helpers import format_byte_size_binary |
|
450 | 448 | path = safe_str(os.path.join( |
|
451 | 449 | VcsSettingsModel().get_repos_location(), GIST_STORE_LOC)) |
|
452 | 450 | |
|
453 | 451 | # gist storage |
|
454 | 452 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
455 | 453 | state = STATE_OK_DEFAULT |
|
456 | 454 | |
|
457 | 455 | try: |
|
458 | 456 | items_count = 0 |
|
459 | 457 | used = 0 |
|
460 | 458 | for root, dirs, files in os.walk(path): |
|
461 | 459 | if root == path: |
|
462 | 460 | items_count = len(dirs) |
|
463 | 461 | |
|
464 | 462 | for f in files: |
|
465 | 463 | try: |
|
466 | 464 | used += os.path.getsize(os.path.join(root, f)) |
|
467 | 465 | except OSError: |
|
468 | 466 | pass |
|
469 | 467 | value.update({ |
|
470 | 468 | 'percent': 100, |
|
471 | 469 | 'used': used, |
|
472 | 470 | 'total': used, |
|
473 | 471 | 'items': items_count |
|
474 | 472 | }) |
|
475 | 473 | except Exception as e: |
|
476 | 474 | log.exception('failed to fetch gist storage items') |
|
477 | 475 | state = {'message': str(e), 'type': STATE_ERR} |
|
478 | 476 | |
|
479 | 477 | human_value = value.copy() |
|
480 | 478 | human_value['used'] = format_byte_size_binary(value['used']) |
|
481 | 479 | human_value['total'] = format_byte_size_binary(value['total']) |
|
482 | 480 | human_value['text'] = "{} ({} items)".format( |
|
483 | 481 | human_value['used'], value['items']) |
|
484 | 482 | |
|
485 | 483 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
486 | 484 | |
|
487 | 485 | |
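
`storage_archives` and `storage_gist` duplicate the same directory walk; if this ever gets refactored, a shared helper along these lines (hypothetical name, not part of this changeset) would cover both:

.. code-block:: python

    import os

    def _dir_usage(path):
        """Total bytes under ``path`` plus the number of top-level entries."""
        items_count, used = 0, 0
        for root, dirs, files in os.walk(path):
            if root == path:
                items_count = len(dirs)
            for f in files:
                try:
                    used += os.path.getsize(os.path.join(root, f))
                except OSError:
                    pass  # file vanished mid-walk; skip it
        return used, items_count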
|
488 | 486 | @register_sysinfo |
|
489 | 487 | def storage_temp(): |
|
490 | 488 | import tempfile |
|
491 | 489 | from rhodecode.lib.helpers import format_byte_size_binary |
|
492 | 490 | |
|
493 | 491 | path = tempfile.gettempdir() |
|
494 | 492 | value = dict(percent=0, used=0, total=0, items=0, path=path, text='') |
|
495 | 493 | state = STATE_OK_DEFAULT |
|
496 | 494 | |
|
497 | 495 | if not psutil: |
|
498 | 496 | return SysInfoRes(value=value, state=state) |
|
499 | 497 | |
|
500 | 498 | try: |
|
501 | 499 | value.update(dict(psutil.disk_usage(path)._asdict())) |
|
502 | 500 | except Exception as e: |
|
503 | 501 | log.exception('Failed to fetch temp dir info') |
|
504 | 502 | state = {'message': str(e), 'type': STATE_ERR} |
|
505 | 503 | |
|
506 | 504 | human_value = value.copy() |
|
507 | 505 | human_value['used'] = format_byte_size_binary(value['used']) |
|
508 | 506 | human_value['total'] = format_byte_size_binary(value['total']) |
|
509 | 507 | human_value['text'] = "{}/{}, {}% used".format( |
|
510 | 508 | format_byte_size_binary(value['used']), |
|
511 | 509 | format_byte_size_binary(value['total']), |
|
512 | 510 | value['percent']) |
|
513 | 511 | |
|
514 | 512 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
515 | 513 | |
|
516 | 514 | |
|
517 | 515 | @register_sysinfo |
|
518 | 516 | def search_info(): |
|
519 | 517 | import rhodecode |
|
520 | 518 | from rhodecode.lib.index import searcher_from_config |
|
521 | 519 | |
|
522 | 520 | backend = rhodecode.CONFIG.get('search.module', '') |
|
523 | 521 | location = rhodecode.CONFIG.get('search.location', '') |
|
524 | 522 | |
|
525 | 523 | try: |
|
526 | 524 | searcher = searcher_from_config(rhodecode.CONFIG) |
|
527 | 525 | searcher = searcher.__class__.__name__ |
|
528 | 526 | except Exception: |
|
529 | 527 | searcher = None |
|
530 | 528 | |
|
531 | 529 | value = dict( |
|
532 | 530 | backend=backend, searcher=searcher, location=location, text='') |
|
533 | 531 | state = STATE_OK_DEFAULT |
|
534 | 532 | |
|
535 | 533 | human_value = value.copy() |
|
536 | 534 | human_value['text'] = "backend:`{}`".format(human_value['backend']) |
|
537 | 535 | |
|
538 | 536 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
539 | 537 | |
|
540 | 538 | |
|
541 | 539 | @register_sysinfo |
|
542 | 540 | def git_info(): |
|
543 | 541 | from rhodecode.lib.vcs.backends import git |
|
544 | 542 | state = STATE_OK_DEFAULT |
|
545 | 543 | value = human_value = '' |
|
546 | 544 | try: |
|
547 | 545 | value = git.discover_git_version(raise_on_exc=True) |
|
548 | 546 | human_value = f'version reported from VCSServer: {value}' |
|
549 | 547 | except Exception as e: |
|
550 | 548 | state = {'message': str(e), 'type': STATE_ERR} |
|
551 | 549 | |
|
552 | 550 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
553 | 551 | |
|
554 | 552 | |
|
555 | 553 | @register_sysinfo |
|
556 | 554 | def hg_info(): |
|
557 | 555 | from rhodecode.lib.vcs.backends import hg |
|
558 | 556 | state = STATE_OK_DEFAULT |
|
559 | 557 | value = human_value = '' |
|
560 | 558 | try: |
|
561 | 559 | value = hg.discover_hg_version(raise_on_exc=True) |
|
562 | 560 | human_value = f'version reported from VCSServer: {value}' |
|
563 | 561 | except Exception as e: |
|
564 | 562 | state = {'message': str(e), 'type': STATE_ERR} |
|
565 | 563 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
566 | 564 | |
|
567 | 565 | |
|
568 | 566 | @register_sysinfo |
|
569 | 567 | def svn_info(): |
|
570 | 568 | from rhodecode.lib.vcs.backends import svn |
|
571 | 569 | state = STATE_OK_DEFAULT |
|
572 | 570 | value = human_value = '' |
|
573 | 571 | try: |
|
574 | 572 | value = svn.discover_svn_version(raise_on_exc=True) |
|
575 | 573 | human_value = f'version reported from VCSServer: {value}' |
|
576 | 574 | except Exception as e: |
|
577 | 575 | state = {'message': str(e), 'type': STATE_ERR} |
|
578 | 576 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
579 | 577 | |
|
580 | 578 | |
|
581 | 579 | @register_sysinfo |
|
582 | 580 | def vcs_backends(): |
|
583 | 581 | import rhodecode |
|
584 | 582 | value = rhodecode.CONFIG.get('vcs.backends') |
|
585 | 583 | human_value = 'Enabled backends in order: {}'.format(','.join(value)) |
|
586 | 584 | return SysInfoRes(value=value, human_value=human_value) |
|
587 | 585 | |
|
588 | 586 | |
|
589 | 587 | @register_sysinfo |
|
590 | 588 | def vcs_server(): |
|
591 | 589 | import rhodecode |
|
592 | 590 | from rhodecode.lib.vcs.backends import get_vcsserver_service_data |
|
593 | 591 | |
|
594 | 592 | server_url = rhodecode.CONFIG.get('vcs.server') |
|
595 | 593 | enabled = rhodecode.CONFIG.get('vcs.server.enable') |
|
596 | 594 | protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http' |
|
597 | 595 | state = STATE_OK_DEFAULT |
|
598 | 596 | version = None |
|
599 | 597 | workers = 0 |
|
600 | 598 | |
|
601 | 599 | try: |
|
602 | 600 | data = get_vcsserver_service_data() |
|
603 | 601 | if data and 'version' in data: |
|
604 | 602 | version = data['version'] |
|
605 | 603 | |
|
606 | 604 | if data and 'config' in data: |
|
607 | 605 | conf = data['config'] |
|
608 | 606 | workers = conf.get('workers', 'NOT AVAILABLE') |
|
609 | 607 | |
|
610 | 608 | connection = 'connected' |
|
611 | 609 | except Exception as e: |
|
612 | 610 | connection = 'failed' |
|
613 | 611 | state = {'message': str(e), 'type': STATE_ERR} |
|
614 | 612 | |
|
615 | 613 | value = dict( |
|
616 | 614 | url=server_url, |
|
617 | 615 | enabled=enabled, |
|
618 | 616 | protocol=protocol, |
|
619 | 617 | connection=connection, |
|
620 | 618 | version=version, |
|
621 | 619 | text='', |
|
622 | 620 | ) |
|
623 | 621 | |
|
624 | 622 | human_value = value.copy() |
|
625 | 623 | human_value['text'] = \ |
|
626 | 624 | '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format( |
|
627 | 625 | url=server_url, ver=version, workers=workers, mode=protocol, |
|
628 | 626 | conn=connection) |
|
629 | 627 | |
|
630 | 628 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
631 | 629 | |
|
632 | 630 | |
|
633 | 631 | @register_sysinfo |
|
634 | 632 | def vcs_server_config(): |
|
635 | 633 | from rhodecode.lib.vcs.backends import get_vcsserver_service_data |
|
636 | 634 | state = STATE_OK_DEFAULT |
|
637 | 635 | |
|
638 | 636 | value = {} |
|
639 | 637 | try: |
|
640 | 638 | data = get_vcsserver_service_data() |
|
641 | 639 | value = data['app_config'] |
|
642 | 640 | except Exception as e: |
|
643 | 641 | state = {'message': str(e), 'type': STATE_ERR} |
|
644 | 642 | |
|
645 | 643 | human_value = value.copy() |
|
646 | 644 | human_value['text'] = 'VCS Server config' |
|
647 | 645 | |
|
648 | 646 | return SysInfoRes(value=value, state=state, human_value=human_value) |
|
649 | 647 | |
|
650 | 648 | |
|
651 | 649 | @register_sysinfo |
|
652 | 650 | def rhodecode_app_info(): |
|
653 | 651 | import rhodecode |
|
654 | 652 | edition = rhodecode.CONFIG.get('rhodecode.edition') |
|
655 | 653 | |
|
656 | 654 | value = dict( |
|
657 | 655 | rhodecode_version=rhodecode.__version__, |
|
658 | 656 | rhodecode_lib_path=os.path.abspath(rhodecode.__file__), |
|
659 | 657 | text='' |
|
660 | 658 | ) |
|
661 | 659 | human_value = value.copy() |
|
662 | 660 | human_value['text'] = 'RhodeCode {edition}, version {ver}'.format( |
|
663 | 661 | edition=edition, ver=value['rhodecode_version'] |
|
664 | 662 | ) |
|
665 | 663 | return SysInfoRes(value=value, human_value=human_value) |
|
666 | 664 | |
|
667 | 665 | |
|
668 | 666 | @register_sysinfo |
|
669 | 667 | def rhodecode_config(): |
|
670 | 668 | import rhodecode |
|
671 | 669 | path = rhodecode.CONFIG.get('__file__') |
|
672 | 670 | rhodecode_ini_safe = rhodecode.CONFIG.copy() |
|
673 | 671 | cert_path = get_cert_path(path) |
|
674 | 672 | |
|
675 | 673 | try: |
|
676 | 674 | config = configparser.ConfigParser() |
|
677 | 675 | config.read(path) |
|
678 | 676 | parsed_ini = config |
|
679 | 677 | if parsed_ini.has_section('server:main'): |
|
680 | 678 | parsed_ini = dict(parsed_ini.items('server:main')) |
|
681 | 679 | except Exception: |
|
682 | 680 | log.exception('Failed to read .ini file for display') |
|
683 | 681 | parsed_ini = {} |
|
684 | 682 | |
|
685 | 683 | rhodecode_ini_safe['server:main'] = parsed_ini |
|
686 | 684 | |
|
687 | 685 | blacklist = [ |
|
688 | 686 | f'rhodecode_{LicenseModel.LICENSE_DB_KEY}', |
|
689 | 687 | 'routes.map', |
|
690 | 688 | 'sqlalchemy.db1.url', |
|
691 | 689 | 'channelstream.secret', |
|
692 | 690 | 'beaker.session.secret', |
|
693 | 691 | 'rhodecode.encrypted_values.secret', |
|
694 | 692 | 'rhodecode_auth_github_consumer_key', |
|
695 | 693 | 'rhodecode_auth_github_consumer_secret', |
|
696 | 694 | 'rhodecode_auth_google_consumer_key', |
|
697 | 695 | 'rhodecode_auth_google_consumer_secret', |
|
698 | 696 | 'rhodecode_auth_bitbucket_consumer_secret', |
|
699 | 697 | 'rhodecode_auth_bitbucket_consumer_key', |
|
700 | 698 | 'rhodecode_auth_twitter_consumer_secret', |
|
701 | 699 | 'rhodecode_auth_twitter_consumer_key', |
|
702 | 700 | |
|
703 | 701 | 'rhodecode_auth_twitter_secret', |
|
704 | 702 | 'rhodecode_auth_github_secret', |
|
705 | 703 | 'rhodecode_auth_google_secret', |
|
706 | 704 | 'rhodecode_auth_bitbucket_secret', |
|
707 | 705 | |
|
708 | 706 | 'appenlight.api_key', |
|
709 | 707 | ('app_conf', 'sqlalchemy.db1.url') |
|
710 | 708 | ] |
|
711 | 709 | for k in blacklist: |
|
712 | 710 | if isinstance(k, tuple): |
|
713 | 711 | section, key = k |
|
714 | 712 | if section in rhodecode_ini_safe: |
|
715 | 713 | rhodecode_ini_safe[section] = '**OBFUSCATED**' |
|
716 | 714 | else: |
|
717 | 715 | rhodecode_ini_safe.pop(k, None) |
|
718 | 716 | |
|
719 | 717 | # TODO: maybe put some CONFIG checks here ? |
|
720 | 718 | return SysInfoRes(value={'config': rhodecode_ini_safe, |
|
721 | 719 | 'path': path, 'cert_path': cert_path}) |
|
722 | 720 | |
|
723 | 721 | |
|
724 | 722 | @register_sysinfo |
|
725 | 723 | def database_info(): |
|
726 | 724 | import rhodecode |
|
727 | 725 | from sqlalchemy.engine import url as engine_url |
|
728 | 726 | from rhodecode.model import meta |
|
729 | 727 | from rhodecode.model.meta import Session |
|
730 | 728 | from rhodecode.model.db import DbMigrateVersion |
|
731 | 729 | |
|
732 | 730 | state = STATE_OK_DEFAULT |
|
733 | 731 | |
|
734 | 732 | db_migrate = DbMigrateVersion.query().filter( |
|
735 | 733 | DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one() |
|
736 | 734 | |
|
737 | 735 | db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url']) |
|
738 | 736 | |
|
739 | 737 | try: |
|
740 | 738 | engine = meta.get_engine() |
|
741 | 739 | db_server_info = engine.dialect._get_server_version_info( |
|
742 | 740 | Session.connection(bind=engine)) |
|
743 | 741 | db_version = '.'.join(map(str, db_server_info)) |
|
744 | 742 | except Exception: |
|
745 | 743 | log.exception('failed to fetch db version') |
|
746 | 744 | db_version = 'UNKNOWN' |
|
747 | 745 | |
|
748 | 746 | db_info = dict( |
|
749 | 747 | migrate_version=db_migrate.version, |
|
750 | 748 | type=db_url_obj.get_backend_name(), |
|
751 | 749 | version=db_version, |
|
752 | 750 | url=repr(db_url_obj) |
|
753 | 751 | ) |
|
754 | 752 | current_version = db_migrate.version |
|
755 | 753 | expected_version = rhodecode.__dbversion__ |
|
756 | 754 | if state['type'] == STATE_OK and current_version != expected_version: |
|
757 | 755 | msg = 'Critical: database schema mismatch, ' \ |
|
758 | 756 | 'expected version {}, got {}. ' \ |
|
759 | 757 | 'Please run migrations on your database.'.format( |
|
760 | 758 | expected_version, current_version) |
|
761 | 759 | state = {'message': msg, 'type': STATE_ERR} |
|
762 | 760 | |
|
763 | 761 | human_value = db_info.copy() |
|
764 | 762 | human_value['url'] = "{} @ migration version: {}".format( |
|
765 | 763 | db_info['url'], db_info['migrate_version']) |
|
766 | 764 | human_value['version'] = "{} {}".format(db_info['type'], db_info['version']) |
|
767 | 765 | return SysInfoRes(value=db_info, state=state, human_value=human_value) |
|
768 | 766 | |
|
769 | 767 | |
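
Exposing `repr(db_url_obj)` rather than `str()` is deliberate: on SQLAlchemy 1.4+ the `repr` of a URL masks the password. A quick sketch:

.. code-block:: python

    from sqlalchemy.engine import url as engine_url

    db_url = engine_url.make_url('postgresql://rc_user:s3cret@localhost/rhodecode')
    print(repr(db_url))  # postgresql://rc_user:***@localhost/rhodecode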
|
770 | 768 | @register_sysinfo |
|
771 | 769 | def server_info(environ): |
|
772 | 770 | import rhodecode |
|
773 | 771 | from rhodecode.lib.base import get_server_ip_addr, get_server_port |
|
774 | 772 | |
|
775 | 773 | value = { |
|
776 | 774 | 'server_ip': '{}:{}'.format( |
|
777 | 775 | get_server_ip_addr(environ, log_errors=False), |
|
778 | 776 | get_server_port(environ) |
|
779 | 777 | ), |
|
780 | 778 | 'server_id': rhodecode.CONFIG.get('instance_id'), |
|
781 | 779 | } |
|
782 | 780 | return SysInfoRes(value=value) |
|
783 | 781 | |
|
784 | 782 | |
|
785 | 783 | @register_sysinfo |
|
786 | 784 | def usage_info(): |
|
787 | from rhodecode.model.db import User, Repository | |
|
785 | from rhodecode.model.db import User, Repository, true | |
|
788 | 786 | value = { |
|
789 | 787 | 'users': User.query().count(), |
|
790 | 'users_active': User.query().filter(User.active == True).count(), | 

788 | 'users_active': User.query().filter(User.active == true()).count(), | |
|
791 | 789 | 'repositories': Repository.query().count(), |
|
792 | 790 | 'repository_types': { |
|
793 | 791 | 'hg': Repository.query().filter( |
|
794 | 792 | Repository.repo_type == 'hg').count(), |
|
795 | 793 | 'git': Repository.query().filter( |
|
796 | 794 | Repository.repo_type == 'git').count(), |
|
797 | 795 | 'svn': Repository.query().filter( |
|
798 | 796 | Repository.repo_type == 'svn').count(), |
|
799 | 797 | }, |
|
800 | 798 | } |
|
801 | 799 | return SysInfoRes(value=value) |
|
802 | 800 | |
|
803 | 801 | |
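
The switch from `User.active == True` to `true()` swaps a Python boolean for SQLAlchemy's portable SQL TRUE construct, which also silences flake8's E712 warning. A self-contained sketch with a hypothetical model:

.. code-block:: python

    from sqlalchemy import Boolean, Column, Integer, select, true
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class Account(Base):  # hypothetical stand-in for the User model
        __tablename__ = 'accounts'
        id = Column(Integer, primary_key=True)
        active = Column(Boolean)

    # `true()` renders as a literal TRUE in the emitted SQL
    print(select(Account).where(Account.active == true()))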
|
804 | 802 | def get_system_info(environ): |
|
805 | 803 | environ = environ or {} |
|
806 | 804 | return { |
|
807 | 805 | 'rhodecode_app': SysInfo(rhodecode_app_info)(), |
|
808 | 806 | 'rhodecode_config': SysInfo(rhodecode_config)(), |
|
809 | 807 | 'rhodecode_usage': SysInfo(usage_info)(), |
|
810 | 808 | 'python': SysInfo(python_info)(), |
|
811 | 809 | 'py_modules': SysInfo(py_modules)(), |
|
812 | 810 | |
|
813 | 811 | 'platform': SysInfo(platform_type)(), |
|
814 | 812 | 'locale': SysInfo(locale_info)(), |
|
815 | 813 | 'server': SysInfo(server_info, environ=environ)(), |
|
816 | 814 | 'database': SysInfo(database_info)(), |
|
817 | 815 | 'ulimit': SysInfo(ulimit_info)(), |
|
818 | 816 | 'storage': SysInfo(storage)(), |
|
819 | 817 | 'storage_inodes': SysInfo(storage_inodes)(), |
|
820 | 818 | 'storage_archive': SysInfo(storage_archives)(), |
|
821 | 819 | 'storage_gist': SysInfo(storage_gist)(), |
|
822 | 820 | 'storage_temp': SysInfo(storage_temp)(), |
|
823 | 821 | |
|
824 | 822 | 'search': SysInfo(search_info)(), |
|
825 | 823 | |
|
826 | 824 | 'uptime': SysInfo(uptime)(), |
|
827 | 825 | 'load': SysInfo(machine_load)(), |
|
828 | 826 | 'cpu': SysInfo(cpu)(), |
|
829 | 827 | 'memory': SysInfo(memory)(), |
|
830 | 828 | |
|
831 | 829 | 'vcs_backends': SysInfo(vcs_backends)(), |
|
832 | 830 | 'vcs_server': SysInfo(vcs_server)(), |
|
833 | 831 | |
|
834 | 832 | 'vcs_server_config': SysInfo(vcs_server_config)(), |
|
835 | 833 | |
|
836 | 834 | 'git': SysInfo(git_info)(), |
|
837 | 835 | 'hg': SysInfo(hg_info)(), |
|
838 | 836 | 'svn': SysInfo(svn_info)(), |
|
839 | 837 | } |
|
840 | 838 | |
|
841 | 839 | |
|
842 | 840 | def load_system_info(key): |
|
843 | 841 | """ |
|
844 | 842 | get_sys_info('vcs_server') |
|
845 | 843 | get_sys_info('database') |
|
846 | 844 | """ |
|
847 | 845 | return SysInfo(registered_helpers[key])() |
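
Within this module, pulling a single registered check by name and reacting to its state looks like this sketch:

.. code-block:: python

    info = load_system_info('memory')   # any key in registered_helpers works
    if info['state']['type'] != STATE_OK:
        print('memory check:', info['state']['message'])
    print(info['human_value'])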
@@ -1,987 +1,987 b'' | |||
|
1 | 1 | # Copyright (C) 2011-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | |
|
20 | 20 | """ |
|
21 | 21 | Some simple helper functions |
|
22 | 22 | """ |
|
23 | 23 | |
|
24 | 24 | import collections |
|
25 | 25 | import datetime |
|
26 | 26 | import dateutil.relativedelta |
|
27 | 27 | import logging |
|
28 | 28 | import re |
|
29 | 29 | import sys |
|
30 | 30 | import time |
|
31 | 31 | import urllib.request |
|
32 | 32 | import urllib.parse |
|
33 | 33 | import urllib.error |
|
34 | 34 | import urlobject |
|
35 | 35 | import uuid |
|
36 | 36 | import getpass |
|
37 | 37 | import socket |
|
38 | 38 | import errno |
|
39 | 39 | import random |
|
40 | 40 | import functools |
|
41 | 41 | from contextlib import closing |
|
42 | 42 | |
|
43 | 43 | import pygments.lexers |
|
44 | 44 | import sqlalchemy |
|
45 | 45 | import sqlalchemy.event |
|
46 | 46 | import sqlalchemy.engine.url |
|
47 | 47 | import sqlalchemy.exc |
|
48 | 48 | import sqlalchemy.sql |
|
49 | 49 | import webob |
|
50 | 50 | from pyramid.settings import asbool |
|
51 | 51 | |
|
52 | 52 | import rhodecode |
|
53 | 53 | from rhodecode.translation import _, _pluralize |
|
54 | 54 | from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes |
|
55 | 55 | from rhodecode.lib.hash_utils import md5, md5_safe, sha1, sha1_safe |
|
56 | 56 | from rhodecode.lib.type_utils import aslist, str2bool, StrictAttributeDict, AttributeDict |
|
57 | 57 | |
|
58 | 58 | |
|
59 | 59 | def __get_lem(extra_mapping=None): |
|
60 | 60 | """ |
|
61 | 61 | Get language extension map based on what's inside pygments lexers |
|
62 | 62 | """ |
|
63 | 63 | d = collections.defaultdict(lambda: []) |
|
64 | 64 | |
|
65 | 65 | def __clean(s): |
|
66 | 66 | s = s.lstrip('*') |
|
67 | 67 | s = s.lstrip('.') |
|
68 | 68 | |
|
69 | 69 | if s.find('[') != -1: |
|
70 | 70 | exts = [] |
|
71 | 71 | start, stop = s.find('['), s.find(']') |
|
72 | 72 | |
|
73 | 73 | for suffix in s[start + 1:stop]: |
|
74 | 74 | exts.append(s[:s.find('[')] + suffix) |
|
75 | 75 | return [e.lower() for e in exts] |
|
76 | 76 | else: |
|
77 | 77 | return [s.lower()] |
|
78 | 78 | |
|
79 | 79 | for lx, t in sorted(pygments.lexers.LEXERS.items()): |
|
80 | 80 | m = list(map(__clean, t[-2])) |
|
81 | 81 | if m: |
|
82 | 82 | m = functools.reduce(lambda x, y: x + y, m) |
|
83 | 83 | for ext in m: |
|
84 | 84 | desc = lx.replace('Lexer', '') |
|
85 | 85 | d[ext].append(desc) |
|
86 | 86 | |
|
87 | 87 | data = dict(d) |
|
88 | 88 | |
|
89 | 89 | extra_mapping = extra_mapping or {} |
|
90 | 90 | if extra_mapping: |
|
91 | 91 | for k, v in list(extra_mapping.items()): |
|
92 | 92 | if k not in data: |
|
93 | 93 | # register new mapping2lexer |
|
94 | 94 | data[k] = [v] |
|
95 | 95 | |
|
96 | 96 | return data |
|
97 | 97 | |
|
98 | 98 | |
|
99 | 99 | def convert_line_endings(line: str, mode) -> str: |
|
100 | 100 | """ |
|
101 | 101 | Converts a given line's line endings according to the given mode 
|
102 | 102 | |
|
103 | 103 | Available modes are:: |
|
104 | 104 | 0 - Unix |
|
105 | 105 | 1 - Mac |
|
106 | 106 | 2 - DOS |
|
107 | 107 | |
|
108 | 108 | :param line: given line to convert |
|
109 | 109 | :param mode: mode to convert to |
|
110 | 110 | :return: converted line according to mode |
|
111 | 111 | """ |
|
112 | 112 | if mode == 0: |
|
113 | 113 | line = line.replace('\r\n', '\n') |
|
114 | 114 | line = line.replace('\r', '\n') |
|
115 | 115 | elif mode == 1: |
|
116 | 116 | line = line.replace('\r\n', '\r') |
|
117 | 117 | line = line.replace('\n', '\r') |
|
118 | 118 | elif mode == 2: |
|
119 | 119 | line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line) |
|
120 | 120 | return line |
|
121 | 121 | |
|
122 | 122 | |
|
123 | 123 | def detect_mode(line: str, default) -> int: |
|
124 | 124 | """ |
|
125 | 125 | Detects the line-break style of the given line; if it cannot be 

126 | 126 | determined, the given default value is returned 
|
127 | 127 | |
|
128 | 128 | :param line: str line |
|
129 | 129 | :param default: default |
|
130 | 130 | :return: line-end value, one of 0 - Unix, 1 - Mac, 2 - DOS 
|
131 | 131 | """ |
|
132 | 132 | if line.endswith('\r\n'): |
|
133 | 133 | return 2 |
|
134 | 134 | elif line.endswith('\n'): |
|
135 | 135 | return 0 |
|
136 | 136 | elif line.endswith('\r'): |
|
137 | 137 | return 1 |
|
138 | 138 | else: |
|
139 | 139 | return default |
|
140 | 140 | |
|
141 | 141 | |
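
A few concrete round trips through the two helpers above, derived directly from the mode table in the docstring:

.. code-block:: python

    assert convert_line_endings('a\r\nb\r', 0) == 'a\nb\n'      # mode 0: Unix
    assert convert_line_endings('a\nb\n', 1) == 'a\rb\r'        # mode 1: Mac
    assert convert_line_endings('a\nb\r', 2) == 'a\r\nb\r\n'    # mode 2: DOS
    assert detect_mode('line\r\n', 0) == 2                      # DOS ending detected
    assert detect_mode('no newline', 0) == 0                    # falls back to default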
|
142 | 142 | def remove_suffix(s, suffix): |
|
143 | 143 | if s.endswith(suffix): |
|
144 | 144 | s = s[:-1 * len(suffix)] |
|
145 | 145 | return s |
|
146 | 146 | |
|
147 | 147 | |
|
148 | 148 | def remove_prefix(s, prefix): |
|
149 | 149 | if s.startswith(prefix): |
|
150 | 150 | s = s[len(prefix):] |
|
151 | 151 | return s |
|
152 | 152 | |
|
153 | 153 | |
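
Since Python 3.9 the stdlib offers the same behavior as `str.removeprefix`/`str.removesuffix`, with one edge case handled better: `remove_suffix(s, '')` above returns `''` because `s[:-0]` is an empty slice, while `removesuffix` leaves `s` intact:

.. code-block:: python

    assert 'repo.git'.removesuffix('.git') == 'repo'
    assert 'fork/repo'.removeprefix('fork/') == 'repo'
    assert 'repo'.removesuffix('') == 'repo'   # the stdlib handles the empty suffix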
|
154 | 154 | def find_calling_context(ignore_modules=None, depth=4, output_writer=None, indent=True): |
|
155 | 155 | """ |
|
156 | 156 | Look through the calling stack and return the frame which called |
|
157 | 157 | this function and is part of the core module (i.e. rhodecode.*) 
|
158 | 158 | |
|
159 | 159 | :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib'] |
|
160 | 160 | :param depth: |
|
161 | 161 | :param output_writer: |
|
162 | 162 | :param indent: |
|
163 | 163 | |
|
164 | 164 | usage:: |
|
165 | 165 | |
|
166 | 166 | from rhodecode.lib.utils2 import find_calling_context |
|
167 | 167 | |
|
168 | 168 | calling_context = find_calling_context(ignore_modules=[ |
|
169 | 169 | 'rhodecode.lib.caching_query', |
|
170 | 170 | 'rhodecode.model.settings', |
|
171 | 171 | ]) |
|
172 | 172 | |
|
173 | 173 | """ |
|
174 | 174 | import inspect |
|
175 | 175 | if not output_writer: |
|
176 | 176 | try: |
|
177 | 177 | from rich import print as pprint |
|
178 | 178 | except ImportError: |
|
179 | 179 | pprint = print |
|
180 | 180 | output_writer = pprint |
|
181 | 181 | |
|
182 | 182 | frame = inspect.currentframe() |
|
183 | 183 | cc = [] |
|
184 | 184 | try: |
|
185 | 185 | for i in range(depth): # current frame + 3 callers |
|
186 | 186 | frame = frame.f_back |
|
187 | 187 | if not frame: |
|
188 | 188 | break |
|
189 | 189 | |
|
190 | 190 | info = inspect.getframeinfo(frame) |
|
191 | 191 | name = frame.f_globals.get('__name__') |
|
192 | 192 | if name not in (ignore_modules or []):  # guard against the None default 
|
193 | 193 | cc.insert(0, f'CALL_CONTEXT:{i}: file {info.filename}:{info.lineno} -> {info.function}') |
|
194 | 194 | finally: |
|
195 | 195 | # Avoids a reference cycle |
|
196 | 196 | del frame |
|
197 | 197 | |
|
198 | 198 | output_writer('* INFO: This code was called from: *') |
|
199 | 199 | for cnt, frm_info in enumerate(cc): |
|
200 | 200 | if not indent: |
|
201 | 201 | cnt = 1 |
|
202 | 202 | output_writer(' ' * cnt + frm_info) |
|
203 | 203 | |
|
204 | 204 | |
|
205 | 205 | def ping_connection(connection, branch): |
|
206 | 206 | if branch: |
|
207 | 207 | # "branch" refers to a sub-connection of a connection, |
|
208 | 208 | # we don't want to bother pinging on these. |
|
209 | 209 | return |
|
210 | 210 | |
|
211 | 211 | # turn off "close with result". This flag is only used with |
|
212 | 212 | # "connectionless" execution, otherwise will be False in any case |
|
213 | 213 | save_should_close_with_result = connection.should_close_with_result |
|
214 | 214 | connection.should_close_with_result = False |
|
215 | 215 | |
|
216 | 216 | try: |
|
217 | 217 | # run a SELECT 1. use a core select() so that |
|
218 | 218 | # the SELECT of a scalar value without a table is |
|
219 | 219 | # appropriately formatted for the backend |
|
220 | 220 | connection.scalar(sqlalchemy.sql.select([1])) |
|
221 | 221 | except sqlalchemy.exc.DBAPIError as err: |
|
222 | 222 | # catch SQLAlchemy's DBAPIError, which is a wrapper |
|
223 | 223 | # for the DBAPI's exception. It includes a .connection_invalidated |
|
224 | 224 | # attribute which specifies if this connection is a "disconnect" |
|
225 | 225 | # condition, which is based on inspection of the original exception |
|
226 | 226 | # by the dialect in use. |
|
227 | 227 | if err.connection_invalidated: |
|
228 | 228 | # run the same SELECT again - the connection will re-validate |
|
229 | 229 | # itself and establish a new connection. The disconnect detection |
|
230 | 230 | # here also causes the whole connection pool to be invalidated |
|
231 | 231 | # so that all stale connections are discarded. |
|
232 | 232 | connection.scalar(sqlalchemy.sql.select([1])) |
|
233 | 233 | else: |
|
234 | 234 | raise |
|
235 | 235 | finally: |
|
236 | 236 | # restore "close with result" |
|
237 | 237 | connection.should_close_with_result = save_should_close_with_result |
|
238 | 238 | |
|
239 | 239 | |
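
Newer SQLAlchemy (1.2+) ships the same stale-connection guard natively, which is worth knowing if this handler is ever revisited; a sketch of the built-in option, not a change to this code:

.. code-block:: python

    from sqlalchemy import create_engine

    # pool_pre_ping issues a lightweight SELECT before each checkout and
    # transparently recycles invalidated connections, like ping_connection above
    engine = create_engine('postgresql://user:pw@localhost/db', pool_pre_ping=True)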
|
240 | 240 | def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs): |
|
241 | 241 | """Custom engine_from_config functions.""" |
|
242 | 242 | log = logging.getLogger('sqlalchemy.engine') |
|
243 | 243 | use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None)) |
|
244 | 244 | debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None)) |
|
245 | 245 | |
|
246 | 246 | engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs) |
|
247 | 247 | |
|
248 | 248 | def color_sql(sql): |
|
249 | 249 | color_seq = '\033[1;33m' # This is yellow: code 33 |
|
250 | 250 | normal = '\x1b[0m' |
|
251 | 251 | return ''.join([color_seq, sql, normal]) |
|
252 | 252 | |
|
253 | 253 | if use_ping_connection: |
|
254 | 254 | log.debug('Adding ping_connection on the engine config.') |
|
255 | 255 | sqlalchemy.event.listen(engine, "engine_connect", ping_connection) |
|
256 | 256 | |
|
257 | 257 | if debug: |
|
258 | 258 | # attach events only for debug configuration |
|
259 | 259 | def before_cursor_execute(conn, cursor, statement, |
|
260 | 260 | parameters, context, executemany): |
|
261 | 261 | setattr(conn, 'query_start_time', time.time()) |
|
262 | 262 | log.info(color_sql(">>>>> STARTING QUERY >>>>>")) |
|
263 | 263 | find_calling_context(ignore_modules=[ |
|
264 | 264 | 'rhodecode.lib.caching_query', |
|
265 | 265 | 'rhodecode.model.settings', |
|
266 | 266 | ], output_writer=log.info) |
|
267 | 267 | |
|
268 | 268 | def after_cursor_execute(conn, cursor, statement, |
|
269 | 269 | parameters, context, executemany): |
|
270 | 270 | delattr(conn, 'query_start_time') |
|
271 | 271 | |
|
272 | 272 | sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute) |
|
273 | 273 | sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute) |
|
274 | 274 | |
|
275 | 275 | return engine |
|
276 | 276 | |
|
277 | 277 | |
|
278 | 278 | def get_encryption_key(config) -> bytes: |
|
279 | 279 | secret = config.get('rhodecode.encrypted_values.secret') |
|
280 | 280 | default = config['beaker.session.secret'] |
|
281 | 281 | enc_key = secret or default |
|
282 | 282 | |
|
283 | 283 | return safe_bytes(enc_key) |
|
284 | 284 | |
|
285 | 285 | |
|
286 | 286 | def age(prevdate, now=None, show_short_version=False, show_suffix=True, short_format=False): |
|
287 | 287 | """ |
|
288 | 288 | Turns a datetime into an age string. |
|
289 | 289 | If show_short_version is True, this generates a shorter string with |
|
290 | 290 | an approximate age; e.g. '1 day ago', rather than '1 day and 23 hours ago'.
|
291 | 291 | |
|
292 | 292 | *IMPORTANT*
|
293 | 293 | The code of this function is written in a special way so it's easier

294 | 294 | to port it to JavaScript. If you mean to update it, please also update the

295 | 295 | `jquery.timeago-extension.js` file
|
296 | 296 | |
|
297 | 297 | :param prevdate: datetime object |
|
298 | 298 | :param now: the current time; if not defined, we use

299 | 299 | `datetime.datetime.now()`
|
300 | 300 | :param show_short_version: if it should approximate the date and |
|
301 | 301 | return a shorter string |
|
302 | 302 | :param show_suffix: if True, append the 'ago' / 'in' suffix to the result
|
303 | 303 | :param short_format: show short format, e.g. 2d instead of 2 days
|
304 | 304 | :rtype: unicode |
|
305 | 305 | :returns: unicode words describing age |
|
306 | 306 | """ |
|
307 | 307 | |
|
308 | 308 | def _get_relative_delta(now, prevdate): |
|
309 | 309 | base = dateutil.relativedelta.relativedelta(now, prevdate) |
|
310 | 310 | return { |
|
311 | 311 | 'year': base.years, |
|
312 | 312 | 'month': base.months, |
|
313 | 313 | 'day': base.days, |
|
314 | 314 | 'hour': base.hours, |
|
315 | 315 | 'minute': base.minutes, |
|
316 | 316 | 'second': base.seconds, |
|
317 | 317 | } |
|
318 | 318 | |
|
319 | 319 | def _is_leap_year(year): |
|
320 | 320 | return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0) |
|
321 | 321 | |
|
322 | 322 | def get_month(prevdate): |
|
323 | 323 | return prevdate.month |
|
324 | 324 | |
|
325 | 325 | def get_year(prevdate): |
|
326 | 326 | return prevdate.year |
|
327 | 327 | |
|
328 | 328 | now = now or datetime.datetime.now() |
|
329 | 329 | order = ['year', 'month', 'day', 'hour', 'minute', 'second'] |
|
330 | 330 | deltas = {} |
|
331 | 331 | future = False |
|
332 | 332 | |
|
333 | 333 | if prevdate > now: |
|
334 | 334 | now_old = now |
|
335 | 335 | now = prevdate |
|
336 | 336 | prevdate = now_old |
|
337 | 337 | future = True |
|
338 | 338 | if future: |
|
339 | 339 | prevdate = prevdate.replace(microsecond=0) |
|
340 | 340 | # Get date parts deltas |
|
341 | 341 | for part in order: |
|
342 | 342 | rel_delta = _get_relative_delta(now, prevdate) |
|
343 | 343 | deltas[part] = rel_delta[part] |
|
344 | 344 | |
|
345 | 345 | # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00, |
|
346 | 346 | # not 1 hour, -59 minutes and -59 seconds) |
|
347 | 347 | offsets = [[5, 60], [4, 60], [3, 24]] |
|
348 | 348 | for element in offsets: # seconds, minutes, hours |
|
349 | 349 | num = element[0] |
|
350 | 350 | length = element[1] |
|
351 | 351 | |
|
352 | 352 | part = order[num] |
|
353 | 353 | carry_part = order[num - 1] |
|
354 | 354 | |
|
355 | 355 | if deltas[part] < 0: |
|
356 | 356 | deltas[part] += length |
|
357 | 357 | deltas[carry_part] -= 1 |
|
358 | 358 | |
|
359 | 359 | # Same thing for days except that the increment depends on the (variable) |
|
360 | 360 | # number of days in the month |
|
361 | 361 | month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] |
|
362 | 362 | if deltas['day'] < 0: |
|
363 | 363 | if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)): |
|
364 | 364 | deltas['day'] += 29 |
|
365 | 365 | else: |
|
366 | 366 | deltas['day'] += month_lengths[get_month(prevdate) - 1] |
|
367 | 367 | |
|
368 | 368 | deltas['month'] -= 1 |
|
369 | 369 | |
|
370 | 370 | if deltas['month'] < 0: |
|
371 | 371 | deltas['month'] += 12 |
|
372 | 372 | deltas['year'] -= 1 |
|
373 | 373 | |
|
374 | 374 | # Format the result |
|
375 | 375 | if short_format: |
|
376 | 376 | fmt_funcs = { |
|
377 | 377 | 'year': lambda d: '%dy' % d, |
|
378 | 378 | 'month': lambda d: '%dm' % d, |
|
379 | 379 | 'day': lambda d: '%dd' % d, |
|
380 | 380 | 'hour': lambda d: '%dh' % d, |
|
381 | 381 | 'minute': lambda d: '%dmin' % d, |
|
382 | 382 | 'second': lambda d: '%dsec' % d, |
|
383 | 383 | } |
|
384 | 384 | else: |
|
385 | 385 | fmt_funcs = { |
|
386 | 386 | 'year': lambda d: _pluralize('${num} year', '${num} years', d, mapping={'num': d}).interpolate(), |
|
387 | 387 | 'month': lambda d: _pluralize('${num} month', '${num} months', d, mapping={'num': d}).interpolate(), |
|
388 | 388 | 'day': lambda d: _pluralize('${num} day', '${num} days', d, mapping={'num': d}).interpolate(), |
|
389 | 389 | 'hour': lambda d: _pluralize('${num} hour', '${num} hours', d, mapping={'num': d}).interpolate(), |
|
390 | 390 | 'minute': lambda d: _pluralize('${num} minute', '${num} minutes', d, mapping={'num': d}).interpolate(), |
|
391 | 391 | 'second': lambda d: _pluralize('${num} second', '${num} seconds', d, mapping={'num': d}).interpolate(), |
|
392 | 392 | } |
|
393 | 393 | |
|
394 | 394 | i = 0 |
|
395 | 395 | for part in order: |
|
396 | 396 | value = deltas[part] |
|
397 | 397 | if value != 0: |
|
398 | 398 | |
|
399 | 399 | if i < 5: |
|
400 | 400 | sub_part = order[i + 1] |
|
401 | 401 | sub_value = deltas[sub_part] |
|
402 | 402 | else: |
|
403 | 403 | sub_value = 0 |
|
404 | 404 | |
|
405 | 405 | if sub_value == 0 or show_short_version: |
|
406 | 406 | _val = fmt_funcs[part](value) |
|
407 | 407 | if future: |
|
408 | 408 | if show_suffix: |
|
409 | 409 | return _('in ${ago}', mapping={'ago': _val}) |
|
410 | 410 | else: |
|
411 | 411 | return _(_val) |
|
412 | 412 | |
|
413 | 413 | else: |
|
414 | 414 | if show_suffix: |
|
415 | 415 | return _('${ago} ago', mapping={'ago': _val}) |
|
416 | 416 | else: |
|
417 | 417 | return _(_val) |
|
418 | 418 | |
|
419 | 419 | val = fmt_funcs[part](value) |
|
420 | 420 | val_detail = fmt_funcs[sub_part](sub_value) |
|
421 | 421 | mapping = {'val': val, 'detail': val_detail} |
|
422 | 422 | |
|
423 | 423 | if short_format: |
|
424 | 424 | datetime_tmpl = _('${val}, ${detail}', mapping=mapping) |
|
425 | 425 | if show_suffix: |
|
426 | 426 | datetime_tmpl = _('${val}, ${detail} ago', mapping=mapping) |
|
427 | 427 | if future: |
|
428 | 428 | datetime_tmpl = _('in ${val}, ${detail}', mapping=mapping) |
|
429 | 429 | else: |
|
430 | 430 | datetime_tmpl = _('${val} and ${detail}', mapping=mapping) |
|
431 | 431 | if show_suffix: |
|
432 | 432 | datetime_tmpl = _('${val} and ${detail} ago', mapping=mapping) |
|
433 | 433 | if future: |
|
434 | 434 | datetime_tmpl = _('in ${val} and ${detail}', mapping=mapping) |
|
435 | 435 | |
|
436 | 436 | return datetime_tmpl |
|
437 | 437 | i += 1 |
|
438 | 438 | return _('just now') |
|
439 | 439 | |
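# Usage sketch for age() (hypothetical dates; the exact wording depends on the
# active translations):
#
#     now = datetime.datetime(2023, 1, 10, 12, 0, 0)
#     age(datetime.datetime(2023, 1, 9, 11, 0, 0), now=now)
#     # -> '1 day and 1 hour ago'
#     age(datetime.datetime(2023, 1, 9, 11, 0, 0), now=now, show_short_version=True)
#     # -> '1 day ago'
#     age(datetime.datetime(2023, 1, 9, 11, 0, 0), now=now, short_format=True)
#     # -> '1d, 1h ago'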
|
440 | 440 | |
|
441 | 441 | def age_from_seconds(seconds): |
|
442 | 442 | seconds = safe_int(seconds) or 0 |
|
443 | 443 | prevdate = time_to_datetime(time.time() + seconds) |
|
444 | 444 | return age(prevdate, show_suffix=False, show_short_version=True) |
|
445 | 445 | |
|
446 | 446 | |
|
447 | 447 | def cleaned_uri(uri): |
|
448 | 448 | """ |
|
449 | 449 | Quotes unsafe chars such as '[' and ']' in the uri;

450 | 450 | according to RFC 3986 we cannot use such chars unencoded in a uri.

451 | 451 | :param uri:

452 | 452 | :return: uri with the unsafe chars percent-encoded
|
453 | 453 | """ |
|
454 | 454 | return urllib.parse.quote(uri, safe='@$:/') |
|
455 | 455 | |
|
456 | 456 | |
|
457 | 457 | def credentials_filter(uri): |
|
458 | 458 | """ |
|
459 | 459 | Returns a url with the credentials removed
|
460 | 460 | |
|
461 | 461 | :param uri: |
|
462 | 462 | """ |
|
463 | 463 | import urlobject |
|
464 | 464 | if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue): |
|
465 | 465 | return 'InvalidDecryptionKey' |
|
466 | 466 | |
|
467 | 467 | url_obj = urlobject.URLObject(cleaned_uri(uri)) |
|
468 | 468 | url_obj = url_obj.without_password().without_username() |
|
469 | 469 | |
|
470 | 470 | return url_obj |
|
471 | 471 | |
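# Usage sketch (hypothetical clone URL): username and password are stripped,
# the rest of the url is preserved.
#
#     credentials_filter('https://john:secret@code.example.com/project/repo')
#     # -> 'https://code.example.com/project/repo'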
|
472 | 472 | |
|
473 | 473 | def get_host_info(request): |
|
474 | 474 | """ |
|
475 | 475 | Generate host info; to obtain the full url, e.g. https://server.com,

476 | 476 | use

477 | 477 | `{scheme}://{netloc}`
|
478 | 478 | """ |
|
479 | 479 | if not request: |
|
480 | 480 | return {} |
|
481 | 481 | |
|
482 | 482 | qualified_home_url = request.route_url('home') |
|
483 | 483 | parsed_url = urlobject.URLObject(qualified_home_url) |
|
484 | 484 | decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/'))) |
|
485 | 485 | |
|
486 | 486 | return { |
|
487 | 487 | 'scheme': parsed_url.scheme, |
|
488 | 488 | 'netloc': parsed_url.netloc+decoded_path, |
|
489 | 489 | 'hostname': parsed_url.hostname, |
|
490 | 490 | } |
|
491 | 491 | |
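# Sketch of the returned mapping, assuming a hypothetical deployment behind a
# proxy prefix at 'https://code.example.com/rc':
#
#     {'scheme': 'https',
#      'netloc': 'code.example.com/rc',   # includes the decoded proxy-prefix path
#      'hostname': 'code.example.com'}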
|
492 | 492 | |
|
493 | 493 | def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override): |
|
494 | 494 | qualified_home_url = request.route_url('home') |
|
495 | 495 | parsed_url = urlobject.URLObject(qualified_home_url) |
|
496 | 496 | decoded_path = safe_str(urllib.parse.unquote(parsed_url.path.rstrip('/'))) |
|
497 | 497 | |
|
498 | 498 | args = { |
|
499 | 499 | 'scheme': parsed_url.scheme, |
|
500 | 500 | 'user': '', |
|
501 | 501 | 'sys_user': getpass.getuser(), |
|
502 | 502 | # path if we use proxy-prefix |
|
503 | 503 | 'netloc': parsed_url.netloc+decoded_path, |
|
504 | 504 | 'hostname': parsed_url.hostname, |
|
505 | 505 | 'prefix': decoded_path, |
|
506 | 506 | 'repo': repo_name, |
|
507 | 507 | 'repoid': str(repo_id), |
|
508 | 508 | 'repo_type': repo_type |
|
509 | 509 | } |
|
510 | 510 | args.update(override) |
|
511 | 511 | args['user'] = urllib.parse.quote(safe_str(args['user'])) |
|
512 | 512 | |
|
513 | 513 | for k, v in list(args.items()): |
|
514 | 514 | tmpl_key = '{%s}' % k |
|
515 | 515 | uri_tmpl = uri_tmpl.replace(tmpl_key, v) |
|
516 | 516 | |
|
517 | 517 | # special case for SVN clone url |
|
518 | 518 | if repo_type == 'svn': |
|
519 | 519 | uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://') |
|
520 | 520 | |
|
521 | 521 | # remove leading @ sign if it's present. Case of empty user |
|
522 | 522 | url_obj = urlobject.URLObject(uri_tmpl) |
|
523 | 523 | url = url_obj.with_netloc(url_obj.netloc.lstrip('@')) |
|
524 | 524 | |
|
525 | 525 | return safe_str(url) |
|
526 | 526 | |
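# Usage sketch: uri_tmpl is a '{placeholder}' template filled from `args`
# above. The template, hostname and repo below are hypothetical.
#
#     get_clone_url(request, '{scheme}://{user}@{netloc}/{repo}',
#                   repo_name='project/repo', repo_id=1, repo_type='git',
#                   user='john')
#     # -> 'https://john@code.example.com/project/repo'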
|
527 | 527 | |
|
528 | 528 | def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None, |
|
529 | 529 | maybe_unreachable=False, reference_obj=None): |
|
530 | 530 | """ |
|
531 | 531 | Safe version of get_commit: if the commit doesn't exist for a

532 | 532 | repository, it returns a dummy EmptyCommit instead
|
533 | 533 | |
|
534 | 534 | :param repo: repository instance |
|
535 | 535 | :param commit_id: commit id as str |
|
536 | 536 | :param commit_idx: numeric commit index |
|
537 | 537 | :param pre_load: optional list of commit attributes to load |
|
538 | 538 | :param maybe_unreachable: translate unreachable commits on git repos |
|
539 | 539 | :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123" |
|
540 | 540 | """ |
|
541 | 541 | # TODO(skreft): remove these circular imports |
|
542 | 542 | from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit |
|
543 | 543 | from rhodecode.lib.vcs.exceptions import RepositoryError |
|
544 | 544 | if not isinstance(repo, BaseRepository): |
|
545 | 545 | raise Exception(f'You must pass a Repository '

546 | 546 | f'object as first argument, got {type(repo)}')
|
547 | 547 | |
|
548 | 548 | try: |
|
549 | 549 | commit = repo.get_commit( |
|
550 | 550 | commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load, |
|
551 | 551 | maybe_unreachable=maybe_unreachable, reference_obj=reference_obj) |
|
552 | 552 | except (RepositoryError, LookupError): |
|
553 | 553 | commit = EmptyCommit() |
|
554 | 554 | return commit |
|
555 | 555 | |
|
556 | 556 | |
|
557 | 557 | def datetime_to_time(dt): |
|
558 | 558 | if dt: |
|
559 | 559 | return time.mktime(dt.timetuple()) |
|
560 | 560 | |
|
561 | 561 | |
|
562 | 562 | def time_to_datetime(tm): |
|
563 | 563 | if tm: |
|
564 | 564 | if isinstance(tm, str): |
|
565 | 565 | try: |
|
566 | 566 | tm = float(tm) |
|
567 | 567 | except ValueError: |
|
568 | 568 | return |
|
569 | 569 | return datetime.datetime.fromtimestamp(tm) |
|
570 | 570 | |
|
571 | 571 | |
|
572 | 572 | def time_to_utcdatetime(tm): |
|
573 | 573 | if tm: |
|
574 | 574 | if isinstance(tm, str): |
|
575 | 575 | try: |
|
576 | 576 | tm = float(tm) |
|
577 | 577 | except ValueError: |
|
578 | 578 | return |
|
579 | 579 | return datetime.datetime.utcfromtimestamp(tm) |
|
580 | 580 | |
|
581 | 581 | |
|
582 | 582 | MENTIONS_REGEX = re.compile( |
|
583 | 583 | # ^@ or @ without any special chars in front |
|
584 | 584 | r'(?:^@|[^a-zA-Z0-9\-\_\.]@)' |
|
585 | 585 | # main body starts with letter, then can be . - _ |
|
586 | 586 | r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)', |
|
587 | 587 | re.VERBOSE | re.MULTILINE) |
|
588 | 588 | |
|
589 | 589 | |
|
590 | 590 | def extract_mentioned_users(s): |
|
591 | 591 | """ |
|
592 | 592 | Returns unique usernames that are @mentioned in the given string s

593 | 593 |

594 | 594 | :param s: string to extract mentions from
|
595 | 595 | """ |
|
596 | 596 | usrs = set() |
|
597 | 597 | for username in MENTIONS_REGEX.findall(s): |
|
598 | 598 | usrs.add(username) |
|
599 | 599 | |
|
600 | 600 | return sorted(list(usrs), key=lambda k: k.lower()) |
|
601 | 601 | |
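# Usage sketch (hypothetical comment text):
#
#     extract_mentioned_users('cc @john, review by @Jane.doe please')
#     # -> ['Jane.doe', 'john']   (unique, sorted case-insensitively)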
|
602 | 602 | |
|
603 | 603 | def fix_PATH(os_=None): |
|
604 | 604 | """ |
|
605 | 605 | Get the currently active python path and prepend it to the PATH variable

606 | 606 | to fix issues with subprocess calls and different python versions
|
607 | 607 | """ |
|
608 | 608 | if os_ is None: |
|
609 | 609 | import os |
|
610 | 610 | else: |
|
611 | 611 | os = os_ |
|
612 | 612 | |
|
613 | 613 | cur_path = os.path.split(sys.executable)[0] |
|
614 | 614 | os_path = os.environ['PATH'] |
|
615 | 615 | if not os.environ['PATH'].startswith(cur_path): |
|
616 | 616 | os.environ['PATH'] = f'{cur_path}:{os_path}' |
|
617 | 617 | |
|
618 | 618 | |
|
619 | 619 | def obfuscate_url_pw(engine): |
|
620 | 620 | _url = engine or '' |
|
621 | 621 | try: |
|
622 | 622 | _url = sqlalchemy.engine.url.make_url(engine) |
|
623 | 623 | except Exception: |
|
624 | 624 | pass |
|
625 | 625 | return repr(_url) |
|
626 | 626 | |
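# Usage sketch (hypothetical DSN); SQLAlchemy's URL repr masks the password:
#
#     obfuscate_url_pw('postgresql://rhodecode:secret@localhost/rhodecode')
#     # -> 'postgresql://rhodecode:***@localhost/rhodecode'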
|
627 | 627 | |
|
628 | 628 | def get_server_url(environ): |
|
629 | 629 | req = webob.Request(environ) |
|
630 | 630 | return req.host_url + req.script_name |
|
631 | 631 | |
|
632 | 632 | |
|
633 | 633 | def unique_id(hexlen=32): |
|
634 | 634 | alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz" |
|
635 | 635 | return suuid(truncate_to=hexlen, alphabet=alphabet) |
|
636 | 636 | |
|
637 | 637 | |
|
638 | 638 | def suuid(url=None, truncate_to=22, alphabet=None): |
|
639 | 639 | """ |
|
640 | 640 | Generate and return a short URL safe UUID. |
|
641 | 641 | |
|
642 | 642 | If the url parameter is provided, set the namespace to the provided |
|
643 | 643 | URL and generate a UUID. |
|
644 | 644 | |
|
645 | 645 | :param url: the url to get the uuid for

646 | 646 | :param truncate_to: truncate the basic 22-char UUID to a shorter version
|
647 | 647 | |
|
648 | 648 | The IDs won't be universally unique any longer, but the probability of |
|
649 | 649 | a collision will still be very low. |
|
650 | 650 | """ |
|
651 | 651 | # Define our alphabet. |
|
652 | 652 | _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ" |
|
653 | 653 | |
|
654 | 654 | # If no URL is given, generate a random UUID. |
|
655 | 655 | if url is None: |
|
656 | 656 | unique_id = uuid.uuid4().int |
|
657 | 657 | else: |
|
658 | 658 | unique_id = uuid.uuid3(uuid.NAMESPACE_URL, url).int |
|
659 | 659 | |
|
660 | 660 | alphabet_length = len(_ALPHABET) |
|
661 | 661 | output = [] |
|
662 | 662 | while unique_id > 0: |
|
663 | 663 | digit = unique_id % alphabet_length |
|
664 | 664 | output.append(_ALPHABET[digit]) |
|
665 | 665 | unique_id = int(unique_id / alphabet_length) |
|
666 | 666 | return "".join(output)[:truncate_to] |
|
667 | 667 | |
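# The while loop above is a plain base-N conversion of the UUID integer into
# the custom alphabet. Usage sketch (outputs are illustrative, not actual values):
#
#     suuid()                           # random, e.g. 'P7K2...' (22 chars)
#     suuid(url='https://example.com')  # deterministic for the same url
#     unique_id(hexlen=8)               # 8-char id over the larger alphabet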
|
668 | 668 | |
|
669 | 669 | def get_current_rhodecode_user(request=None): |
|
670 | 670 | """ |
|
671 | 671 | Gets rhodecode user from request |
|
672 | 672 | """ |
|
673 | 673 | import pyramid.threadlocal |
|
674 | 674 | pyramid_request = request or pyramid.threadlocal.get_current_request() |
|
675 | 675 | |
|
676 | 676 | # web case |
|
677 | 677 | if pyramid_request and hasattr(pyramid_request, 'user'): |
|
678 | 678 | return pyramid_request.user |
|
679 | 679 | |
|
680 | 680 | # api case |
|
681 | 681 | if pyramid_request and hasattr(pyramid_request, 'rpc_user'): |
|
682 | 682 | return pyramid_request.rpc_user |
|
683 | 683 | |
|
684 | 684 | return None |
|
685 | 685 | |
|
686 | 686 | |
|
687 | 687 | def action_logger_generic(action, namespace=''): |
|
688 | 688 | """ |
|
689 | 689 | A generic logger for actions useful to the system overview; tries to find

690 | 690 | an acting user for the context of the call, otherwise reports an unknown user
|
691 | 691 | |
|
692 | 692 | :param action: logging message e.g. 'comment 5 deleted'

693 | 693 | :type action: string

694 | 694 |

695 | 695 | :param namespace: namespace of the logging message e.g. 'repo.comments'

696 | 696 | :type namespace: string
|
697 | 697 | |
|
698 | 698 | """ |
|
699 | 699 | |
|
700 | 700 | logger_name = 'rhodecode.actions' |
|
701 | 701 | |
|
702 | 702 | if namespace: |
|
703 | 703 | logger_name += '.' + namespace |
|
704 | 704 | |
|
705 | 705 | log = logging.getLogger(logger_name) |
|
706 | 706 | |
|
707 | 707 | # get a user if we can |
|
708 | 708 | user = get_current_rhodecode_user() |
|
709 | 709 | |
|
710 | 710 | logfunc = log.info |
|
711 | 711 | |
|
712 | 712 | if not user: |
|
713 | 713 | user = '<unknown user>' |
|
714 | 714 | logfunc = log.warning |
|
715 | 715 | |
|
716 | 716 | logfunc(f'Logging action by {user}: {action}') |
|
717 | 717 | |
|
718 | 718 | |
|
719 | 719 | def escape_split(text, sep=',', maxsplit=-1): |
|
720 | 720 | r""" |
|
721 | 721 | Allows for escaping of the separator: e.g. arg='foo\, bar' |
|
722 | 722 | |
|
723 | 723 | It should be noted that, given the way bash et al. do command line parsing, those
|
724 | 724 | single quotes are required. |
|
725 | 725 | """ |
|
726 | 726 | escaped_sep = r'\%s' % sep |
|
727 | 727 | |
|
728 | 728 | if escaped_sep not in text: |
|
729 | 729 | return text.split(sep, maxsplit) |
|
730 | 730 | |
|
731 | 731 | before, _mid, after = text.partition(escaped_sep) |
|
732 | 732 | startlist = before.split(sep, maxsplit) # a regular split is fine here |
|
733 | 733 | unfinished = startlist[-1] |
|
734 | 734 | startlist = startlist[:-1] |
|
735 | 735 | |
|
736 | 736 | # recurse because there may be more escaped separators |
|
737 | 737 | endlist = escape_split(after, sep, maxsplit) |
|
738 | 738 | |
|
739 | 739 | # finish building the escaped value. we use endlist[0] because the first
|
740 | 740 | # part of the string sent in recursion is the rest of the escaped value. |
|
741 | 741 | unfinished += sep + endlist[0] |
|
742 | 742 | |
|
743 | 743 | return startlist + [unfinished] + endlist[1:] # put together all the parts |
|
744 | 744 | |
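# Usage sketch:
#
#     escape_split(r'foo\, bar,baz')  # -> ['foo, bar', 'baz']
#     escape_split('a,b,c')           # -> ['a', 'b', 'c']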
|
745 | 745 | |
|
746 | 746 | class OptionalAttr(object): |
|
747 | 747 | """ |
|
748 | 748 | Special Optional that references another attribute as its value. Example::
|
749 | 749 | |
|
750 | 750 | def test(apiuser, userid=Optional(OAttr('apiuser'))):
|
751 | 751 | user = Optional.extract(userid) |
|
752 | 752 | # calls |
|
753 | 753 | |
|
754 | 754 | """ |
|
755 | 755 | |
|
756 | 756 | def __init__(self, attr_name): |
|
757 | 757 | self.attr_name = attr_name |
|
758 | 758 | |
|
759 | 759 | def __repr__(self): |
|
760 | 760 | return '<OptionalAttr:%s>' % self.attr_name |
|
761 | 761 | |
|
762 | 762 | def __call__(self): |
|
763 | 763 | return self |
|
764 | 764 | |
|
765 | 765 | |
|
766 | 766 | # alias |
|
767 | 767 | OAttr = OptionalAttr |
|
768 | 768 | |
|
769 | 769 | |
|
770 | 770 | class Optional(object): |
|
771 | 771 | """ |
|
772 | 772 | Defines an optional parameter:: |
|
773 | 773 | |
|
774 | 774 | param = param.getval() if isinstance(param, Optional) else param |
|
775 | 775 | param = param() if isinstance(param, Optional) else param |
|
776 | 776 | |
|
777 | 777 | is equivalent to::
|
778 | 778 | |
|
779 | 779 | param = Optional.extract(param) |
|
780 | 780 | |
|
781 | 781 | """ |
|
782 | 782 | |
|
783 | 783 | def __init__(self, type_): |
|
784 | 784 | self.type_ = type_ |
|
785 | 785 | |
|
786 | 786 | def __repr__(self): |
|
787 | 787 | return '<Optional:%s>' % self.type_.__repr__() |
|
788 | 788 | |
|
789 | 789 | def __call__(self): |
|
790 | 790 | return self.getval() |
|
791 | 791 | |
|
792 | 792 | def getval(self): |
|
793 | 793 | """ |
|
794 | 794 | returns value from this Optional instance |
|
795 | 795 | """ |
|
796 | 796 | if isinstance(self.type_, OAttr): |
|
797 | 797 | # use the param's name
|
798 | 798 | return self.type_.attr_name |
|
799 | 799 | return self.type_ |
|
800 | 800 | |
|
801 | 801 | @classmethod |
|
802 | 802 | def extract(cls, val): |
|
803 | 803 | """ |
|
804 | 804 | Extracts value from Optional() instance |
|
805 | 805 | |
|
806 | 806 | :param val: |
|
807 | 807 | :return: the original value if it's not an Optional instance, else

808 | 808 | the value of the instance
|
809 | 809 | """ |
|
810 | 810 | if isinstance(val, cls): |
|
811 | 811 | return val.getval() |
|
812 | 812 | return val |
|
813 | 813 | |
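# Usage sketch for Optional/OAttr as used by the API layer:
#
#     Optional.extract(Optional(True))   # -> True (unwraps the default)
#     Optional.extract('explicit-value') # -> 'explicit-value' (passed through)
#     Optional.extract(Optional(OAttr('apiuser')))  # -> 'apiuser' (the attr name)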
|
814 | 814 | |
|
815 | 815 | def glob2re(pat): |
|
816 | 816 | import fnmatch |
|
817 | 817 | return fnmatch.translate(pat) |
|
818 | 818 | |
|
819 | 819 | |
|
820 | 820 | def parse_byte_string(size_str): |
|
821 | 821 | match = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE) |
|
822 | 822 | if not match: |
|
823 | 823 | raise ValueError(f'Given size:{size_str} is invalid, please make sure ' |
|
824 | 824 | f'to use format of <num>(MB|KB)') |
|
825 | 825 | |
|
826 | 826 | _parts = match.groups() |
|
827 | 827 | num, type_ = _parts |
|
828 | 828 | return int(num) * {'mb': 1024*1024, 'kb': 1024}[type_.lower()] |
|
829 | 829 | |
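# Usage sketch:
#
#     parse_byte_string('10MB')   # -> 10485760 (10 * 1024 * 1024)
#     parse_byte_string('512kb')  # -> 524288 (the match is case-insensitive)
#     parse_byte_string('10GB')   # raises ValueError; only MB and KB are supported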
|
830 | 830 | |
|
831 | 831 | class CachedProperty(object): |
|
832 | 832 | """ |
|
833 | 833 | Lazy attribute, with an option to invalidate the cache by running a method
|
834 | 834 | |
|
835 | 835 | >>> class Foo(object): |
|
836 | 836 | ... |
|
837 | 837 | ... @CachedProperty |
|
838 | 838 | ... def heavy_func(self): |
|
839 | 839 | ... return 'super-calculation' |
|
840 | 840 | ... |
|
841 | 841 | ... foo = Foo() |
|
842 | 842 | ... foo.heavy_func() # first computation |
|
842 | 842 | ... foo.heavy_func  # first computation

843 | 843 | ... foo.heavy_func  # fetched from cache
|
845 | 845 | |
|
846 | 846 | # at this point calling foo.heavy_func() will be re-computed |
|
847 | 847 | """ |
|
848 | 848 | |
|
849 | 849 | def __init__(self, func, func_name=None): |
|
850 | 850 | |
|
851 | 851 | if func_name is None: |
|
852 | 852 | func_name = func.__name__ |
|
853 | 853 | self.data = (func, func_name) |
|
854 | 854 | functools.update_wrapper(self, func) |
|
855 | 855 | |
|
856 | 856 | def __get__(self, inst, class_): |
|
857 | 857 | if inst is None: |
|
858 | 858 | return self |
|
859 | 859 | |
|
860 | 860 | func, func_name = self.data |
|
861 | 861 | value = func(inst) |
|
862 | 862 | inst.__dict__[func_name] = value |
|
863 | 863 | if '_invalidate_prop_cache' not in inst.__dict__: |
|
864 | 864 | inst.__dict__['_invalidate_prop_cache'] = functools.partial( |
|
865 | 865 | self._invalidate_prop_cache, inst) |
|
866 | 866 | return value |
|
867 | 867 | |
|
868 | 868 | def _invalidate_prop_cache(self, inst, name): |
|
869 | 869 | inst.__dict__.pop(name, None) |
|
870 | 870 | |
|
871 | 871 | |
|
872 | 872 | def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True): |
|
873 | 873 | """ |
|
874 | 874 | Retry decorator with exponential backoff. |
|
875 | 875 | |
|
876 | 876 | Parameters |
|
877 | 877 | ---------- |
|
878 | 878 | func : typing.Callable, optional |
|
879 | 879 | Callable on which the decorator is applied, by default None |
|
880 | 880 | exception : Exception or tuple of Exceptions, optional |
|
881 | 881 | Exception(s) that invoke retry, by default Exception |
|
882 | 882 | n_tries : int, optional |
|
883 | 883 | Number of tries before giving up, by default 5 |
|
884 | 884 | delay : int, optional |
|
885 | 885 | Initial delay between retries in seconds, by default 5 |
|
886 | 886 | backoff : int, optional |
|
887 | 887 | Backoff multiplier e.g. value of 2 will double the delay, by default 1 |
|
888 | 888 | logger : bool, optional |
|
889 | 889 | Option to log (True) or print (False), by default True
|
890 | 890 | |
|
891 | 891 | Returns |
|
892 | 892 | ------- |
|
893 | 893 | typing.Callable |
|
894 | 894 | Decorated callable that calls itself when exception(s) occur. |
|
895 | 895 | |
|
896 | 896 | Examples |
|
897 | 897 | -------- |
|
898 | 898 | >>> import random |
|
899 | 899 | >>> @retry(exception=Exception, n_tries=3) |
|
900 | 900 | ... def test_random(text): |
|
901 | 901 | ... x = random.random() |
|
902 | 902 | ... if x < 0.5: |
|
903 | 903 | ... raise Exception("Fail") |
|
904 | 904 | ... else: |
|
905 | 905 | ... print("Success: ", text) |
|
906 | 906 | >>> test_random("It works!") |
|
907 | 907 | """ |
|
908 | 908 | |
|
909 | 909 | if func is None: |
|
910 | 910 | return functools.partial( |
|
911 | 911 | retry, |
|
912 | 912 | exception=exception, |
|
913 | 913 | n_tries=n_tries, |
|
914 | 914 | delay=delay, |
|
915 | 915 | backoff=backoff, |
|
916 | 916 | logger=logger, |
|
917 | 917 | ) |
|
918 | 918 | |
|
919 | 919 | @functools.wraps(func) |
|
920 | 920 | def wrapper(*args, **kwargs): |
|
921 | 921 | _n_tries, n_delay = n_tries, delay |
|
922 | 922 | log = logging.getLogger('rhodecode.retry') |
|
923 | 923 | |
|
924 | 924 | while _n_tries > 1: |
|
925 | 925 | try: |
|
926 | 926 | return func(*args, **kwargs) |
|
927 | 927 | except exception as e: |
|
928 | 928 | e_details = repr(e) |
|
929 | 929 | msg = "Exception on calling func {func}: {e}, " \ |
|
930 | 930 | "Retrying in {n_delay} seconds..."\ |
|
931 | 931 | .format(func=func, e=e_details, n_delay=n_delay) |
|
932 | 932 | if logger: |
|
933 | 933 | log.warning(msg) |
|
934 | 934 | else: |
|
935 | 935 | print(msg) |
|
936 | 936 | time.sleep(n_delay) |
|
937 | 937 | _n_tries -= 1 |
|
938 | 938 | n_delay *= backoff |
|
939 | 939 | |
|
940 | 940 | return func(*args, **kwargs) |
|
941 | 941 | |
|
942 | 942 | return wrapper |
|
943 | 943 | |
|
944 | 944 | |
|
945 | 945 | def user_agent_normalizer(user_agent_raw, safe=True): |
|
946 | 946 | log = logging.getLogger('rhodecode.user_agent_normalizer') |
|
947 | 947 | ua = (user_agent_raw or '').strip().lower() |
|
948 | 948 | ua = ua.replace('"', '') |
|
949 | 949 | |
|
950 | 950 | try: |
|
951 | 951 | if 'mercurial/proto-1.0' in ua: |
|
952 | 952 | ua = ua.replace('mercurial/proto-1.0', '') |
|
953 | 953 | ua = ua.replace('(', '').replace(')', '').strip() |
|
954 | 954 | ua = ua.replace('mercurial ', 'mercurial/') |
|
955 | 955 | elif ua.startswith('git'): |
|
956 | 956 | parts = ua.split(' ') |
|
957 | 957 | if parts: |
|
958 | 958 | ua = parts[0] |
|
959 | 959 | ua = re.sub(r'\.windows\.\d', '', ua).strip() |
|
960 | 960 | |
|
961 | 961 | return ua |
|
962 | 962 | except Exception: |
|
963 | 963 | log.exception('Failed to parse scm user-agent') |
|
964 | 964 | if not safe: |
|
965 | 965 | raise |
|
966 | 966 | |
|
967 | 967 | return ua |
|
968 | 968 | |
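# Usage sketch (hypothetical raw user-agent strings):
#
#     user_agent_normalizer('mercurial/proto-1.0 (Mercurial 6.4.5)')
#     # -> 'mercurial/6.4.5'
#     user_agent_normalizer('git/2.40.1.windows.1')
#     # -> 'git/2.40.1'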
|
969 | 969 | |
|
970 | 970 | def get_available_port(min_port=40000, max_port=55555, use_range=False): |
|
971 | 971 | hostname = '' |
|
972 | for _ in range(min_port, max_port): | |
|
972 | for _check_port in range(min_port, max_port): | |
|
973 | 973 | pick_port = 0 |
|
974 | 974 | if use_range: |
|
975 | 975 | pick_port = random.randint(min_port, max_port) |
|
976 | 976 | |
|
977 | 977 | with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: |
|
978 | 978 | try: |
|
979 | 979 | s.bind((hostname, pick_port)) |
|
980 | 980 | s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) |
|
981 | 981 | return s.getsockname()[1] |
|
982 | except OSError: | |
|
983 | continue | |
|
984 | 982 | except socket.error as e: |
|
985 | 983 | if e.args[0] in [errno.EADDRINUSE, errno.ECONNREFUSED]: |
|
986 | 984 | continue |
|
987 | 985 | raise |
|
986 | except OSError: | |
|
987 | continue |
@@ -1,1984 +1,1984 b'' | |||
|
1 | 1 | # Copyright (C) 2014-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | """ |
|
20 | 20 | Base module for all VCS systems |
|
21 | 21 | """ |
|
22 | 22 | import os |
|
23 | 23 | import re |
|
24 | 24 | import time |
|
25 | 25 | import shutil |
|
26 | 26 | import datetime |
|
27 | 27 | import fnmatch |
|
28 | 28 | import itertools |
|
29 | 29 | import logging |
|
30 | 30 | import dataclasses |
|
31 | 31 | import warnings |
|
32 | 32 | |
|
33 | 33 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
34 | 34 | |
|
35 | 35 | |
|
36 | 36 | import rhodecode |
|
37 | 37 | from rhodecode.translation import lazy_ugettext |
|
38 | 38 | from rhodecode.lib.utils2 import safe_str, CachedProperty |
|
39 | 39 | from rhodecode.lib.vcs.utils import author_name, author_email |
|
40 | 40 | from rhodecode.lib.vcs.conf import settings |
|
41 | 41 | from rhodecode.lib.vcs.exceptions import ( |
|
42 | 42 | CommitError, EmptyRepositoryError, NodeAlreadyAddedError, |
|
43 | 43 | NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError, |
|
44 | 44 | NodeDoesNotExistError, NodeNotChangedError, VCSError, |
|
45 | 45 | ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError, |
|
46 | 46 | RepositoryError) |
|
47 | 47 | |
|
48 | 48 | |
|
49 | 49 | log = logging.getLogger(__name__) |
|
50 | 50 | |
|
51 | 51 | |
|
52 | 52 | FILEMODE_DEFAULT = 0o100644 |
|
53 | 53 | FILEMODE_EXECUTABLE = 0o100755 |
|
54 | 54 | EMPTY_COMMIT_ID = '0' * 40 |
|
55 | 55 | |
|
56 | 56 | |
|
57 | 57 | @dataclasses.dataclass |
|
58 | 58 | class Reference: |
|
59 | 59 | type: str |
|
60 | 60 | name: str |
|
61 | 61 | commit_id: str |
|
62 | 62 | |
|
63 | 63 | def __iter__(self): |
|
64 | 64 | yield self.type |
|
65 | 65 | yield self.name |
|
66 | 66 | yield self.commit_id |
|
67 | 67 | |
|
68 | 68 | @property |
|
69 | 69 | def branch(self): |
|
70 | 70 | if self.type == 'branch': |
|
71 | 71 | return self.name |
|
72 | 72 | |
|
73 | 73 | @property |
|
74 | 74 | def bookmark(self): |
|
75 | 75 | if self.type == 'book': |
|
76 | 76 | return self.name |
|
77 | 77 | |
|
78 | 78 | @property |
|
79 | 79 | def to_str(self): |
|
80 | 80 | return reference_to_unicode(self) |
|
81 | 81 | |
|
82 | 82 | def asdict(self): |
|
83 | 83 | return dict( |
|
84 | 84 | type=self.type, |
|
85 | 85 | name=self.name, |
|
86 | 86 | commit_id=self.commit_id |
|
87 | 87 | ) |
|
88 | 88 | |
|
89 | 89 | |
|
90 | 90 | def unicode_to_reference(raw: str): |
|
91 | 91 | """ |
|
92 | 92 | Convert a unicode (or string) to a reference object. |
|
93 | 93 | If unicode evaluates to False it returns None. |
|
94 | 94 | """ |
|
95 | 95 | if raw: |
|
96 | 96 | refs = raw.split(':') |
|
97 | 97 | return Reference(*refs) |
|
98 | 98 | else: |
|
99 | 99 | return None |
|
100 | 100 | |
|
101 | 101 | |
|
102 | 102 | def reference_to_unicode(ref: Reference): |
|
103 | 103 | """ |
|
104 | 104 | Convert a reference object to unicode. |
|
105 | 105 | If reference is None it returns None. |
|
106 | 106 | """ |
|
107 | 107 | if ref: |
|
108 | 108 | return ':'.join(ref) |
|
109 | 109 | else: |
|
110 | 110 | return None |
|
111 | 111 | |
|
112 | 112 | |
|
113 | 113 | class MergeFailureReason(object): |
|
114 | 114 | """ |
|
115 | 115 | Enumeration with all the reasons why the server side merge could fail. |
|
116 | 116 | |
|
117 | 117 | DO NOT change the number of the reasons, as they may be stored in the |
|
118 | 118 | database. |
|
119 | 119 | |
|
120 | 120 | Changing the name of a reason is acceptable and encouraged to deprecate old |
|
121 | 121 | reasons. |
|
122 | 122 | """ |
|
123 | 123 | |
|
124 | 124 | # Everything went well. |
|
125 | 125 | NONE = 0 |
|
126 | 126 | |
|
127 | 127 | # An unexpected exception was raised. Check the logs for more details. |
|
128 | 128 | UNKNOWN = 1 |
|
129 | 129 | |
|
130 | 130 | # The merge was not successful, there are conflicts. |
|
131 | 131 | MERGE_FAILED = 2 |
|
132 | 132 | |
|
133 | 133 | # The merge succeeded but we could not push it to the target repository. |
|
134 | 134 | PUSH_FAILED = 3 |
|
135 | 135 | |
|
136 | 136 | # The specified target is not a head in the target repository. |
|
137 | 137 | TARGET_IS_NOT_HEAD = 4 |
|
138 | 138 | |
|
139 | 139 | # The source repository contains more branches than the target. Pushing |
|
140 | 140 | # the merge will create additional branches in the target. |
|
141 | 141 | HG_SOURCE_HAS_MORE_BRANCHES = 5 |
|
142 | 142 | |
|
143 | 143 | # The target reference has multiple heads. That does not allow to correctly |
|
144 | 144 | # identify the target location. This could only happen for mercurial |
|
145 | 145 | # branches. |
|
146 | 146 | HG_TARGET_HAS_MULTIPLE_HEADS = 6 |
|
147 | 147 | |
|
148 | 148 | # The target repository is locked |
|
149 | 149 | TARGET_IS_LOCKED = 7 |
|
150 | 150 | |
|
151 | 151 | # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead. |
|
152 | 152 | # A involved commit could not be found. |
|
153 | 153 | _DEPRECATED_MISSING_COMMIT = 8 |
|
154 | 154 | |
|
155 | 155 | # The target repo reference is missing. |
|
156 | 156 | MISSING_TARGET_REF = 9 |
|
157 | 157 | |
|
158 | 158 | # The source repo reference is missing. |
|
159 | 159 | MISSING_SOURCE_REF = 10 |
|
160 | 160 | |
|
161 | 161 | # The merge was not successful, there are conflicts related to sub |
|
162 | 162 | # repositories. |
|
163 | 163 | SUBREPO_MERGE_FAILED = 11 |
|
164 | 164 | |
|
165 | 165 | |
|
166 | 166 | class UpdateFailureReason(object): |
|
167 | 167 | """ |
|
168 | 168 | Enumeration with all the reasons why the pull request update could fail. |
|
169 | 169 | |
|
170 | 170 | DO NOT change the number of the reasons, as they may be stored in the |
|
171 | 171 | database. |
|
172 | 172 | |
|
173 | 173 | Changing the name of a reason is acceptable and encouraged to deprecate old |
|
174 | 174 | reasons. |
|
175 | 175 | """ |
|
176 | 176 | |
|
177 | 177 | # Everything went well. |
|
178 | 178 | NONE = 0 |
|
179 | 179 | |
|
180 | 180 | # An unexpected exception was raised. Check the logs for more details. |
|
181 | 181 | UNKNOWN = 1 |
|
182 | 182 | |
|
183 | 183 | # The pull request is up to date. |
|
184 | 184 | NO_CHANGE = 2 |
|
185 | 185 | |
|
186 | 186 | # The pull request has a reference type that is not supported for update. |
|
187 | 187 | WRONG_REF_TYPE = 3 |
|
188 | 188 | |
|
189 | 189 | # Update failed because the target reference is missing. |
|
190 | 190 | MISSING_TARGET_REF = 4 |
|
191 | 191 | |
|
192 | 192 | # Update failed because the source reference is missing. |
|
193 | 193 | MISSING_SOURCE_REF = 5 |
|
194 | 194 | |
|
195 | 195 | |
|
196 | 196 | class MergeResponse(object): |
|
197 | 197 | |
|
198 | 198 | # uses .format(**metadata) for variables |
|
199 | 199 | MERGE_STATUS_MESSAGES = { |
|
200 | 200 | MergeFailureReason.NONE: lazy_ugettext( |
|
201 | 201 | 'This pull request can be automatically merged.'), |
|
202 | 202 | MergeFailureReason.UNKNOWN: lazy_ugettext( |
|
203 | 203 | 'This pull request cannot be merged because of an unhandled exception. ' |
|
204 | 204 | '{exception}'), |
|
205 | 205 | MergeFailureReason.MERGE_FAILED: lazy_ugettext( |
|
206 | 206 | 'This pull request cannot be merged because of merge conflicts. {unresolved_files}'), |
|
207 | 207 | MergeFailureReason.PUSH_FAILED: lazy_ugettext( |
|
208 | 208 | 'This pull request could not be merged because push to ' |
|
209 | 209 | 'target:`{target}@{merge_commit}` failed.'), |
|
210 | 210 | MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext( |
|
211 | 211 | 'This pull request cannot be merged because the target ' |
|
212 | 212 | '`{target_ref.name}` is not a head.'), |
|
213 | 213 | MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext( |
|
214 | 214 | 'This pull request cannot be merged because the source contains ' |
|
215 | 215 | 'more branches than the target.'), |
|
216 | 216 | MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext( |
|
217 | 217 | 'This pull request cannot be merged because the target `{target_ref.name}` ' |
|
218 | 218 | 'has multiple heads: `{heads}`.'), |
|
219 | 219 | MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext( |
|
220 | 220 | 'This pull request cannot be merged because the target repository is ' |
|
221 | 221 | 'locked by {locked_by}.'), |
|
222 | 222 | |
|
223 | 223 | MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext( |
|
224 | 224 | 'This pull request cannot be merged because the target ' |
|
225 | 225 | 'reference `{target_ref.name}` is missing.'), |
|
226 | 226 | MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext( |
|
227 | 227 | 'This pull request cannot be merged because the source ' |
|
228 | 228 | 'reference `{source_ref.name}` is missing.'), |
|
229 | 229 | MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext( |
|
230 | 230 | 'This pull request cannot be merged because of conflicts related ' |
|
231 | 231 | 'to sub repositories.'), |
|
232 | 232 | |
|
233 | 233 | # Deprecations |
|
234 | 234 | MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext( |
|
235 | 235 | 'This pull request cannot be merged because the target or the ' |
|
236 | 236 | 'source reference is missing.'), |
|
237 | 237 | |
|
238 | 238 | } |
|
239 | 239 | |
|
240 | def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None): | |
|
240 | def __init__(self, possible, executed, merge_ref: Reference, failure_reason, metadata=None): | |
|
241 | 241 | self.possible = possible |
|
242 | 242 | self.executed = executed |
|
243 | 243 | self.merge_ref = merge_ref |
|
244 | 244 | self.failure_reason = failure_reason |
|
245 | 245 | self.metadata = metadata or {} |
|
246 | 246 | |
|
247 | 247 | def __repr__(self): |
|
248 | 248 | return f'<MergeResponse:{self.label} {self.failure_reason}>' |
|
249 | 249 | |
|
250 | 250 | def __eq__(self, other): |
|
251 | 251 | same_instance = isinstance(other, self.__class__) |
|
252 | 252 | return same_instance \ |
|
253 | 253 | and self.possible == other.possible \ |
|
254 | 254 | and self.executed == other.executed \ |
|
255 | 255 | and self.failure_reason == other.failure_reason |
|
256 | 256 | |
|
257 | 257 | @property |
|
258 | 258 | def label(self): |
|
259 | 259 | label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if |
|
260 | 260 | not k.startswith('_')) |
|
261 | 261 | return label_dict.get(self.failure_reason) |
|
262 | 262 | |
|
263 | 263 | @property |
|
264 | 264 | def merge_status_message(self): |
|
265 | 265 | """ |
|
266 | 266 | Return a human friendly error message for the given merge status code. |
|
267 | 267 | """ |
|
268 | 268 | msg = safe_str(self.MERGE_STATUS_MESSAGES[self.failure_reason]) |
|
269 | 269 | |
|
270 | 270 | try: |
|
271 | 271 | return msg.format(**self.metadata) |
|
272 | 272 | except Exception: |
|
273 | 273 | log.exception('Failed to format %s message', self) |
|
274 | 274 | return msg |
|
275 | 275 | |
|
276 | 276 | def asdict(self): |
|
277 | 277 | data = {} |
|
278 | 278 | for k in ['possible', 'executed', 'merge_ref', 'failure_reason', |
|
279 | 279 | 'merge_status_message']: |
|
280 | 280 | data[k] = getattr(self, k) |
|
281 | 281 | return data |
|
282 | 282 | |
|
283 | 283 | |
|
284 | 284 | class TargetRefMissing(ValueError): |
|
285 | 285 | pass |
|
286 | 286 | |
|
287 | 287 | |
|
288 | 288 | class SourceRefMissing(ValueError): |
|
289 | 289 | pass |
|
290 | 290 | |
|
291 | 291 | |
|
292 | 292 | class BaseRepository(object): |
|
293 | 293 | """ |
|
294 | 294 | Base Repository for final backends |
|
295 | 295 | |
|
296 | 296 | .. attribute:: DEFAULT_BRANCH_NAME |
|
297 | 297 | |
|
298 | 298 | name of default branch (i.e. "trunk" for svn, "master" for git etc. |
|
299 | 299 | |
|
300 | 300 | .. attribute:: commit_ids |
|
301 | 301 | |
|
302 | 302 | list of all available commit ids, in ascending order |
|
303 | 303 | |
|
304 | 304 | .. attribute:: path |
|
305 | 305 | |
|
306 | 306 | absolute path to the repository |
|
307 | 307 | |
|
308 | 308 | .. attribute:: bookmarks |
|
309 | 309 | |
|
310 | 310 | Mapping from name to :term:`Commit ID` of the bookmark. Empty in case |
|
311 | 311 | there are no bookmarks or the backend implementation does not support |
|
312 | 312 | bookmarks. |
|
313 | 313 | |
|
314 | 314 | .. attribute:: tags |
|
315 | 315 | |
|
316 | 316 | Mapping from name to :term:`Commit ID` of the tag. |
|
317 | 317 | |
|
318 | 318 | """ |
|
319 | 319 | |
|
320 | 320 | DEFAULT_BRANCH_NAME = None |
|
321 | 321 | DEFAULT_CONTACT = "Unknown" |
|
322 | 322 | DEFAULT_DESCRIPTION = "unknown" |
|
323 | 323 | EMPTY_COMMIT_ID = '0' * 40 |
|
324 | 324 | COMMIT_ID_PAT = re.compile(r'[0-9a-fA-F]{40}') |
|
325 | 325 | |
|
326 | 326 | path = None |
|
327 | 327 | |
|
328 | 328 | _is_empty = None |
|
329 | 329 | _commit_ids = {} |
|
330 | 330 | |
|
331 | 331 | def __init__(self, repo_path, config=None, create=False, **kwargs): |
|
332 | 332 | """ |
|
333 | 333 | Initializes repository. Raises RepositoryError if repository could |
|
334 | 334 | not be find at the given ``repo_path`` or directory at ``repo_path`` |
|
335 | 335 | exists and ``create`` is set to True. |
|
336 | 336 | |
|
337 | 337 | :param repo_path: local path of the repository |
|
338 | 338 | :param config: repository configuration |
|
339 | 339 | :param create=False: if set to True, would try to create repository. |
|
340 | 340 | :param src_url=None: if set, should be proper url from which repository |
|
341 | 341 | would be cloned; requires ``create`` parameter to be set to True - |
|
342 | 342 | raises RepositoryError if src_url is set and create evaluates to |
|
343 | 343 | False |
|
344 | 344 | """ |
|
345 | 345 | raise NotImplementedError |
|
346 | 346 | |
|
347 | 347 | def __repr__(self): |
|
348 | 348 | return f'<{self.__class__.__name__} at {self.path}>' |
|
349 | 349 | |
|
350 | 350 | def __len__(self): |
|
351 | 351 | return self.count() |
|
352 | 352 | |
|
353 | 353 | def __eq__(self, other): |
|
354 | 354 | same_instance = isinstance(other, self.__class__) |
|
355 | 355 | return same_instance and other.path == self.path |
|
356 | 356 | |
|
357 | 357 | def __ne__(self, other): |
|
358 | 358 | return not self.__eq__(other) |
|
359 | 359 | |
|
360 | 360 | def get_create_shadow_cache_pr_path(self, db_repo): |
|
361 | 361 | path = db_repo.cached_diffs_dir |
|
362 | 362 | if not os.path.exists(path): |
|
363 | 363 | os.makedirs(path, 0o755) |
|
364 | 364 | return path |
|
365 | 365 | |
|
366 | 366 | @classmethod |
|
367 | 367 | def get_default_config(cls, default=None): |
|
368 | 368 | config = Config() |
|
369 | 369 | if default and isinstance(default, list): |
|
370 | 370 | for section, key, val in default: |
|
371 | 371 | config.set(section, key, val) |
|
372 | 372 | return config |
|
373 | 373 | |
|
374 | 374 | @LazyProperty |
|
375 | 375 | def _remote(self): |
|
376 | 376 | raise NotImplementedError |
|
377 | 377 | |
|
378 | 378 | def _heads(self, branch=None): |
|
379 | 379 | return [] |
|
380 | 380 | |
|
381 | 381 | @LazyProperty |
|
382 | 382 | def EMPTY_COMMIT(self): |
|
383 | 383 | return EmptyCommit(self.EMPTY_COMMIT_ID) |
|
384 | 384 | |
|
385 | 385 | @LazyProperty |
|
386 | 386 | def alias(self): |
|
387 | 387 | for k, v in settings.BACKENDS.items(): |
|
388 | 388 | if v.split('.')[-1] == str(self.__class__.__name__): |
|
389 | 389 | return k |
|
390 | 390 | |
|
391 | 391 | @LazyProperty |
|
392 | 392 | def name(self): |
|
393 | 393 | return safe_str(os.path.basename(self.path)) |
|
394 | 394 | |
|
395 | 395 | @LazyProperty |
|
396 | 396 | def description(self): |
|
397 | 397 | raise NotImplementedError |
|
398 | 398 | |
|
399 | 399 | def refs(self): |
|
400 | 400 | """ |
|
401 | 401 | returns a `dict` with branches, bookmarks, tags, and closed_branches |
|
402 | 402 | for this repository |
|
403 | 403 | """ |
|
404 | 404 | return dict( |
|
405 | 405 | branches=self.branches, |
|
406 | 406 | branches_closed=self.branches_closed, |
|
407 | 407 | tags=self.tags, |
|
408 | 408 | bookmarks=self.bookmarks |
|
409 | 409 | ) |
|
410 | 410 | |
|
411 | 411 | @LazyProperty |
|
412 | 412 | def branches(self): |
|
413 | 413 | """ |
|
414 | 414 | A `dict` which maps branch names to commit ids. |
|
415 | 415 | """ |
|
416 | 416 | raise NotImplementedError |
|
417 | 417 | |
|
418 | 418 | @LazyProperty |
|
419 | 419 | def branches_closed(self): |
|
420 | 420 | """ |
|
421 | 421 | A `dict` which maps tags names to commit ids. |
|
422 | 422 | """ |
|
423 | 423 | raise NotImplementedError |
|
424 | 424 | |
|
425 | 425 | @LazyProperty |
|
426 | 426 | def bookmarks(self): |
|
427 | 427 | """ |
|
428 | 428 | A `dict` which maps tags names to commit ids. |
|
429 | 429 | """ |
|
430 | 430 | raise NotImplementedError |
|
431 | 431 | |
|
432 | 432 | @LazyProperty |
|
433 | 433 | def tags(self): |
|
434 | 434 | """ |
|
435 | 435 | A `dict` which maps tags names to commit ids. |
|
436 | 436 | """ |
|
437 | 437 | raise NotImplementedError |
|
438 | 438 | |
|
439 | 439 | @LazyProperty |
|
440 | 440 | def size(self): |
|
441 | 441 | """ |
|
442 | 442 | Returns combined size in bytes for all repository files |
|
443 | 443 | """ |
|
444 | 444 | tip = self.get_commit() |
|
445 | 445 | return tip.size |
|
446 | 446 | |
|
447 | 447 | def size_at_commit(self, commit_id): |
|
448 | 448 | commit = self.get_commit(commit_id) |
|
449 | 449 | return commit.size |
|
450 | 450 | |
|
451 | 451 | def _check_for_empty(self): |
|
452 | 452 | no_commits = len(self._commit_ids) == 0 |
|
453 | 453 | if no_commits: |
|
454 | 454 | # check on remote to be sure |
|
455 | 455 | return self._remote.is_empty() |
|
456 | 456 | else: |
|
457 | 457 | return False |
|
458 | 458 | |
|
459 | 459 | def is_empty(self): |
|
460 | 460 | if rhodecode.is_test: |
|
461 | 461 | return self._check_for_empty() |
|
462 | 462 | |
|
463 | 463 | if self._is_empty is None: |
|
464 | 464 | # cache empty for production, but not tests |
|
465 | 465 | self._is_empty = self._check_for_empty() |
|
466 | 466 | |
|
467 | 467 | return self._is_empty |
|
468 | 468 | |
|
469 | 469 | @staticmethod |
|
470 | 470 | def check_url(url, config): |
|
471 | 471 | """ |
|
472 | 472 | Function will check given url and try to verify if it's a valid |
|
473 | 473 | link. |
|
474 | 474 | """ |
|
475 | 475 | raise NotImplementedError |
|
476 | 476 | |
|
477 | 477 | @staticmethod |
|
478 | 478 | def is_valid_repository(path): |
|
479 | 479 | """ |
|
480 | 480 | Check if given `path` contains a valid repository of this backend |
|
481 | 481 | """ |
|
482 | 482 | raise NotImplementedError |
|
483 | 483 | |
|
484 | 484 | # ========================================================================== |
|
485 | 485 | # COMMITS |
|
486 | 486 | # ========================================================================== |
|
487 | 487 | |
|
488 | 488 | @CachedProperty |
|
489 | 489 | def commit_ids(self): |
|
490 | 490 | raise NotImplementedError |
|
491 | 491 | |
|
492 | 492 | def append_commit_id(self, commit_id): |
|
493 | 493 | if commit_id not in self.commit_ids: |
|
494 | 494 | self._rebuild_cache(self.commit_ids + [commit_id]) |
|
495 | 495 | |
|
496 | 496 | # clear cache |
|
497 | 497 | self._invalidate_prop_cache('commit_ids') |
|
498 | 498 | self._is_empty = False |
|
499 | 499 | |
|
500 | 500 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, |
|
501 | 501 | translate_tag=None, maybe_unreachable=False, reference_obj=None): |
|
502 | 502 | """ |
|
503 | 503 | Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx` |
|
504 | 504 | are both None, most recent commit is returned. |
|
505 | 505 | |
|
506 | 506 | :param pre_load: Optional. List of commit attributes to load. |
|
507 | 507 | |
|
508 | 508 | :raises ``EmptyRepositoryError``: if there are no commits |
|
509 | 509 | """ |
|
510 | 510 | raise NotImplementedError |
|
511 | 511 | |
|
512 | 512 | def __iter__(self): |
|
513 | 513 | for commit_id in self.commit_ids: |
|
514 | 514 | yield self.get_commit(commit_id=commit_id) |
|
515 | 515 | |
|
516 | 516 | def get_commits( |
|
517 | 517 | self, start_id=None, end_id=None, start_date=None, end_date=None, |
|
518 | 518 | branch_name=None, show_hidden=False, pre_load=None, translate_tags=None): |
|
519 | 519 | """ |
|
520 | 520 | Returns iterator of `BaseCommit` objects from start to end |
|
521 | 521 | not inclusive. This should behave just like a list, ie. end is not |
|
522 | 522 | inclusive. |
|
523 | 523 | |
|
524 | 524 | :param start_id: None or str, must be a valid commit id |
|
525 | 525 | :param end_id: None or str, must be a valid commit id |
|
526 | 526 | :param start_date: |
|
527 | 527 | :param end_date: |
|
528 | 528 | :param branch_name: |
|
529 | 529 | :param show_hidden: |
|
530 | 530 | :param pre_load: |
|
531 | 531 | :param translate_tags: |
|
532 | 532 | """ |
|
533 | 533 | raise NotImplementedError |
|
534 | 534 | |
|
535 | 535 | def __getitem__(self, key): |
|
536 | 536 | """ |
|
537 | 537 | Allows index based access to the commit objects of this repository. |
|
538 | 538 | """ |
|
539 | 539 | pre_load = ["author", "branch", "date", "message", "parents"] |
|
540 | 540 | if isinstance(key, slice): |
|
541 | 541 | return self._get_range(key, pre_load) |
|
542 | 542 | return self.get_commit(commit_idx=key, pre_load=pre_load) |
|
543 | 543 | |
|
544 | 544 | def _get_range(self, slice_obj, pre_load): |
|
545 | 545 | for commit_id in self.commit_ids.__getitem__(slice_obj): |
|
546 | 546 | yield self.get_commit(commit_id=commit_id, pre_load=pre_load) |
|
547 | 547 | |
|
548 | 548 | def count(self): |
|
549 | 549 | return len(self.commit_ids) |
|
550 | 550 | |
|
551 | 551 | def tag(self, name, user, commit_id=None, message=None, date=None, **opts): |
|
552 | 552 | """ |
|
553 | 553 | Creates and returns a tag for the given ``commit_id``. |
|
554 | 554 | |
|
555 | 555 | :param name: name for new tag |
|
556 | 556 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
557 | 557 | :param commit_id: commit id for which new tag would be created |
|
558 | 558 | :param message: message of the tag's commit |
|
559 | 559 | :param date: date of tag's commit |
|
560 | 560 | |
|
561 | 561 | :raises TagAlreadyExistError: if tag with same name already exists |
|
562 | 562 | """ |
|
563 | 563 | raise NotImplementedError |
|
564 | 564 | |
|
565 | 565 | def remove_tag(self, name, user, message=None, date=None): |
|
566 | 566 | """ |
|
567 | 567 | Removes tag with the given ``name``. |
|
568 | 568 | |
|
569 | 569 | :param name: name of the tag to be removed |
|
570 | 570 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
571 | 571 | :param message: message of the tag's removal commit |
|
572 | 572 | :param date: date of tag's removal commit |
|
573 | 573 | |
|
574 | 574 | :raises TagDoesNotExistError: if tag with given name does not exists |
|
575 | 575 | """ |
|
576 | 576 | raise NotImplementedError |
|
577 | 577 | |
|
578 | 578 | def get_diff( |
|
579 | 579 | self, commit1, commit2, path=None, ignore_whitespace=False, |
|
580 | 580 | context=3, path1=None): |
|
581 | 581 | """ |
|
582 | 582 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
583 | 583 | `commit2` since `commit1`. |
|
584 | 584 | |
|
585 | 585 | :param commit1: Entry point from which diff is shown. Can be |
|
586 | 586 | ``self.EMPTY_COMMIT`` - in this case, patch showing all |
|
587 | 587 | the changes since empty state of the repository until `commit2` |
|
588 | 588 | :param commit2: Until which commit changes should be shown. |
|
589 | 589 | :param path: Can be set to a path of a file to create a diff of that |
|
590 | 590 | file. If `path1` is also set, this value is only associated to |
|
591 | 591 | `commit2`. |
|
592 | 592 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
593 | 593 | changes. Defaults to ``False``. |
|
594 | 594 | :param context: How many lines before/after changed lines should be |
|
595 | 595 | shown. Defaults to ``3``. |
|
596 | 596 | :param path1: Can be set to a path to associate with `commit1`. This |
|
597 | 597 | parameter works only for backends which support diff generation for |
|
598 | 598 | different paths. Other backends will raise a `ValueError` if `path1` |
|
599 | 599 | is set and has a different value than `path`. |
|
600 | 600 | :param file_path: filter this diff by given path pattern |
|
601 | 601 | """ |
|
602 | 602 | raise NotImplementedError |
|
603 | 603 | |
|
604 | 604 | def strip(self, commit_id, branch=None): |
|
605 | 605 | """ |
|
606 | 606 | Strip given commit_id from the repository |
|
607 | 607 | """ |
|
608 | 608 | raise NotImplementedError |
|
609 | 609 | |
|
610 | 610 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): |
|
611 | 611 | """ |
|
612 | 612 | Return a latest common ancestor commit if one exists for this repo |
|
613 | 613 | `commit_id1` vs `commit_id2` from `repo2`. |
|
614 | 614 | |
|
615 | 615 | :param commit_id1: Commit it from this repository to use as a |
|
616 | 616 | target for the comparison. |
|
617 | 617 | :param commit_id2: Source commit id to use for comparison. |
|
618 | 618 | :param repo2: Source repository to use for comparison. |
|
619 | 619 | """ |
|
620 | 620 | raise NotImplementedError |
|
621 | 621 | |
|
622 | 622 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): |
|
623 | 623 | """ |
|
624 | 624 | Compare this repository's revision `commit_id1` with `commit_id2`. |
|
625 | 625 | |
|
626 | 626 | Returns a tuple(commits, ancestor) that would be merged from |
|
627 | 627 | `commit_id2`. Doing a normal compare (``merge=False``), ``None`` |
|
628 | 628 | will be returned as ancestor. |
|
629 | 629 | |
|
630 | 630 | :param commit_id1: Commit it from this repository to use as a |
|
631 | 631 | target for the comparison. |
|
632 | 632 | :param commit_id2: Source commit id to use for comparison. |
|
633 | 633 | :param repo2: Source repository to use for comparison. |
|
634 | 634 | :param merge: If set to ``True`` will do a merge compare which also |
|
635 | 635 | returns the common ancestor. |
|
636 | 636 | :param pre_load: Optional. List of commit attributes to load. |
|
637 | 637 | """ |
|
638 | 638 | raise NotImplementedError |
|
639 | 639 | |
|
640 | 640 | def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref, |
|
641 | 641 | user_name='', user_email='', message='', dry_run=False, |
|
642 | 642 | use_rebase=False, close_branch=False): |
|
643 | 643 | """ |
|
644 | 644 | Merge the revisions specified in `source_ref` from `source_repo` |
|
645 | 645 | onto the `target_ref` of this repository. |
|
646 | 646 | |
|
647 | 647 | `source_ref` and `target_ref` are named tupls with the following |
|
648 | 648 | fields `type`, `name` and `commit_id`. |
|
649 | 649 | |
|
650 | 650 | Returns a MergeResponse named tuple with the following fields |
|
651 | 651 | 'possible', 'executed', 'source_commit', 'target_commit', |
|
652 | 652 | 'merge_commit'. |
|
653 | 653 | |
|
654 | 654 | :param repo_id: `repo_id` target repo id. |
|
655 | 655 | :param workspace_id: `workspace_id` unique identifier. |
|
656 | 656 | :param target_ref: `target_ref` points to the commit on top of which |
|
657 | 657 | the `source_ref` should be merged. |
|
658 | 658 | :param source_repo: The repository that contains the commits to be |
|
659 | 659 | merged. |
|
660 | 660 | :param source_ref: `source_ref` points to the topmost commit from |
|
661 | 661 | the `source_repo` which should be merged. |
|
662 | 662 | :param user_name: Merge commit `user_name`. |
|
663 | 663 | :param user_email: Merge commit `user_email`. |
|
664 | 664 | :param message: Merge commit `message`. |
|
665 | 665 | :param dry_run: If `True` the merge will not take place. |
|
666 | 666 | :param use_rebase: If `True` commits from the source will be rebased |
|
667 | 667 | on top of the target instead of being merged. |
|
668 | 668 | :param close_branch: If `True` the branch will be closed before merging it
|
669 | 669 | """ |
|
670 | 670 | if dry_run: |
|
671 | 671 | message = message or settings.MERGE_DRY_RUN_MESSAGE |
|
672 | 672 | user_email = user_email or settings.MERGE_DRY_RUN_EMAIL |
|
673 | 673 | user_name = user_name or settings.MERGE_DRY_RUN_USER |
|
674 | 674 | else: |
|
675 | 675 | if not user_name: |
|
676 | 676 | raise ValueError('user_name cannot be empty') |
|
677 | 677 | if not user_email: |
|
678 | 678 | raise ValueError('user_email cannot be empty') |
|
679 | 679 | if not message: |
|
680 | 680 | raise ValueError('message cannot be empty') |
|
681 | 681 | |
|
682 | 682 | try: |
|
683 | 683 | return self._merge_repo( |
|
684 | 684 | repo_id, workspace_id, target_ref, source_repo, |
|
685 | 685 | source_ref, message, user_name, user_email, dry_run=dry_run, |
|
686 | 686 | use_rebase=use_rebase, close_branch=close_branch) |
|
687 | 687 | except RepositoryError as exc: |
|
688 | 688 | log.exception('Unexpected failure when running merge, dry-run=%s', dry_run) |
|
689 | 689 | return MergeResponse( |
|
690 | 690 | False, False, None, MergeFailureReason.UNKNOWN, |
|
691 | 691 | metadata={'exception': str(exc)}) |
|
692 | 692 | |
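# Example (illustrative sketch -- `repo`, `source_repo` and the commit ids
# below are assumed, not part of the original source)::
#
#     target_ref = Reference('branch', 'master', 40 * 'a')
#     source_ref = Reference('branch', 'feature', 40 * 'b')
#     resp = repo.merge(
#         repo_id=1, workspace_id='pr-42', target_ref=target_ref,
#         source_repo=source_repo, source_ref=source_ref,
#         user_name='Joe Doe', user_email='joe.doe@example.com',
#         message='Merge feature into master', dry_run=True)
#     if resp.possible:
#         ...  # a real merge could now be run with dry_run=False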
|
693 | 693 | def _merge_repo(self, repo_id, workspace_id, target_ref, |
|
694 | 694 | source_repo, source_ref, merge_message, |
|
695 | 695 | merger_name, merger_email, dry_run=False, |
|
696 | 696 | use_rebase=False, close_branch=False): |
|
697 | 697 | """Internal implementation of merge.""" |
|
698 | 698 | raise NotImplementedError |
|
699 | 699 | |
|
700 | 700 | def _maybe_prepare_merge_workspace( |
|
701 | 701 | self, repo_id, workspace_id, target_ref, source_ref): |
|
702 | 702 | """ |
|
703 | 703 | Create the merge workspace. |
|
704 | 704 | |
|
705 | 705 | :param workspace_id: `workspace_id` unique identifier. |
|
706 | 706 | """ |
|
707 | 707 | raise NotImplementedError |
|
708 | 708 | |
|
709 | 709 | @classmethod |
|
710 | 710 | def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id): |
|
711 | 711 | """ |
|
712 | 712 | Legacy shadow repository path layout used by older versions. We still

713 | 713 | need it for backward compatibility
|
714 | 714 | """ |
|
715 | 715 | return os.path.join( |
|
716 | 716 | os.path.dirname(repo_path), |
|
717 | 717 | f'.__shadow_{os.path.basename(repo_path)}_{workspace_id}') |
|
718 | 718 | |
|
719 | 719 | @classmethod |
|
720 | 720 | def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id): |
|
721 | 721 | # The name of the shadow repository must start with '.', so it is |
|
722 | 722 | # skipped by 'rhodecode.lib.utils.get_filesystem_repos'. |
|
723 | 723 | legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id) |
|
724 | 724 | if os.path.exists(legacy_repository_path): |
|
725 | 725 | return legacy_repository_path |
|
726 | 726 | else: |
|
727 | 727 | return os.path.join( |
|
728 | 728 | os.path.dirname(repo_path), |
|
729 | 729 | f'.__shadow_repo_{repo_id}_{workspace_id}') |
|
730 | 730 | |
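# Example of the resulting layouts (illustrative sketch with assumed inputs)::
#
#     repo._get_shadow_repository_path('/repos/myrepo', 7, 'pr-42')
#     # -> '/repos/.__shadow_repo_7_pr-42', unless the legacy
#     #    '/repos/.__shadow_myrepo_pr-42' already exists on disk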
|
731 | 731 | def cleanup_merge_workspace(self, repo_id, workspace_id): |
|
732 | 732 | """ |
|
733 | 733 | Remove merge workspace. |
|
734 | 734 | |
|
735 | 735 | This function MUST not fail if there is no workspace associated with
|
736 | 736 | the given `workspace_id`. |
|
737 | 737 | |
|
738 | 738 | :param workspace_id: `workspace_id` unique identifier. |
|
739 | 739 | """ |
|
740 | 740 | shadow_repository_path = self._get_shadow_repository_path( |
|
741 | 741 | self.path, repo_id, workspace_id) |
|
742 | 742 | shadow_repository_path_del = '{}.{}.delete'.format( |
|
743 | 743 | shadow_repository_path, time.time()) |
|
744 | 744 | |
|
745 | 745 | # move the shadow repo, so it never conflicts with the one used. |
|
746 | 746 | # we use this method because shutil.rmtree had some edge case problems |
|
747 | 747 | # removing symlinked repositories |
|
748 | 748 | if not os.path.isdir(shadow_repository_path): |
|
749 | 749 | return |
|
750 | 750 | |
|
751 | 751 | shutil.move(shadow_repository_path, shadow_repository_path_del) |
|
752 | 752 | try: |
|
753 | 753 | shutil.rmtree(shadow_repository_path_del, ignore_errors=False) |
|
754 | 754 | except Exception: |
|
755 | 755 | log.exception('Failed to gracefully remove shadow repo under %s', |
|
756 | 756 | shadow_repository_path_del) |
|
757 | 757 | shutil.rmtree(shadow_repository_path_del, ignore_errors=True) |
|
758 | 758 | |
|
759 | 759 | # ========== # |
|
760 | 760 | # COMMIT API # |
|
761 | 761 | # ========== # |
|
762 | 762 | |
|
763 | 763 | @LazyProperty |
|
764 | 764 | def in_memory_commit(self): |
|
765 | 765 | """ |
|
766 | 766 | Returns :class:`InMemoryCommit` object for this repository. |
|
767 | 767 | """ |
|
768 | 768 | raise NotImplementedError |
|
769 | 769 | |
|
770 | 770 | # ======================== # |
|
771 | 771 | # UTILITIES FOR SUBCLASSES # |
|
772 | 772 | # ======================== # |
|
773 | 773 | |
|
774 | 774 | def _validate_diff_commits(self, commit1, commit2): |
|
775 | 775 | """ |
|
776 | 776 | Validates that the given commits are related to this repository. |
|
777 | 777 | |
|
778 | 778 | Intended as a utility for subclasses to have a consistent validation
|
779 | 779 | of input parameters in methods like :meth:`get_diff`. |
|
780 | 780 | """ |
|
781 | 781 | self._validate_commit(commit1) |
|
782 | 782 | self._validate_commit(commit2) |
|
783 | 783 | if (isinstance(commit1, EmptyCommit) and |
|
784 | 784 | isinstance(commit2, EmptyCommit)): |
|
785 | 785 | raise ValueError("Cannot compare two empty commits") |
|
786 | 786 | |
|
787 | 787 | def _validate_commit(self, commit): |
|
788 | 788 | if not isinstance(commit, BaseCommit): |
|
789 | 789 | raise TypeError( |
|
790 | 790 | "%s is not of type BaseCommit" % repr(commit)) |
|
791 | 791 | if commit.repository != self and not isinstance(commit, EmptyCommit): |
|
792 | 792 | raise ValueError( |
|
793 | 793 | "Commit %s must be a valid commit from this repository %s, " |
|
794 | 794 | "related to this repository instead %s." % |
|
795 | 795 | (commit, self, commit.repository)) |
|
796 | 796 | |
|
797 | 797 | def _validate_commit_id(self, commit_id): |
|
798 | 798 | if not isinstance(commit_id, str): |
|
799 | 799 | raise TypeError(f"commit_id must be a string value got {type(commit_id)} instead") |
|
800 | 800 | |
|
801 | 801 | def _validate_commit_idx(self, commit_idx): |
|
802 | 802 | if not isinstance(commit_idx, int): |
|
803 | 803 | raise TypeError(f"commit_idx must be a numeric value, got {type(commit_idx)}") |
|
804 | 804 | |
|
805 | 805 | def _validate_branch_name(self, branch_name): |
|
806 | 806 | if branch_name and branch_name not in self.branches_all: |
|
807 | 807 | msg = (f"Branch {branch_name} not found in {self}") |
|
808 | 808 | raise BranchDoesNotExistError(msg) |
|
809 | 809 | |
|
810 | 810 | # |
|
811 | 811 | # Supporting deprecated API parts |
|
812 | 812 | # TODO: johbo: consider to move this into a mixin |
|
813 | 813 | # |
|
814 | 814 | |
|
815 | 815 | @property |
|
816 | 816 | def EMPTY_CHANGESET(self): |
|
817 | 817 | warnings.warn( |
|
818 | 818 | "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning) |
|
819 | 819 | return self.EMPTY_COMMIT_ID |
|
820 | 820 | |
|
821 | 821 | @property |
|
822 | 822 | def revisions(self): |
|
823 | 823 | warnings.warn("Use commits attribute instead", DeprecationWarning) |
|
824 | 824 | return self.commit_ids |
|
825 | 825 | |
|
826 | 826 | @revisions.setter |
|
827 | 827 | def revisions(self, value): |
|
828 | 828 | warnings.warn("Use commits attribute instead", DeprecationWarning) |
|
829 | 829 | self.commit_ids = value |
|
830 | 830 | |
|
831 | 831 | def get_changeset(self, revision=None, pre_load=None): |
|
832 | 832 | warnings.warn("Use get_commit instead", DeprecationWarning) |
|
833 | 833 | commit_id = None |
|
834 | 834 | commit_idx = None |
|
835 | 835 | if isinstance(revision, str): |
|
836 | 836 | commit_id = revision |
|
837 | 837 | else: |
|
838 | 838 | commit_idx = revision |
|
839 | 839 | return self.get_commit( |
|
840 | 840 | commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load) |
|
841 | 841 | |
|
842 | 842 | def get_changesets( |
|
843 | 843 | self, start=None, end=None, start_date=None, end_date=None, |
|
844 | 844 | branch_name=None, pre_load=None): |
|
845 | 845 | warnings.warn("Use get_commits instead", DeprecationWarning) |
|
846 | 846 | start_id = self._revision_to_commit(start) |
|
847 | 847 | end_id = self._revision_to_commit(end) |
|
848 | 848 | return self.get_commits( |
|
849 | 849 | start_id=start_id, end_id=end_id, start_date=start_date, |
|
850 | 850 | end_date=end_date, branch_name=branch_name, pre_load=pre_load) |
|
851 | 851 | |
|
852 | 852 | def _revision_to_commit(self, revision): |
|
853 | 853 | """ |
|
854 | 854 | Translates a revision to a commit_id |
|
855 | 855 | |
|
856 | 856 | Helps to support the old changeset-based API which allows using

857 | 857 | commit ids and commit indices interchangeably.
|
858 | 858 | """ |
|
859 | 859 | if revision is None: |
|
860 | 860 | return revision |
|
861 | 861 | |
|
862 | 862 | if isinstance(revision, str): |
|
863 | 863 | commit_id = revision |
|
864 | 864 | else: |
|
865 | 865 | commit_id = self.commit_ids[revision] |
|
866 | 866 | return commit_id |
|
867 | 867 | |
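# Example (illustrative sketch; `repo` is an assumed instance)::
#
#     repo._revision_to_commit('deadbeef' * 5)  # strings pass through as-is
#     repo._revision_to_commit(0)               # indices resolve via commit_ids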
|
868 | 868 | @property |
|
869 | 869 | def in_memory_changeset(self): |
|
870 | 870 | warnings.warn("Use in_memory_commit instead", DeprecationWarning) |
|
871 | 871 | return self.in_memory_commit |
|
872 | 872 | |
|
873 | 873 | def get_path_permissions(self, username): |
|
874 | 874 | """ |
|
875 | 875 | Returns a path permission checker or None if not supported |
|
876 | 876 | |
|
877 | 877 | :param username: session user name |
|
878 | 878 | :return: an instance of BasePathPermissionChecker or None |
|
879 | 879 | """ |
|
880 | 880 | return None |
|
881 | 881 | |
|
882 | 882 | def install_hooks(self, force=False): |
|
883 | 883 | return self._remote.install_hooks(force) |
|
884 | 884 | |
|
885 | 885 | def get_hooks_info(self): |
|
886 | 886 | return self._remote.get_hooks_info() |
|
887 | 887 | |
|
888 | 888 | def vcsserver_invalidate_cache(self, delete=False): |
|
889 | 889 | return self._remote.vcsserver_invalidate_cache(delete) |
|
890 | 890 | |
|
891 | 891 | |
|
892 | 892 | class BaseCommit(object): |
|
893 | 893 | """ |
|
894 | 894 | Each backend should implement its commit representation.
|
895 | 895 | |
|
896 | 896 | **Attributes** |
|
897 | 897 | |
|
898 | 898 | ``repository`` |
|
899 | 899 | repository object within which commit exists |
|
900 | 900 | |
|
901 | 901 | ``id`` |
|
902 | 902 | The commit id; may be ``raw_id`` or, e.g. for mercurial's tip,

903 | 903 | just ``tip``.
|
904 | 904 | |
|
905 | 905 | ``raw_id`` |
|
906 | 906 | raw commit representation (e.g. the full 40 character sha for the git
|
907 | 907 | backend) |
|
908 | 908 | |
|
909 | 909 | ``short_id`` |
|
910 | 910 | shortened (if applicable) version of ``raw_id``; a simple

911 | 911 | shortcut for ``raw_id[:12]`` for git/mercurial backends, or the same

912 | 912 | as ``raw_id`` for subversion
|
913 | 913 | |
|
914 | 914 | ``idx`` |
|
915 | 915 | commit index |
|
916 | 916 | |
|
917 | 917 | ``files`` |
|
918 | 918 | list of ``FileNode`` (``Node`` with NodeKind.FILE) objects |
|
919 | 919 | |
|
920 | 920 | ``dirs`` |
|
921 | 921 | list of ``DirNode`` (``Node`` with NodeKind.DIR) objects |
|
922 | 922 | |
|
923 | 923 | ``nodes`` |
|
924 | 924 | combined list of ``Node`` objects |
|
925 | 925 | |
|
926 | 926 | ``author`` |
|
927 | 927 | author of the commit, as unicode |
|
928 | 928 | |
|
929 | 929 | ``message`` |
|
930 | 930 | message of the commit, as unicode |
|
931 | 931 | |
|
932 | 932 | ``parents`` |
|
933 | 933 | list of parent commits |
|
934 | 934 | |
|
935 | 935 | """ |
|
936 | 936 | repository = None |
|
937 | 937 | branch = None |
|
938 | 938 | |
|
939 | 939 | """ |
|
940 | 940 | Depending on the backend this should be set to the branch name of the |
|
941 | 941 | commit. Backends not supporting branches on commits should leave this |
|
942 | 942 | value as ``None``. |
|
943 | 943 | """ |
|
944 | 944 | |
|
945 | 945 | _ARCHIVE_PREFIX_TEMPLATE = '{repo_name}-{short_id}' |
|
946 | 946 | """ |
|
947 | 947 | This template is used to generate a default prefix for repository archives |
|
948 | 948 | if no prefix has been specified. |
|
949 | 949 | """ |
|
950 | 950 | |
|
951 | 951 | def __repr__(self): |
|
952 | 952 | return self.__str__() |
|
953 | 953 | |
|
954 | 954 | def __str__(self): |
|
955 | 955 | return f'<{self.__class__.__name__} at {self.idx}:{self.short_id}>' |
|
956 | 956 | |
|
957 | 957 | def __eq__(self, other): |
|
958 | 958 | same_instance = isinstance(other, self.__class__) |
|
959 | 959 | return same_instance and self.raw_id == other.raw_id |
|
960 | 960 | |
|
961 | 961 | def __json__(self): |
|
962 | 962 | parents = [] |
|
963 | 963 | try: |
|
964 | 964 | for parent in self.parents: |
|
965 | 965 | parents.append({'raw_id': parent.raw_id}) |
|
966 | 966 | except NotImplementedError: |
|
967 | 967 | # empty commit doesn't have parents implemented |
|
968 | 968 | pass |
|
969 | 969 | |
|
970 | 970 | return { |
|
971 | 971 | 'short_id': self.short_id, |
|
972 | 972 | 'raw_id': self.raw_id, |
|
973 | 973 | 'revision': self.idx, |
|
974 | 974 | 'message': self.message, |
|
975 | 975 | 'date': self.date, |
|
976 | 976 | 'author': self.author, |
|
977 | 977 | 'parents': parents, |
|
978 | 978 | 'branch': self.branch |
|
979 | 979 | } |
|
980 | 980 | |
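# Example shape of the __json__()/serialize() payload (values are assumed,
# shown for illustration only)::
#
#     {'short_id': 'f00dbabe1234', 'raw_id': '<40 char sha>',
#      'revision': 7, 'message': 'Fix bug', 'date': <datetime>,
#      'author': 'Joe Doe <joe.doe@example.com>',
#      'parents': [{'raw_id': '<40 char sha>'}], 'branch': 'master'}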
|
981 | 981 | def __getstate__(self): |
|
982 | 982 | d = self.__dict__.copy() |
|
983 | 983 | d.pop('_remote', None) |
|
984 | 984 | d.pop('repository', None) |
|
985 | 985 | return d |
|
986 | 986 | |
|
987 | 987 | def get_remote(self): |
|
988 | 988 | return self._remote |
|
989 | 989 | |
|
990 | 990 | def serialize(self): |
|
991 | 991 | return self.__json__() |
|
992 | 992 | |
|
993 | 993 | def _get_refs(self): |
|
994 | 994 | return { |
|
995 | 995 | 'branches': [self.branch] if self.branch else [], |
|
996 | 996 | 'bookmarks': getattr(self, 'bookmarks', []), |
|
997 | 997 | 'tags': self.tags |
|
998 | 998 | } |
|
999 | 999 | |
|
1000 | 1000 | @LazyProperty |
|
1001 | 1001 | def last(self): |
|
1002 | 1002 | """ |
|
1003 | 1003 | ``True`` if this is the last commit in the repository, ``False``

1004 | 1004 | otherwise; trying to access this attribute when there are no

1005 | 1005 | commits raises `EmptyRepositoryError`
|
1006 | 1006 | """ |
|
1007 | 1007 | if self.repository is None: |
|
1008 | 1008 | raise CommitError("Cannot check if it's most recent commit") |
|
1009 | 1009 | return self.raw_id == self.repository.commit_ids[-1] |
|
1010 | 1010 | |
|
1011 | 1011 | @LazyProperty |
|
1012 | 1012 | def parents(self): |
|
1013 | 1013 | """ |
|
1014 | 1014 | Returns list of parent commits. |
|
1015 | 1015 | """ |
|
1016 | 1016 | raise NotImplementedError |
|
1017 | 1017 | |
|
1018 | 1018 | @LazyProperty |
|
1019 | 1019 | def first_parent(self): |
|
1020 | 1020 | """ |
|
1021 | 1021 | Returns the first parent commit, or an ``EmptyCommit`` if there is none.
|
1022 | 1022 | """ |
|
1023 | 1023 | return self.parents[0] if self.parents else EmptyCommit() |
|
1024 | 1024 | |
|
1025 | 1025 | @property |
|
1026 | 1026 | def merge(self): |
|
1027 | 1027 | """ |
|
1028 | 1028 | Returns ``True`` if the commit is a merge, i.e. has more than one parent.
|
1029 | 1029 | """ |
|
1030 | 1030 | return len(self.parents) > 1 |
|
1031 | 1031 | |
|
1032 | 1032 | @LazyProperty |
|
1033 | 1033 | def children(self): |
|
1034 | 1034 | """ |
|
1035 | 1035 | Returns list of child commits. |
|
1036 | 1036 | """ |
|
1037 | 1037 | raise NotImplementedError |
|
1038 | 1038 | |
|
1039 | 1039 | @LazyProperty |
|
1040 | 1040 | def id(self): |
|
1041 | 1041 | """ |
|
1042 | 1042 | Returns string identifying this commit. |
|
1043 | 1043 | """ |
|
1044 | 1044 | raise NotImplementedError |
|
1045 | 1045 | |
|
1046 | 1046 | @LazyProperty |
|
1047 | 1047 | def raw_id(self): |
|
1048 | 1048 | """ |
|
1049 | 1049 | Returns raw string identifying this commit. |
|
1050 | 1050 | """ |
|
1051 | 1051 | raise NotImplementedError |
|
1052 | 1052 | |
|
1053 | 1053 | @LazyProperty |
|
1054 | 1054 | def short_id(self): |
|
1055 | 1055 | """ |
|
1056 | 1056 | Returns shortened version of ``raw_id`` attribute, as string, |
|
1057 | 1057 | identifying this commit, useful for presentation to users. |
|
1058 | 1058 | """ |
|
1059 | 1059 | raise NotImplementedError |
|
1060 | 1060 | |
|
1061 | 1061 | @LazyProperty |
|
1062 | 1062 | def idx(self): |
|
1063 | 1063 | """ |
|
1064 | 1064 | Returns integer identifying this commit. |
|
1065 | 1065 | """ |
|
1066 | 1066 | raise NotImplementedError |
|
1067 | 1067 | |
|
1068 | 1068 | @LazyProperty |
|
1069 | 1069 | def committer(self): |
|
1070 | 1070 | """ |
|
1071 | 1071 | Returns committer for this commit |
|
1072 | 1072 | """ |
|
1073 | 1073 | raise NotImplementedError |
|
1074 | 1074 | |
|
1075 | 1075 | @LazyProperty |
|
1076 | 1076 | def committer_name(self): |
|
1077 | 1077 | """ |
|
1078 | 1078 | Returns committer name for this commit |
|
1079 | 1079 | """ |
|
1080 | 1080 | |
|
1081 | 1081 | return author_name(self.committer) |
|
1082 | 1082 | |
|
1083 | 1083 | @LazyProperty |
|
1084 | 1084 | def committer_email(self): |
|
1085 | 1085 | """ |
|
1086 | 1086 | Returns committer email address for this commit |
|
1087 | 1087 | """ |
|
1088 | 1088 | |
|
1089 | 1089 | return author_email(self.committer) |
|
1090 | 1090 | |
|
1091 | 1091 | @LazyProperty |
|
1092 | 1092 | def author(self): |
|
1093 | 1093 | """ |
|
1094 | 1094 | Returns author for this commit |
|
1095 | 1095 | """ |
|
1096 | 1096 | |
|
1097 | 1097 | raise NotImplementedError |
|
1098 | 1098 | |
|
1099 | 1099 | @LazyProperty |
|
1100 | 1100 | def author_name(self): |
|
1101 | 1101 | """ |
|
1102 | 1102 | Returns author name for this commit |
|
1103 | 1103 | """ |
|
1104 | 1104 | |
|
1105 | 1105 | return author_name(self.author) |
|
1106 | 1106 | |
|
1107 | 1107 | @LazyProperty |
|
1108 | 1108 | def author_email(self): |
|
1109 | 1109 | """ |
|
1110 | 1110 | Returns author email address for this commit |
|
1111 | 1111 | """ |
|
1112 | 1112 | |
|
1113 | 1113 | return author_email(self.author) |
|
1114 | 1114 | |
|
1115 | 1115 | def get_file_mode(self, path: bytes): |
|
1116 | 1116 | """ |
|
1117 | 1117 | Returns stat mode of the file at `path`. |
|
1118 | 1118 | """ |
|
1119 | 1119 | raise NotImplementedError |
|
1120 | 1120 | |
|
1121 | 1121 | def is_link(self, path): |
|
1122 | 1122 | """ |
|
1123 | 1123 | Returns ``True`` if given `path` is a symlink |
|
1124 | 1124 | """ |
|
1125 | 1125 | raise NotImplementedError |
|
1126 | 1126 | |
|
1127 | 1127 | def is_node_binary(self, path): |
|
1128 | 1128 | """ |
|
1129 | 1129 | Returns ``True`` if the given path is a binary file
|
1130 | 1130 | """ |
|
1131 | 1131 | raise NotImplementedError |
|
1132 | 1132 | |
|
1133 | 1133 | def node_md5_hash(self, path): |
|
1134 | 1134 | """ |
|
1135 | 1135 | Returns the md5 hash of the node's data
|
1136 | 1136 | """ |
|
1137 | 1137 | raise NotImplementedError |
|
1138 | 1138 | |
|
1139 | 1139 | def get_file_content(self, path) -> bytes: |
|
1140 | 1140 | """ |
|
1141 | 1141 | Returns content of the file at the given `path`. |
|
1142 | 1142 | """ |
|
1143 | 1143 | raise NotImplementedError |
|
1144 | 1144 | |
|
1145 | 1145 | def get_file_content_streamed(self, path): |
|
1146 | 1146 | """ |
|
1147 | 1147 | Returns a streaming response from vcsserver with the file content
|
1148 | 1148 | """ |
|
1149 | 1149 | raise NotImplementedError |
|
1150 | 1150 | |
|
1151 | 1151 | def get_file_size(self, path): |
|
1152 | 1152 | """ |
|
1153 | 1153 | Returns size of the file at the given `path`. |
|
1154 | 1154 | """ |
|
1155 | 1155 | raise NotImplementedError |
|
1156 | 1156 | |
|
1157 | 1157 | def get_path_commit(self, path, pre_load=None): |
|
1158 | 1158 | """ |
|
1159 | 1159 | Returns last commit of the file at the given `path`. |
|
1160 | 1160 | |
|
1161 | 1161 | :param pre_load: Optional. List of commit attributes to load. |
|
1162 | 1162 | """ |
|
1163 | 1163 | commits = self.get_path_history(path, limit=1, pre_load=pre_load) |
|
1164 | 1164 | if not commits: |
|
1165 | 1165 | raise RepositoryError( |
|
1166 | 1166 | 'Failed to fetch history for path {}. ' |
|
1167 | 1167 | 'Please check if such path exists in your repository'.format( |
|
1168 | 1168 | path)) |
|
1169 | 1169 | return commits[0] |
|
1170 | 1170 | |
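# Example (illustrative sketch; `commit` is an assumed instance)::
#
#     last = commit.get_path_commit('README.rst', pre_load=['message'])
#     history = commit.get_path_history('README.rst', limit=10)
#     # `history` is a reversed list of commits that touched the path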
|
1171 | 1171 | def get_path_history(self, path, limit=None, pre_load=None): |
|
1172 | 1172 | """ |
|
1173 | 1173 | Returns history of file as reversed list of :class:`BaseCommit` |
|
1174 | 1174 | objects for which file at given `path` has been modified. |
|
1175 | 1175 | |
|
1176 | 1176 | :param limit: Optional. Allows to limit the size of the returned |
|
1177 | 1177 | history. This is intended as a hint to the underlying backend, so |
|
1178 | 1178 | that it can apply optimizations depending on the limit. |
|
1179 | 1179 | :param pre_load: Optional. List of commit attributes to load. |
|
1180 | 1180 | """ |
|
1181 | 1181 | raise NotImplementedError |
|
1182 | 1182 | |
|
1183 | 1183 | def get_file_annotate(self, path, pre_load=None): |
|
1184 | 1184 | """ |
|
1185 | 1185 | Returns a generator of four-element tuples with
|
1186 | 1186 | lineno, sha, commit lazy loader and line |
|
1187 | 1187 | |
|
1188 | 1188 | :param pre_load: Optional. List of commit attributes to load. |
|
1189 | 1189 | """ |
|
1190 | 1190 | raise NotImplementedError |
|
1191 | 1191 | |
|
1192 | 1192 | def get_nodes(self, path, pre_load=None): |
|
1193 | 1193 | """ |
|
1194 | 1194 | Returns combined ``DirNode`` and ``FileNode`` objects list representing |
|
1195 | 1195 | state of commit at the given ``path``. |
|
1196 | 1196 | |
|
1197 | 1197 | :raises ``CommitError``: if node at the given ``path`` is not |
|
1198 | 1198 | instance of ``DirNode`` |
|
1199 | 1199 | """ |
|
1200 | 1200 | raise NotImplementedError |
|
1201 | 1201 | |
|
1202 | 1202 | def get_node(self, path): |
|
1203 | 1203 | """ |
|
1204 | 1204 | Returns ``Node`` object from the given ``path``. |
|
1205 | 1205 | |
|
1206 | 1206 | :raises ``NodeDoesNotExistError``: if there is no node at the given |
|
1207 | 1207 | ``path`` |
|
1208 | 1208 | """ |
|
1209 | 1209 | raise NotImplementedError |
|
1210 | 1210 | |
|
1211 | 1211 | def get_largefile_node(self, path): |
|
1212 | 1212 | """ |
|
1213 | 1213 | Returns the path to the largefile from Mercurial largefiles/Git LFS

1214 | 1214 | storage, or ``None`` if it's not a largefile node
|
1215 | 1215 | """ |
|
1216 | 1216 | return None |
|
1217 | 1217 | |
|
1218 | 1218 | def archive_repo(self, archive_name_key, kind='tgz', subrepos=None, |
|
1219 | 1219 | archive_dir_name=None, write_metadata=False, mtime=None, |
|
1220 | 1220 | archive_at_path='/', cache_config=None): |
|
1221 | 1221 | """ |
|
1222 | 1222 | Creates an archive containing the contents of the repository. |
|
1223 | 1223 | |
|
1224 | 1224 | :param archive_name_key: unique key under which this archive should be generated
|
1225 | 1225 | :param kind: one of the following: ``"tbz2"``, ``"tgz"``, ``"zip"``. |
|
1226 | 1226 | :param archive_dir_name: name of root directory in archive. |
|
1227 | 1227 | Default is repository name and commit's short_id joined with dash: |
|
1228 | 1228 | ``"{repo_name}-{short_id}"``. |
|
1229 | 1229 | :param write_metadata: write a metadata file into archive. |
|
1230 | 1230 | :param mtime: custom modification time for archive creation, defaults |
|
1231 | 1231 | to the commit's date if not given.
|
1232 | 1232 | :param archive_at_path: pack files at this path (default '/') |
|
1233 | 1233 | :param cache_config: config spec to send to vcsserver to configure the backend to store files |
|
1234 | 1234 | |
|
1235 | 1235 | :raise VCSError: If prefix has a problem. |
|
1236 | 1236 | """ |
|
1237 | 1237 | cache_config = cache_config or {} |
|
1238 | 1238 | allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS] |
|
1239 | 1239 | if kind not in allowed_kinds: |
|
1240 | 1240 | raise ImproperArchiveTypeError( |
|
1241 | 1241 | f'Archive kind ({kind}) not supported use one of {allowed_kinds}') |
|
1242 | 1242 | |
|
1243 | 1243 | archive_dir_name = self._validate_archive_prefix(archive_dir_name) |
|
1244 | 1244 | mtime = mtime if mtime is not None else time.mktime(self.date.timetuple())
|
1245 | 1245 | commit_id = self.raw_id |
|
1246 | 1246 | |
|
1247 | 1247 | return self.repository._remote.archive_repo( |
|
1248 | 1248 | archive_name_key, kind, mtime, archive_at_path, |
|
1249 | 1249 | archive_dir_name, commit_id, cache_config) |
|
1250 | 1250 | |
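# Example (illustrative sketch; the archive key is an assumed value)::
#
#     commit.archive_repo('myrepo-f00dbabe.tgz', kind='tgz')
#     # archive_dir_name defaults to '{repo_name}-{short_id}' and
#     # mtime defaults to the commit's date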
|
1251 | 1251 | def _validate_archive_prefix(self, archive_dir_name): |
|
1252 | 1252 | if archive_dir_name is None: |
|
1253 | 1253 | archive_dir_name = self._ARCHIVE_PREFIX_TEMPLATE.format( |
|
1254 | 1254 | repo_name=safe_str(self.repository.name), |
|
1255 | 1255 | short_id=self.short_id) |
|
1256 | 1256 | elif not isinstance(archive_dir_name, str): |
|
1257 | 1257 | raise ValueError(f"archive_dir_name is not str object but: {type(archive_dir_name)}") |
|
1258 | 1258 | elif archive_dir_name.startswith('/'): |
|
1259 | 1259 | raise VCSError("Prefix cannot start with leading slash") |
|
1260 | 1260 | elif archive_dir_name.strip() == '': |
|
1261 | 1261 | raise VCSError("Prefix cannot be empty") |
|
1262 | 1262 | elif not archive_dir_name.isascii(): |
|
1263 | 1263 | raise VCSError("Prefix cannot contain non ascii characters") |
|
1264 | 1264 | return archive_dir_name |
|
1265 | 1265 | |
|
1266 | 1266 | @LazyProperty |
|
1267 | 1267 | def root(self): |
|
1268 | 1268 | """ |
|
1269 | 1269 | Returns ``RootNode`` object for this commit. |
|
1270 | 1270 | """ |
|
1271 | 1271 | return self.get_node('') |
|
1272 | 1272 | |
|
1273 | 1273 | def next(self, branch=None): |
|
1274 | 1274 | """ |
|
1275 | 1275 | Returns the next commit from the current one; if `branch` is given it

1276 | 1276 | will return the next commit belonging to that branch
|
1277 | 1277 | |
|
1278 | 1278 | :param branch: show commits within the given named branch |
|
1279 | 1279 | """ |
|
1280 | 1280 | indexes = range(self.idx + 1, self.repository.count()) |
|
1281 | 1281 | return self._find_next(indexes, branch) |
|
1282 | 1282 | |
|
1283 | 1283 | def prev(self, branch=None): |
|
1284 | 1284 | """ |
|
1285 | 1285 | Returns the previous commit from the current one; if `branch` is given

1286 | 1286 | it will return the previous commit belonging to that branch
|
1287 | 1287 | |
|
1288 | 1288 | :param branch: show commit within the given named branch |
|
1289 | 1289 | """ |
|
1290 | 1290 | indexes = range(self.idx - 1, -1, -1) |
|
1291 | 1291 | return self._find_next(indexes, branch) |
|
1292 | 1292 | |
|
1293 | 1293 | def _find_next(self, indexes, branch=None): |
|
1294 | 1294 | if branch and self.branch != branch: |
|
1295 | 1295 | raise VCSError('Branch option used on commit not belonging ' |
|
1296 | 1296 | 'to that branch') |
|
1297 | 1297 | |
|
1298 | 1298 | for next_idx in indexes: |
|
1299 | 1299 | commit = self.repository.get_commit(commit_idx=next_idx) |
|
1300 | 1300 | if branch and branch != commit.branch: |
|
1301 | 1301 | continue |
|
1302 | 1302 | return commit |
|
1303 | 1303 | raise CommitDoesNotExistError |
|
1304 | 1304 | |
|
1305 | 1305 | def diff(self, ignore_whitespace=True, context=3): |
|
1306 | 1306 | """ |
|
1307 | 1307 | Returns a `Diff` object representing the change made by this commit. |
|
1308 | 1308 | """ |
|
1309 | 1309 | parent = self.first_parent |
|
1310 | 1310 | diff = self.repository.get_diff( |
|
1311 | 1311 | parent, self, |
|
1312 | 1312 | ignore_whitespace=ignore_whitespace, |
|
1313 | 1313 | context=context) |
|
1314 | 1314 | return diff |
|
1315 | 1315 | |
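# Example (illustrative sketch; `commit` is an assumed instance)::
#
#     diff = commit.diff(ignore_whitespace=False, context=5)
#     for chunk in diff.chunks():
#         print(chunk.header_as_str)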
|
1316 | 1316 | @LazyProperty |
|
1317 | 1317 | def added(self): |
|
1318 | 1318 | """ |
|
1319 | 1319 | Returns list of added ``FileNode`` objects. |
|
1320 | 1320 | """ |
|
1321 | 1321 | raise NotImplementedError |
|
1322 | 1322 | |
|
1323 | 1323 | @LazyProperty |
|
1324 | 1324 | def changed(self): |
|
1325 | 1325 | """ |
|
1326 | 1326 | Returns list of modified ``FileNode`` objects. |
|
1327 | 1327 | """ |
|
1328 | 1328 | raise NotImplementedError |
|
1329 | 1329 | |
|
1330 | 1330 | @LazyProperty |
|
1331 | 1331 | def removed(self): |
|
1332 | 1332 | """ |
|
1333 | 1333 | Returns list of removed ``FileNode`` objects. |
|
1334 | 1334 | """ |
|
1335 | 1335 | raise NotImplementedError |
|
1336 | 1336 | |
|
1337 | 1337 | @LazyProperty |
|
1338 | 1338 | def size(self): |
|
1339 | 1339 | """ |
|
1340 | 1340 | Returns total number of bytes from contents of all filenodes. |
|
1341 | 1341 | """ |
|
1342 | 1342 | return sum(node.size for node in self.get_filenodes_generator()) |
|
1343 | 1343 | |
|
1344 | 1344 | def walk(self, topurl=''): |
|
1345 | 1345 | """ |
|
1346 | 1346 | Similar to the os.walk method. Instead of a filesystem it walks through

1347 | 1347 | the commit starting at the given ``topurl``. Returns a generator of tuples
|
1348 | 1348 | (top_node, dirnodes, filenodes). |
|
1349 | 1349 | """ |
|
1350 | 1350 | from rhodecode.lib.vcs.nodes import DirNode |
|
1351 | 1351 | |
|
1352 | 1352 | if isinstance(topurl, DirNode): |
|
1353 | 1353 | top_node = topurl |
|
1354 | 1354 | else: |
|
1355 | 1355 | top_node = self.get_node(topurl) |
|
1356 | 1356 | |
|
1357 | 1357 | has_default_pre_load = False |
|
1358 | 1358 | if isinstance(top_node, DirNode): |
|
1359 | 1359 | # used to inject as we walk same defaults as given top_node |
|
1360 | 1360 | default_pre_load = top_node.default_pre_load |
|
1361 | 1361 | has_default_pre_load = True |
|
1362 | 1362 | |
|
1363 | 1363 | if not top_node.is_dir(): |
|
1364 | 1364 | return |
|
1365 | 1365 | yield top_node, top_node.dirs, top_node.files |
|
1366 | 1366 | for dir_node in top_node.dirs: |
|
1367 | 1367 | if has_default_pre_load: |
|
1368 | 1368 | dir_node.default_pre_load = default_pre_load |
|
1369 | 1369 | yield from self.walk(dir_node) |
|
1370 | 1370 | |
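# Example (illustrative sketch; `commit` is an assumed instance)::
#
#     for top_node, dirs, files in commit.walk('docs'):
#         for file_node in files:
#             print(file_node.path)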
|
1371 | 1371 | def get_filenodes_generator(self): |
|
1372 | 1372 | """ |
|
1373 | 1373 | Returns generator that yields *all* file nodes. |
|
1374 | 1374 | """ |
|
1375 | 1375 | for topnode, dirs, files in self.walk(): |
|
1376 | 1376 | yield from files |
|
1377 | 1377 | |
|
1378 | 1378 | # |
|
1379 | 1379 | # Utilities for sub classes to support consistent behavior |
|
1380 | 1380 | # |
|
1381 | 1381 | |
|
1382 | 1382 | def no_node_at_path(self, path): |
|
1383 | 1383 | return NodeDoesNotExistError( |
|
1384 | 1384 | f"There is no file nor directory at the given path: " |
|
1385 | 1385 | f"`{safe_str(path)}` at commit {self.short_id}") |
|
1386 | 1386 | |
|
1387 | 1387 | def _fix_path(self, path: str) -> str: |
|
1388 | 1388 | """ |
|
1389 | 1389 | Paths are stored without a trailing slash, so we need to get rid of it if
|
1390 | 1390 | needed. |
|
1391 | 1391 | """ |
|
1392 | 1392 | return safe_str(path).rstrip('/') |
|
1393 | 1393 | |
|
1394 | 1394 | # |
|
1395 | 1395 | # Deprecated API based on changesets |
|
1396 | 1396 | # |
|
1397 | 1397 | |
|
1398 | 1398 | @property |
|
1399 | 1399 | def revision(self): |
|
1400 | 1400 | warnings.warn("Use idx instead", DeprecationWarning) |
|
1401 | 1401 | return self.idx |
|
1402 | 1402 | |
|
1403 | 1403 | @revision.setter |
|
1404 | 1404 | def revision(self, value): |
|
1405 | 1405 | warnings.warn("Use idx instead", DeprecationWarning) |
|
1406 | 1406 | self.idx = value |
|
1407 | 1407 | |
|
1408 | 1408 | def get_file_changeset(self, path): |
|
1409 | 1409 | warnings.warn("Use get_path_commit instead", DeprecationWarning) |
|
1410 | 1410 | return self.get_path_commit(path) |
|
1411 | 1411 | |
|
1412 | 1412 | |
|
1413 | 1413 | class BaseChangesetClass(type): |
|
1414 | 1414 | |
|
1415 | 1415 | def __instancecheck__(self, instance): |
|
1416 | 1416 | return isinstance(instance, BaseCommit) |
|
1417 | 1417 | |
|
1418 | 1418 | |
|
1419 | 1419 | class BaseChangeset(BaseCommit, metaclass=BaseChangesetClass): |
|
1420 | 1420 | |
|
1421 | 1421 | def __new__(cls, *args, **kwargs): |
|
1422 | 1422 | warnings.warn( |
|
1423 | 1423 | "Use BaseCommit instead of BaseChangeset", DeprecationWarning) |
|
1424 | 1424 | return super().__new__(cls, *args, **kwargs) |
|
1425 | 1425 | |
|
1426 | 1426 | |
|
1427 | 1427 | class BaseInMemoryCommit(object): |
|
1428 | 1428 | """ |
|
1429 | 1429 | Represents differences between repository's state (most recent head) and |
|
1430 | 1430 | changes made *in place*. |
|
1431 | 1431 | |
|
1432 | 1432 | **Attributes** |
|
1433 | 1433 | |
|
1434 | 1434 | ``repository`` |
|
1435 | 1435 | repository object for this in-memory-commit |
|
1436 | 1436 | |
|
1437 | 1437 | ``added`` |
|
1438 | 1438 | list of ``FileNode`` objects marked as *added* |
|
1439 | 1439 | |
|
1440 | 1440 | ``changed`` |
|
1441 | 1441 | list of ``FileNode`` objects marked as *changed* |
|
1442 | 1442 | |
|
1443 | 1443 | ``removed`` |
|
1444 | 1444 | list of ``FileNode`` or ``RemovedFileNode`` objects marked to be |
|
1445 | 1445 | *removed* |
|
1446 | 1446 | |
|
1447 | 1447 | ``parents`` |
|
1448 | 1448 | list of :class:`BaseCommit` instances representing parents of |
|
1449 | 1449 | in-memory commit. Should always be a 2-element sequence.
|
1450 | 1450 | |
|
1451 | 1451 | """ |
|
1452 | 1452 | |
|
1453 | 1453 | def __init__(self, repository): |
|
1454 | 1454 | self.repository = repository |
|
1455 | 1455 | self.added = [] |
|
1456 | 1456 | self.changed = [] |
|
1457 | 1457 | self.removed = [] |
|
1458 | 1458 | self.parents = [] |
|
1459 | 1459 | |
|
1460 | 1460 | def add(self, *filenodes): |
|
1461 | 1461 | """ |
|
1462 | 1462 | Marks given ``FileNode`` objects as *to be committed*. |
|
1463 | 1463 | |
|
1464 | 1464 | :raises ``NodeAlreadyExistsError``: if node with same path exists at |
|
1465 | 1465 | latest commit |
|
1466 | 1466 | :raises ``NodeAlreadyAddedError``: if node with same path is already |
|
1467 | 1467 | marked as *added* |
|
1468 | 1468 | """ |
|
1469 | 1469 | # Check if not already marked as *added* first |
|
1470 | 1470 | for node in filenodes: |
|
1471 | 1471 | if node.path in (n.path for n in self.added): |
|
1472 | 1472 | raise NodeAlreadyAddedError( |
|
1473 | 1473 | "Such FileNode %s is already marked for addition" |
|
1474 | 1474 | % node.path) |
|
1475 | 1475 | for node in filenodes: |
|
1476 | 1476 | self.added.append(node) |
|
1477 | 1477 | |
|
1478 | 1478 | def change(self, *filenodes): |
|
1479 | 1479 | """ |
|
1480 | 1480 | Marks given ``FileNode`` objects to be *changed* in next commit. |
|
1481 | 1481 | |
|
1482 | 1482 | :raises ``EmptyRepositoryError``: if there are no commits yet |
|
1483 | 1483 | :raises ``NodeAlreadyExistsError``: if node with same path is already |
|
1484 | 1484 | marked to be *changed* |
|
1485 | 1485 | :raises ``NodeAlreadyRemovedError``: if node with same path is already |
|
1486 | 1486 | marked to be *removed* |
|
1487 | 1487 | :raises ``NodeDoesNotExistError``: if node doesn't exist in latest |
|
1488 | 1488 | commit |
|
1489 | 1489 | :raises ``NodeNotChangedError``: if node hasn't really been changed
|
1490 | 1490 | """ |
|
1491 | 1491 | for node in filenodes: |
|
1492 | 1492 | if node.path in (n.path for n in self.removed): |
|
1493 | 1493 | raise NodeAlreadyRemovedError( |
|
1494 | 1494 | "Node at %s is already marked as removed" % node.path) |
|
1495 | 1495 | try: |
|
1496 | 1496 | self.repository.get_commit() |
|
1497 | 1497 | except EmptyRepositoryError: |
|
1498 | 1498 | raise EmptyRepositoryError( |
|
1499 | 1499 | "Nothing to change - try to *add* new nodes rather than " |
|
1500 | 1500 | "changing them") |
|
1501 | 1501 | for node in filenodes: |
|
1502 | 1502 | if node.path in (n.path for n in self.changed): |
|
1503 | 1503 | raise NodeAlreadyChangedError( |
|
1504 | 1504 | "Node at '%s' is already marked as changed" % node.path) |
|
1505 | 1505 | self.changed.append(node) |
|
1506 | 1506 | |
|
1507 | 1507 | def remove(self, *filenodes): |
|
1508 | 1508 | """ |
|
1509 | 1509 | Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be |
|
1510 | 1510 | *removed* in next commit. |
|
1511 | 1511 | |
|
1512 | 1512 | :raises ``NodeAlreadyRemovedError``: if node has been already marked to |
|
1513 | 1513 | be *removed* |
|
1514 | 1514 | :raises ``NodeAlreadyChangedError``: if node has been already marked to |
|
1515 | 1515 | be *changed* |
|
1516 | 1516 | """ |
|
1517 | 1517 | for node in filenodes: |
|
1518 | 1518 | if node.path in (n.path for n in self.removed): |
|
1519 | 1519 | raise NodeAlreadyRemovedError( |
|
1520 | 1520 | "Node is already marked to for removal at %s" % node.path) |
|
1521 | 1521 | if node.path in (n.path for n in self.changed): |
|
1522 | 1522 | raise NodeAlreadyChangedError( |
|
1523 | 1523 | "Node is already marked to be changed at %s" % node.path) |
|
1524 | 1524 | # We only mark node as *removed* - real removal is done by |
|
1525 | 1525 | # commit method |
|
1526 | 1526 | self.removed.append(node) |
|
1527 | 1527 | |
|
1528 | 1528 | def reset(self): |
|
1529 | 1529 | """ |
|
1530 | 1530 | Resets this instance to initial state (cleans ``added``, ``changed`` |
|
1531 | 1531 | and ``removed`` lists). |
|
1532 | 1532 | """ |
|
1533 | 1533 | self.added = [] |
|
1534 | 1534 | self.changed = [] |
|
1535 | 1535 | self.removed = [] |
|
1536 | 1536 | self.parents = [] |
|
1537 | 1537 | |
|
1538 | 1538 | def get_ipaths(self): |
|
1539 | 1539 | """ |
|
1540 | 1540 | Returns generator of paths from nodes marked as added, changed or |
|
1541 | 1541 | removed. |
|
1542 | 1542 | """ |
|
1543 | 1543 | for node in itertools.chain(self.added, self.changed, self.removed): |
|
1544 | 1544 | yield node.path |
|
1545 | 1545 | |
|
1546 | 1546 | def get_paths(self): |
|
1547 | 1547 | """ |
|
1548 | 1548 | Returns list of paths from nodes marked as added, changed or removed. |
|
1549 | 1549 | """ |
|
1550 | 1550 | return list(self.get_ipaths()) |
|
1551 | 1551 | |
|
1552 | 1552 | def check_integrity(self, parents=None): |
|
1553 | 1553 | """ |
|
1554 | 1554 | Checks in-memory commit's integrity. Also, sets parents if not |
|
1555 | 1555 | already set. |
|
1556 | 1556 | |
|
1557 | 1557 | :raises CommitError: if any error occurs (i.e. |
|
1558 | 1558 | ``NodeDoesNotExistError``). |
|
1559 | 1559 | """ |
|
1560 | 1560 | if not self.parents: |
|
1561 | 1561 | parents = parents or [] |
|
1562 | 1562 | if len(parents) == 0: |
|
1563 | 1563 | try: |
|
1564 | 1564 | parents = [self.repository.get_commit(), None] |
|
1565 | 1565 | except EmptyRepositoryError: |
|
1566 | 1566 | parents = [None, None] |
|
1567 | 1567 | elif len(parents) == 1: |
|
1568 | 1568 | parents += [None] |
|
1569 | 1569 | self.parents = parents |
|
1570 | 1570 | |
|
1571 | 1571 | # Local parents, only if not None |
|
1572 | 1572 | parents = [p for p in self.parents if p] |
|
1573 | 1573 | |
|
1574 | 1574 | # Check nodes marked as added |
|
1575 | 1575 | for p in parents: |
|
1576 | 1576 | for node in self.added: |
|
1577 | 1577 | try: |
|
1578 | 1578 | p.get_node(node.path) |
|
1579 | 1579 | except NodeDoesNotExistError: |
|
1580 | 1580 | pass |
|
1581 | 1581 | else: |
|
1582 | 1582 | raise NodeAlreadyExistsError( |
|
1583 | 1583 | f"Node `{node.path}` already exists at {p}") |
|
1584 | 1584 | |
|
1585 | 1585 | # Check nodes marked as changed |
|
1586 | 1586 | missing = set(self.changed) |
|
1587 | 1587 | not_changed = set(self.changed) |
|
1588 | 1588 | if self.changed and not parents: |
|
1589 | 1589 | raise NodeDoesNotExistError(str(self.changed[0].path)) |
|
1590 | 1590 | for p in parents: |
|
1591 | 1591 | for node in self.changed: |
|
1592 | 1592 | try: |
|
1593 | 1593 | old = p.get_node(node.path) |
|
1594 | 1594 | missing.remove(node) |
|
1595 | 1595 | # if content actually changed, remove node from not_changed |
|
1596 | 1596 | if old.content != node.content: |
|
1597 | 1597 | not_changed.remove(node) |
|
1598 | 1598 | except NodeDoesNotExistError: |
|
1599 | 1599 | pass |
|
1600 | 1600 | if self.changed and missing: |
|
1601 | 1601 | raise NodeDoesNotExistError( |
|
1602 | 1602 | "Node `%s` marked as modified but missing in parents: %s" |
|
1603 | 1603 | % (missing.pop().path, parents))
|
1604 | 1604 | |
|
1605 | 1605 | if self.changed and not_changed: |
|
1606 | 1606 | raise NodeNotChangedError( |
|
1607 | 1607 | "Node `%s` wasn't actually changed (parents: %s)" |
|
1608 | 1608 | % (not_changed.pop().path, parents)) |
|
1609 | 1609 | |
|
1610 | 1610 | # Check nodes marked as removed |
|
1611 | 1611 | if self.removed and not parents: |
|
1612 | 1612 | raise NodeDoesNotExistError( |
|
1613 | 1613 | "Cannot remove node at %s as there " |
|
1614 | 1614 | "were no parents specified" % self.removed[0].path) |
|
1615 | 1615 | really_removed = set() |
|
1616 | 1616 | for p in parents: |
|
1617 | 1617 | for node in self.removed: |
|
1618 | 1618 | try: |
|
1619 | 1619 | p.get_node(node.path) |
|
1620 | 1620 | really_removed.add(node) |
|
1621 | 1621 | except CommitError: |
|
1622 | 1622 | pass |
|
1623 | 1623 | not_removed = set(self.removed) - really_removed |
|
1624 | 1624 | if not_removed: |
|
1625 | 1625 | # TODO: johbo: This code branch does not seem to be covered |
|
1626 | 1626 | raise NodeDoesNotExistError( |
|
1627 | 1627 | "Cannot remove node at %s from " |
|
1628 | 1628 | "following parents: %s" % (not_removed, parents)) |
|
1629 | 1629 | |
|
1630 | 1630 | def commit(self, message, author, parents=None, branch=None, date=None, **kwargs): |
|
1631 | 1631 | """ |
|
1632 | 1632 | Performs in-memory commit (doesn't check workdir in any way) and |
|
1633 | 1633 | returns newly created :class:`BaseCommit`. Updates repository's |
|
1634 | 1634 | attribute `commits`. |
|
1635 | 1635 | |
|
1636 | 1636 | .. note:: |
|
1637 | 1637 | |
|
1638 | 1638 | While overriding this method each backend should call
|
1639 | 1639 | ``self.check_integrity(parents)`` in the first place. |
|
1640 | 1640 | |
|
1641 | 1641 | :param message: message of the commit |
|
1642 | 1642 | :param author: full username, i.e. "Joe Doe <joe.doe@example.com>" |
|
1643 | 1643 | :param parents: single parent or sequence of parents from which commit |
|
1644 | 1644 | would be derived |
|
1645 | 1645 | :param date: ``datetime.datetime`` instance. Defaults to |
|
1646 | 1646 | ``datetime.datetime.now()``. |
|
1647 | 1647 | :param branch: branch name, as string. If none given, the backend's

1648 | 1648 | default branch will be used.
|
1649 | 1649 | |
|
1650 | 1650 | :raises ``CommitError``: if any error occurs while committing |
|
1651 | 1651 | """ |
|
1652 | 1652 | raise NotImplementedError |
|
1653 | 1653 | |
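# Example in-memory commit workflow (illustrative sketch; the FileNode
# import path and constructor signature are assumptions)::
#
#     from rhodecode.lib.vcs.nodes import FileNode
#
#     imc = repo.in_memory_commit
#     imc.add(FileNode(b'docs/readme.rst', content=b'hello'))
#     new_commit = imc.commit(
#         message='Add readme', author='Joe Doe <joe.doe@example.com>')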
|
1654 | 1654 | |
|
1655 | 1655 | class BaseInMemoryChangesetClass(type): |
|
1656 | 1656 | |
|
1657 | 1657 | def __instancecheck__(self, instance): |
|
1658 | 1658 | return isinstance(instance, BaseInMemoryCommit) |
|
1659 | 1659 | |
|
1660 | 1660 | |
|
1661 | 1661 | class BaseInMemoryChangeset(BaseInMemoryCommit, metaclass=BaseInMemoryChangesetClass): |
|
1662 | 1662 | |
|
1663 | 1663 | def __new__(cls, *args, **kwargs): |
|
1664 | 1664 | warnings.warn( |
|
1665 | 1665 | "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning) |
|
1666 | 1666 | return super().__new__(cls, *args, **kwargs) |
|
1667 | 1667 | |
|
1668 | 1668 | |
|
1669 | 1669 | class EmptyCommit(BaseCommit): |
|
1670 | 1670 | """ |
|
1671 | 1671 | A dummy empty commit. It's possible to pass a hash when creating
|
1672 | 1672 | an EmptyCommit |
|
1673 | 1673 | """ |
|
1674 | 1674 | |
|
1675 | 1675 | def __init__( |
|
1676 | 1676 | self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1, |
|
1677 | 1677 | message='', author='', date=None): |
|
1678 | 1678 | self._empty_commit_id = commit_id |
|
1679 | 1679 | # TODO: johbo: Solve idx parameter, default value does not make |
|
1680 | 1680 | # too much sense |
|
1681 | 1681 | self.idx = idx |
|
1682 | 1682 | self.message = message |
|
1683 | 1683 | self.author = author |
|
1684 | 1684 | self.date = date or datetime.datetime.fromtimestamp(0) |
|
1685 | 1685 | self.repository = repo |
|
1686 | 1686 | self.alias = alias |
|
1687 | 1687 | |
|
1688 | 1688 | @LazyProperty |
|
1689 | 1689 | def raw_id(self): |
|
1690 | 1690 | """ |
|
1691 | 1691 | Returns raw string identifying this commit, useful for web |
|
1692 | 1692 | representation. |
|
1693 | 1693 | """ |
|
1694 | 1694 | |
|
1695 | 1695 | return self._empty_commit_id |
|
1696 | 1696 | |
|
1697 | 1697 | @LazyProperty |
|
1698 | 1698 | def branch(self): |
|
1699 | 1699 | if self.alias: |
|
1700 | 1700 | from rhodecode.lib.vcs.backends import get_backend |
|
1701 | 1701 | return get_backend(self.alias).DEFAULT_BRANCH_NAME |
|
1702 | 1702 | |
|
1703 | 1703 | @LazyProperty |
|
1704 | 1704 | def short_id(self): |
|
1705 | 1705 | return self.raw_id[:12] |
|
1706 | 1706 | |
|
1707 | 1707 | @LazyProperty |
|
1708 | 1708 | def id(self): |
|
1709 | 1709 | return self.raw_id |
|
1710 | 1710 | |
|
1711 | 1711 | def get_path_commit(self, path, pre_load=None): |
|
1712 | 1712 | return self |
|
1713 | 1713 | |
|
1714 | 1714 | def get_file_content(self, path) -> bytes: |
|
1715 | 1715 | return b'' |
|
1716 | 1716 | |
|
1717 | 1717 | def get_file_content_streamed(self, path): |
|
1718 | 1718 | yield self.get_file_content(path) |
|
1719 | 1719 | |
|
1720 | 1720 | def get_file_size(self, path): |
|
1721 | 1721 | return 0 |
|
1722 | 1722 | |
|
1723 | 1723 | |
|
1724 | 1724 | class EmptyChangesetClass(type): |
|
1725 | 1725 | |
|
1726 | 1726 | def __instancecheck__(self, instance): |
|
1727 | 1727 | return isinstance(instance, EmptyCommit) |
|
1728 | 1728 | |
|
1729 | 1729 | |
|
1730 | 1730 | class EmptyChangeset(EmptyCommit, metaclass=EmptyChangesetClass): |
|
1731 | 1731 | |
|
1732 | 1732 | def __new__(cls, *args, **kwargs): |
|
1733 | 1733 | warnings.warn( |
|
1734 | 1734 | "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning) |
|
1735 | 1735 | return super(EmptyCommit, cls).__new__(cls, *args, **kwargs) |
|
1736 | 1736 | |
|
1737 | 1737 | def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None, |
|
1738 | 1738 | alias=None, revision=-1, message='', author='', date=None): |
|
1739 | 1739 | if requested_revision is not None: |
|
1740 | 1740 | warnings.warn( |
|
1741 | 1741 | "Parameter requested_revision not supported anymore", |
|
1742 | 1742 | DeprecationWarning) |
|
1743 | 1743 | super().__init__( |
|
1744 | 1744 | commit_id=cs, repo=repo, alias=alias, idx=revision, |
|
1745 | 1745 | message=message, author=author, date=date) |
|
1746 | 1746 | |
|
1747 | 1747 | @property |
|
1748 | 1748 | def revision(self): |
|
1749 | 1749 | warnings.warn("Use idx instead", DeprecationWarning) |
|
1750 | 1750 | return self.idx |
|
1751 | 1751 | |
|
1752 | 1752 | @revision.setter |
|
1753 | 1753 | def revision(self, value): |
|
1754 | 1754 | warnings.warn("Use idx instead", DeprecationWarning) |
|
1755 | 1755 | self.idx = value |
|
1756 | 1756 | |
|
1757 | 1757 | |
|
1758 | 1758 | class EmptyRepository(BaseRepository): |
|
1759 | 1759 | def __init__(self, repo_path=None, config=None, create=False, **kwargs): |
|
1760 | 1760 | pass |
|
1761 | 1761 | |
|
1762 | 1762 | def get_diff(self, *args, **kwargs): |
|
1763 | 1763 | from rhodecode.lib.vcs.backends.git.diff import GitDiff |
|
1764 | 1764 | return GitDiff(b'') |
|
1765 | 1765 | |
|
1766 | 1766 | |
|
1767 | 1767 | class CollectionGenerator(object): |
|
1768 | 1768 | |
|
1769 | 1769 | def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None): |
|
1770 | 1770 | self.repo = repo |
|
1771 | 1771 | self.commit_ids = commit_ids |
|
1772 | 1772 | self.collection_size = collection_size |
|
1773 | 1773 | self.pre_load = pre_load |
|
1774 | 1774 | self.translate_tag = translate_tag |
|
1775 | 1775 | |
|
1776 | 1776 | def __len__(self): |
|
1777 | 1777 | if self.collection_size is not None: |
|
1778 | 1778 | return self.collection_size |
|
1779 | 1779 | return self.commit_ids.__len__() |
|
1780 | 1780 | |
|
1781 | 1781 | def __iter__(self): |
|
1782 | 1782 | for commit_id in self.commit_ids: |
|
1783 | 1783 | # TODO: johbo: Mercurial passes in commit indices or commit ids |
|
1784 | 1784 | yield self._commit_factory(commit_id) |
|
1785 | 1785 | |
|
1786 | 1786 | def _commit_factory(self, commit_id): |
|
1787 | 1787 | """ |
|
1788 | 1788 | Allows backends to override the way commits are generated. |
|
1789 | 1789 | """ |
|
1790 | 1790 | return self.repo.get_commit( |
|
1791 | 1791 | commit_id=commit_id, pre_load=self.pre_load, |
|
1792 | 1792 | translate_tag=self.translate_tag) |
|
1793 | 1793 | |
|
1794 | 1794 | def __getitem__(self, key): |
|
1795 | 1795 | """Return either a single element by index, or a sliced collection.""" |
|
1796 | 1796 | |
|
1797 | 1797 | if isinstance(key, slice): |
|
1798 | 1798 | commit_ids = self.commit_ids[key.start:key.stop] |
|
1799 | 1799 | |
|
1800 | 1800 | else: |
|
1801 | 1801 | # single item |
|
1802 | 1802 | commit_ids = self.commit_ids[key] |
|
1803 | 1803 | |
|
1804 | 1804 | return self.__class__( |
|
1805 | 1805 | self.repo, commit_ids, pre_load=self.pre_load, |
|
1806 | 1806 | translate_tag=self.translate_tag) |
|
1807 | 1807 | |
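# Example (illustrative sketch; assumes `repo.get_commits()` returns a
# CollectionGenerator)::
#
#     commits = repo.get_commits()
#     for commit in commits[-5:]:   # slicing yields a new wrapper
#         print(commit.short_id)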
|
1808 | 1808 | def __repr__(self): |
|
1809 | 1809 | return '<CollectionGenerator[len:%s]>' % (self.__len__()) |
|
1810 | 1810 | |
|
1811 | 1811 | |
|
1812 | 1812 | class Config(object): |
|
1813 | 1813 | """ |
|
1814 | 1814 | Represents the configuration for a repository. |
|
1815 | 1815 | |
|
1816 | 1816 | The API is inspired by :class:`ConfigParser.ConfigParser` from the |
|
1817 | 1817 | standard library. It implements only the needed subset. |
|
1818 | 1818 | """ |
|
1819 | 1819 | |
|
1820 | 1820 | def __init__(self): |
|
1821 | 1821 | self._values = {} |
|
1822 | 1822 | |
|
1823 | 1823 | def copy(self): |
|
1824 | 1824 | clone = Config() |
|
1825 | 1825 | for section, values in self._values.items(): |
|
1826 | 1826 | clone._values[section] = values.copy() |
|
1827 | 1827 | return clone |
|
1828 | 1828 | |
|
1829 | 1829 | def __repr__(self): |
|
1830 | 1830 | return '<Config({} sections) at {}>'.format( |
|
1831 | 1831 | len(self._values), hex(id(self))) |
|
1832 | 1832 | |
|
1833 | 1833 | def items(self, section): |
|
1834 | 1834 | return self._values.get(section, {}).items() |
|
1835 | 1835 | |
|
1836 | 1836 | def get(self, section, option): |
|
1837 | 1837 | return self._values.get(section, {}).get(option) |
|
1838 | 1838 | |
|
1839 | 1839 | def set(self, section, option, value): |
|
1840 | 1840 | section_values = self._values.setdefault(section, {}) |
|
1841 | 1841 | section_values[option] = value |
|
1842 | 1842 | |
|
1843 | 1843 | def clear_section(self, section): |
|
1844 | 1844 | self._values[section] = {} |
|
1845 | 1845 | |
|
1846 | 1846 | def serialize(self): |
|
1847 | 1847 | """ |
|
1848 | 1848 | Creates a list of three tuples (section, key, value) representing |
|
1849 | 1849 | this config object. |
|
1850 | 1850 | """ |
|
1851 | 1851 | items = [] |
|
1852 | 1852 | for section in self._values: |
|
1853 | 1853 | for option, value in self._values[section].items(): |
|
1854 | 1854 | items.append( |
|
1855 | 1855 | (safe_str(section), safe_str(option), safe_str(value))) |
|
1856 | 1856 | return items |
|
1857 | 1857 | |
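# Example (illustrative sketch)::
#
#     config = Config()
#     config.set('extensions', 'largefiles', '')
#     config.get('extensions', 'largefiles')   # -> ''
#     config.serialize()   # -> [('extensions', 'largefiles', '')]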
|
1858 | 1858 | |
|
1859 | 1859 | class Diff(object): |
|
1860 | 1860 | """ |
|
1861 | 1861 | Represents a diff result from a repository backend. |
|
1862 | 1862 | |
|
1863 | 1863 | Subclasses have to provide a backend specific value for |
|
1864 | 1864 | :attr:`_header_re` and :attr:`_meta_re`. |
|
1865 | 1865 | """ |
|
1866 | 1866 | _meta_re = None |
|
1867 | 1867 | _header_re: bytes = re.compile(br"") |
|
1868 | 1868 | |
|
1869 | 1869 | def __init__(self, raw_diff: bytes): |
|
1870 | 1870 | if not isinstance(raw_diff, bytes): |
|
1871 | 1871 | raise Exception(f'raw_diff must be bytes - got {type(raw_diff)}') |
|
1872 | 1872 | |
|
1873 | 1873 | self.raw = memoryview(raw_diff) |
|
1874 | 1874 | |
|
1875 | 1875 | def get_header_re(self): |
|
1876 | 1876 | return self._header_re |
|
1877 | 1877 | |
|
1878 | 1878 | def chunks(self): |
|
1879 | 1879 | """ |
|
1880 | 1880 | Split the diff into chunks of separate ``diff --git a/file b/file`` sections.

1881 | 1881 | To keep diffs consistent we must prepend each chunk with \n, and make sure

1882 | 1882 | we can detect the last chunk, as it also has a special rule
|
1883 | 1883 | """ |
|
1884 | 1884 | |
|
1885 | 1885 | diff_parts = (b'\n' + bytes(self.raw)).split(b'\ndiff --git') |
|
1886 | 1886 | |
|
1887 | 1887 | chunks = diff_parts[1:] |
|
1888 | 1888 | total_chunks = len(chunks) |
|
1889 | 1889 | |
|
1890 | 1890 | def diff_iter(_chunks): |
|
1891 | 1891 | for cur_chunk, chunk in enumerate(_chunks, start=1): |
|
1892 | 1892 | yield DiffChunk(chunk, self, cur_chunk == total_chunks) |
|
1893 | 1893 | return diff_iter(chunks) |
|
1894 | 1894 | |
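# Example of how chunks() splits a raw diff (illustrative sketch)::
#
#     b'diff --git a/a b/a\n...\ndiff --git a/b b/b\n...'
#     # -> two DiffChunk objects, one per ``diff --git`` section; each
#     #    parses its file header via the backend-specific _header_re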
|
1895 | 1895 | |
|
1896 | 1896 | class DiffChunk(object): |
|
1897 | 1897 | |
|
1898 | 1898 | def __init__(self, chunk: bytes, diff_obj: Diff, is_last_chunk: bool): |
|
1899 | 1899 | self.diff_obj = diff_obj |
|
1900 | 1900 | |
|
1901 | 1901 | # since we split by \ndiff --git, that part is lost from the original diff;

1902 | 1902 | # we need to re-apply it at the end, EXCEPT if it's the last chunk
|
1903 | 1903 | if not is_last_chunk: |
|
1904 | 1904 | chunk += b'\n' |
|
1905 | 1905 | header_re = self.diff_obj.get_header_re() |
|
1906 | 1906 | match = header_re.match(chunk) |
|
1907 | 1907 | self.header = match.groupdict() |
|
1908 | 1908 | self.diff = chunk[match.end():] |
|
1909 | 1909 | self.raw = chunk |
|
1910 | 1910 | |
|
1911 | 1911 | @property |
|
1912 | 1912 | def header_as_str(self): |
|
1913 | 1913 | if self.header: |
|
1914 | 1914 | def safe_str_on_bytes(val): |
|
1915 | 1915 | if isinstance(val, bytes): |
|
1916 | 1916 | return safe_str(val) |
|
1917 | 1917 | return val |
|
1918 | 1918 | return {safe_str(k): safe_str_on_bytes(v) for k, v in self.header.items()} |
|
1919 | 1919 | |
|
1920 | 1920 | def __repr__(self): |
|
1921 | 1921 | return f'DiffChunk({self.header_as_str})' |
|
1922 | 1922 | |
|
1923 | 1923 | |
|
1924 | 1924 | class BasePathPermissionChecker(object): |
|
1925 | 1925 | |
|
1926 | 1926 | @staticmethod |
|
1927 | 1927 | def create_from_patterns(includes, excludes): |
|
1928 | 1928 | if includes and '*' in includes and not excludes: |
|
1929 | 1929 | return AllPathPermissionChecker() |
|
1930 | 1930 | elif excludes and '*' in excludes: |
|
1931 | 1931 | return NonePathPermissionChecker() |
|
1932 | 1932 | else: |
|
1933 | 1933 | return PatternPathPermissionChecker(includes, excludes) |
|
1934 | 1934 | |
|
1935 | 1935 | @property |
|
1936 | 1936 | def has_full_access(self): |
|
1937 | 1937 | raise NotImplementedError() |
|
1938 | 1938 | |
|
1939 | 1939 | def has_access(self, path): |
|
1940 | 1940 | raise NotImplementedError() |
|
1941 | 1941 | |
|
1942 | 1942 | |
|
1943 | 1943 | class AllPathPermissionChecker(BasePathPermissionChecker): |
|
1944 | 1944 | |
|
1945 | 1945 | @property |
|
1946 | 1946 | def has_full_access(self): |
|
1947 | 1947 | return True |
|
1948 | 1948 | |
|
1949 | 1949 | def has_access(self, path): |
|
1950 | 1950 | return True |
|
1951 | 1951 | |
|
1952 | 1952 | |
|
1953 | 1953 | class NonePathPermissionChecker(BasePathPermissionChecker): |
|
1954 | 1954 | |
|
1955 | 1955 | @property |
|
1956 | 1956 | def has_full_access(self): |
|
1957 | 1957 | return False |
|
1958 | 1958 | |
|
1959 | 1959 | def has_access(self, path): |
|
1960 | 1960 | return False |
|
1961 | 1961 | |
|
1962 | 1962 | |
|
1963 | 1963 | class PatternPathPermissionChecker(BasePathPermissionChecker): |
|
1964 | 1964 | |
|
1965 | 1965 | def __init__(self, includes, excludes): |
|
1966 | 1966 | self.includes = includes |
|
1967 | 1967 | self.excludes = excludes |
|
1968 | 1968 | self.includes_re = [] if not includes else [ |
|
1969 | 1969 | re.compile(fnmatch.translate(pattern)) for pattern in includes] |
|
1970 | 1970 | self.excludes_re = [] if not excludes else [ |
|
1971 | 1971 | re.compile(fnmatch.translate(pattern)) for pattern in excludes] |
|
1972 | 1972 | |
|
1973 | 1973 | @property |
|
1974 | 1974 | def has_full_access(self): |
|
1975 | 1975 | return '*' in self.includes and not self.excludes |
|
1976 | 1976 | |
|
1977 | 1977 | def has_access(self, path): |
|
1978 | 1978 | for regex in self.excludes_re: |
|
1979 | 1979 | if regex.match(path): |
|
1980 | 1980 | return False |
|
1981 | 1981 | for regex in self.includes_re: |
|
1982 | 1982 | if regex.match(path): |
|
1983 | 1983 | return True |
|
1984 | 1984 | return False |
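# Example (illustrative sketch)::
#
#     checker = BasePathPermissionChecker.create_from_patterns(
#         includes=['docs/*'], excludes=['docs/secret/*'])
#     checker.has_access('docs/index.rst')        # True
#     checker.has_access('docs/secret/key.txt')   # False
#     checker.has_full_access                     # False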
@@ -1,1053 +1,1054 b'' | |||
|
1 | 1 | # Copyright (C) 2014-2023 RhodeCode GmbH |
|
2 | 2 | # |
|
3 | 3 | # This program is free software: you can redistribute it and/or modify |
|
4 | 4 | # it under the terms of the GNU Affero General Public License, version 3 |
|
5 | 5 | # (only), as published by the Free Software Foundation. |
|
6 | 6 | # |
|
7 | 7 | # This program is distributed in the hope that it will be useful, |
|
8 | 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
9 | 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
10 | 10 | # GNU General Public License for more details. |
|
11 | 11 | # |
|
12 | 12 | # You should have received a copy of the GNU Affero General Public License |
|
13 | 13 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
14 | 14 | # |
|
15 | 15 | # This program is dual-licensed. If you wish to learn more about the |
|
16 | 16 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
17 | 17 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
18 | 18 | |
|
19 | 19 | """ |
|
20 | 20 | GIT repository module |
|
21 | 21 | """ |
|
22 | 22 | |
|
23 | 23 | import logging |
|
24 | 24 | import os |
|
25 | 25 | import re |
|
26 | 26 | |
|
27 | 27 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
28 | 28 | |
|
29 | 29 | from collections import OrderedDict |
|
30 | 30 | from rhodecode.lib.datelib import ( |
|
31 | 31 | utcdate_fromtimestamp, makedate, date_astimestamp) |
|
32 | 32 | from rhodecode.lib.hash_utils import safe_str |
|
33 | 33 | from rhodecode.lib.utils2 import CachedProperty |
|
34 | 34 | from rhodecode.lib.vcs import connection, path as vcspath |
|
35 | 35 | from rhodecode.lib.vcs.backends.base import ( |
|
36 | 36 | BaseRepository, CollectionGenerator, Config, MergeResponse, |
|
37 | 37 | MergeFailureReason, Reference) |
|
38 | 38 | from rhodecode.lib.vcs.backends.git.commit import GitCommit |
|
39 | 39 | from rhodecode.lib.vcs.backends.git.diff import GitDiff |
|
40 | 40 | from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit |
|
41 | 41 | from rhodecode.lib.vcs.exceptions import ( |
|
42 | 42 | CommitDoesNotExistError, EmptyRepositoryError, |
|
43 | 43 | RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo) |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$') |
|
47 | 47 | |
|
48 | 48 | log = logging.getLogger(__name__) |
|
49 | 49 | |
|
50 | 50 | |
|
51 | 51 | class GitRepository(BaseRepository): |
|
52 | 52 | """ |
|
53 | 53 | Git repository backend. |
|
54 | 54 | """ |
|
55 | 55 | DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master' |
|
56 | 56 | DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}' |
|
57 | 57 | |
|
58 | 58 | contact = BaseRepository.DEFAULT_CONTACT |
|
59 | 59 | |
|
60 | 60 | def __init__(self, repo_path, config=None, create=False, src_url=None, |
|
61 | 61 | do_workspace_checkout=False, with_wire=None, bare=False): |
|
62 | 62 | |
|
63 | 63 | self.path = safe_str(os.path.abspath(repo_path)) |
|
64 | 64 | self.config = config if config else self.get_default_config() |
|
65 | 65 | self.with_wire = with_wire or {"cache": False} # default should not use cache |
|
66 | 66 | |
|
67 | 67 | self._init_repo(create, src_url, do_workspace_checkout, bare) |
|
68 | 68 | |
|
69 | 69 | # caches |
|
70 | 70 | self._commit_ids = {} |
|
71 | 71 | |
|
72 | 72 | @LazyProperty |
|
73 | 73 | def _remote(self): |
|
74 | 74 | repo_id = self.path |
|
75 | 75 | return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire) |
|
76 | 76 | |
|
77 | 77 | @LazyProperty |
|
78 | 78 | def bare(self): |
|
79 | 79 | return self._remote.bare() |
|
80 | 80 | |
|
81 | 81 | @LazyProperty |
|
82 | 82 | def head(self): |
|
83 | 83 | return self._remote.head() |
|
84 | 84 | |
|
85 | 85 | @CachedProperty |
|
86 | 86 | def commit_ids(self): |
|
87 | 87 | """ |
|
88 | 88 | Returns list of commit ids, in ascending order. Being a lazy

89 | 89 | attribute allows external tools to inject commit ids from cache.
|
90 | 90 | """ |
|
91 | 91 | commit_ids = self._get_all_commit_ids() |
|
92 | 92 | self._rebuild_cache(commit_ids) |
|
93 | 93 | return commit_ids |
|
94 | 94 | |
|
95 | 95 | def _rebuild_cache(self, commit_ids): |
|
96 | 96 | self._commit_ids = {commit_id: index |
|
97 | 97 | for index, commit_id in enumerate(commit_ids)} |
|
98 | 98 | |
|
99 | 99 | def run_git_command(self, cmd, **opts): |
|
100 | 100 | """ |
|
101 | 101 | Runs the given ``cmd`` as a git command and returns a tuple
|
102 | 102 | (stdout, stderr). |
|
103 | 103 | |
|
104 | 104 | :param cmd: git command to be executed |
|
105 | 105 | :param opts: env options to pass into Subprocess command |
|
106 | 106 | """ |
|
107 | 107 | if not isinstance(cmd, list): |
|
108 | 108 | raise ValueError(f'cmd must be a list, got {type(cmd)} instead') |
|
109 | 109 | |
|
110 | 110 | skip_stderr_log = opts.pop('skip_stderr_log', False) |
|
111 | 111 | out, err = self._remote.run_git_command(cmd, **opts) |
|
112 | 112 | if err and not skip_stderr_log: |
|
113 | 113 | log.debug('Stderr output of git command "%s":\n%s', cmd, err) |
|
114 | 114 | return out, err |
|
115 | 115 | |
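A minimal usage sketch for run_git_command, assuming an existing GitRepository instance named `repo`; the command must be a list, and skip_stderr_log is popped from opts before the remote call, as shown above.

    stdout, stderr = repo.run_git_command(
        ['rev-parse', '--abbrev-ref', 'HEAD'], skip_stderr_log=True)
    current_branch = stdout.strip()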
|
116 | 116 | @staticmethod |
|
117 | 117 | def check_url(url, config): |
|
118 | 118 | """ |
|
119 | 119 | Function will check the given url and try to verify that it's a valid

120 | 120 | link. Sometimes it may happen that git issues a basic

121 | 121 | auth request, which can cause the whole API to hang when used from Python

122 | 122 | or other external calls.

123 | 123 | 

124 | 124 | On failure it raises urllib2.HTTPError; the exception is also raised

125 | 125 | when the return code is not 200
|
126 | 126 | """ |
|
127 | 127 | # first check whether it's a local path rather than a URL
|
128 | 128 | if os.path.isdir(url) or url.startswith('file:'): |
|
129 | 129 | return True |
|
130 | 130 | |
|
131 | 131 | if '+' in url.split('://', 1)[0]: |
|
132 | 132 | url = url.split('+', 1)[1] |
|
133 | 133 | |
|
134 | 134 | # Request the _remote to verify the url |
|
135 | 135 | return connection.Git.check_url(url, config.serialize()) |
|
136 | 136 | |
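A behavior sketch for check_url, assuming a Config instance named `config`; the paths and URLs are illustrative. Local directories and file: URLs short-circuit to True, and a scheme prefix such as git+https:// is reduced to https:// before the remote verification.

    GitRepository.check_url('/srv/repos/project.git', config)         # isdir -> True
    GitRepository.check_url('file:///srv/repos/project.git', config)  # -> True
    # 'git+https://...' is stripped to 'https://...' and verified remotely
    GitRepository.check_url('git+https://example.com/repo.git', config)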
|
137 | 137 | @staticmethod |
|
138 | 138 | def is_valid_repository(path): |
|
139 | 139 | if os.path.isdir(os.path.join(path, '.git')): |
|
140 | 140 | return True |
|
141 | 141 | # check case of bare repository |
|
142 | 142 | try: |
|
143 | 143 | GitRepository(path) |
|
144 | 144 | return True |
|
145 | 145 | except VCSError: |
|
146 | 146 | pass |
|
147 | 147 | return False |
|
148 | 148 | |
|
149 | 149 | def _init_repo(self, create, src_url=None, do_workspace_checkout=False, |
|
150 | 150 | bare=False): |
|
151 | 151 | if create and os.path.exists(self.path): |
|
152 | 152 | raise RepositoryError( |
|
153 | 153 | "Cannot create repository at %s, location already exist" |
|
154 | 154 | % self.path) |
|
155 | 155 | |
|
156 | 156 | if bare and do_workspace_checkout: |
|
157 | 157 | raise RepositoryError("Cannot update a bare repository") |
|
158 | 158 | try: |
|
159 | 159 | |
|
160 | 160 | if src_url: |
|
161 | 161 | # check URL before any actions |
|
162 | 162 | GitRepository.check_url(src_url, self.config) |
|
163 | 163 | |
|
164 | 164 | if create: |
|
165 | 165 | os.makedirs(self.path, mode=0o755) |
|
166 | 166 | |
|
167 | 167 | if bare: |
|
168 | 168 | self._remote.init_bare() |
|
169 | 169 | else: |
|
170 | 170 | self._remote.init() |
|
171 | 171 | |
|
172 | 172 | if src_url and bare: |
|
173 | 173 | # a bare repository only allows a fetch; checkout is not allowed
|
174 | 174 | self.fetch(src_url, commit_ids=None) |
|
175 | 175 | elif src_url: |
|
176 | 176 | self.pull(src_url, commit_ids=None, |
|
177 | 177 | update_after=do_workspace_checkout) |
|
178 | 178 | |
|
179 | 179 | else: |
|
180 | 180 | if not self._remote.assert_correct_path(): |
|
181 | 181 | raise RepositoryError( |
|
182 | 182 | 'Path "%s" does not contain a Git repository' % |
|
183 | 183 | (self.path,)) |
|
184 | 184 | |
|
185 | 185 | # TODO: johbo: check if we have to translate the OSError here |
|
186 | 186 | except OSError as err: |
|
187 | 187 | raise RepositoryError(err) |
|
188 | 188 | |
|
189 | 189 | def _get_all_commit_ids(self): |
|
190 | 190 | return self._remote.get_all_commit_ids() |
|
191 | 191 | |
|
192 | 192 | def _get_commit_ids(self, filters=None): |
|
193 | 193 | # we must check if this repo is not empty, since the later command

194 | 194 | # fails if it is. It's cheaper to ask than to handle the subprocess

195 | 195 | # errors
|
196 | 196 | |
|
197 | 197 | head = self._remote.head(show_exc=False) |
|
198 | 198 | |
|
199 | 199 | if not head: |
|
200 | 200 | return [] |
|
201 | 201 | |
|
202 | 202 | rev_filter = ['--branches', '--tags'] |
|
203 | 203 | extra_filter = [] |
|
204 | 204 | |
|
205 | 205 | if filters: |
|
206 | 206 | if filters.get('since'): |
|
207 | 207 | extra_filter.append('--since=%s' % (filters['since'])) |
|
208 | 208 | if filters.get('until'): |
|
209 | 209 | extra_filter.append('--until=%s' % (filters['until'])) |
|
210 | 210 | if filters.get('branch_name'): |
|
211 | 211 | rev_filter = [] |
|
212 | 212 | extra_filter.append(filters['branch_name']) |
|
213 | 213 | rev_filter.extend(extra_filter) |
|
214 | 214 | |
|
215 | 215 | # if filters.get('start') or filters.get('end'): |
|
216 | 216 | # # skip is offset, max-count is limit |
|
217 | 217 | # if filters.get('start'): |
|
218 | 218 | # extra_filter += ' --skip=%s' % filters['start'] |
|
219 | 219 | # if filters.get('end'): |
|
220 | 220 | # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0)) |
|
221 | 221 | |
|
222 | 222 | cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter |
|
223 | 223 | try: |
|
224 | 224 | output, __ = self.run_git_command(cmd) |
|
225 | 225 | except RepositoryError: |
|
226 | 226 | # Can be raised for empty repositories |
|
227 | 227 | return [] |
|
228 | 228 | return output.splitlines() |
|
229 | 229 | |
|
230 | 230 | def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None): |
|
231 | 231 | |
|
232 | 232 | def is_null(value): |
|
233 | 233 | return len(value) == value.count('0')
|
234 | 234 | |
|
235 | 235 | if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1): |
|
236 | 236 | return self.commit_ids[-1] |
|
237 | 237 | |
|
238 | 238 | commit_missing_err = "Commit {} does not exist for `{}`".format( |
|
239 | 239 | *map(safe_str, [commit_id_or_idx, self.name])) |
|
240 | 240 | |
|
241 | 241 | is_bstr = isinstance(commit_id_or_idx, str) |
|
242 | 242 | is_branch = reference_obj and reference_obj.branch |
|
243 | 243 | |
|
244 | 244 | lookup_ok = False |
|
245 | 245 | if is_bstr: |
|
246 | 246 | # Need to call remote to translate id for tagging scenarios,

247 | 247 | # or branches that are numeric
|
248 | 248 | try: |
|
249 | 249 | remote_data = self._remote.get_object(commit_id_or_idx, |
|
250 | 250 | maybe_unreachable=maybe_unreachable) |
|
251 | 251 | commit_id_or_idx = remote_data["commit_id"] |
|
252 | 252 | lookup_ok = True |
|
253 | 253 | except (CommitDoesNotExistError,): |
|
254 | 254 | lookup_ok = False |
|
255 | 255 | |
|
256 | 256 | if lookup_ok is False: |
|
257 | 257 | is_numeric_idx = \ |
|
258 | 258 | (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \ |
|
259 | 259 | or isinstance(commit_id_or_idx, int) |
|
260 | 260 | if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)): |
|
261 | 261 | try: |
|
262 | 262 | commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)] |
|
263 | 263 | lookup_ok = True |
|
264 | 264 | except Exception: |
|
265 | 265 | raise CommitDoesNotExistError(commit_missing_err) |
|
266 | 266 | |
|
267 | 267 | # we failed the regular lookup and the lookup by integer index
|
268 | 268 | if lookup_ok is False: |
|
269 | 269 | raise CommitDoesNotExistError(commit_missing_err) |
|
270 | 270 | |
|
271 | 271 | # Ensure we return full id |
|
272 | 272 | if not SHA_PATTERN.match(str(commit_id_or_idx)): |
|
273 | 273 | raise CommitDoesNotExistError( |
|
274 | 274 | "Given commit id %s not recognized" % commit_id_or_idx) |
|
275 | 275 | return commit_id_or_idx |
|
276 | 276 | |
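A resolution-order sketch for _lookup_commit, assuming a populated `repo`; the ref names are illustrative.

    repo._lookup_commit('tip')        # None/''/'tip'/'HEAD'/-1 -> newest commit id
    repo._lookup_commit(0)            # int or short digit string -> index lookup
    repo._lookup_commit('v1.0')       # tag/branch name -> translated via the remote
    repo._lookup_commit('not-a-ref')  # -> raises CommitDoesNotExistError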
|
277 | 277 | def get_hook_location(self): |
|
278 | 278 | """ |
|
279 | 279 | returns absolute path to location where hooks are stored |
|
280 | 280 | """ |
|
281 | 281 | loc = os.path.join(self.path, 'hooks') |
|
282 | 282 | if not self.bare: |
|
283 | 283 | loc = os.path.join(self.path, '.git', 'hooks') |
|
284 | 284 | return loc |
|
285 | 285 | |
|
286 | 286 | @LazyProperty |
|
287 | 287 | def last_change(self): |
|
288 | 288 | """ |
|
289 | 289 | Returns last change made on this repository as |
|
290 | 290 | `datetime.datetime` object. |
|
291 | 291 | """ |
|
292 | 292 | try: |
|
293 | 293 | return self.get_commit().date |
|
294 | 294 | except RepositoryError: |
|
295 | 295 | tzoffset = makedate()[1] |
|
296 | 296 | return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset) |
|
297 | 297 | |
|
298 | 298 | def _get_fs_mtime(self): |
|
299 | 299 | idx_loc = '' if self.bare else '.git' |
|
300 | 300 | # fallback to filesystem |
|
301 | 301 | in_path = os.path.join(self.path, idx_loc, "index") |
|
302 | 302 | he_path = os.path.join(self.path, idx_loc, "HEAD") |
|
303 | 303 | if os.path.exists(in_path): |
|
304 | 304 | return os.stat(in_path).st_mtime |
|
305 | 305 | else: |
|
306 | 306 | return os.stat(he_path).st_mtime |
|
307 | 307 | |
|
308 | 308 | @LazyProperty |
|
309 | 309 | def description(self): |
|
310 | 310 | description = self._remote.get_description() |
|
311 | 311 | return safe_str(description or self.DEFAULT_DESCRIPTION) |
|
312 | 312 | |
|
313 | 313 | def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True): |
|
314 | 314 | if self.is_empty(): |
|
315 | 315 | return OrderedDict() |
|
316 | 316 | |
|
317 | 317 | result = [] |
|
318 | 318 | for ref, sha in self._refs.items(): |
|
319 | 319 | if ref.startswith(prefix): |
|
320 | 320 | ref_name = ref |
|
321 | 321 | if strip_prefix: |
|
322 | 322 | ref_name = ref[len(prefix):] |
|
323 | 323 | result.append((safe_str(ref_name), sha)) |
|
324 | 324 | |
|
325 | 325 | def get_name(entry): |
|
326 | 326 | return entry[0] |
|
327 | 327 | |
|
328 | 328 | return OrderedDict(sorted(result, key=get_name, reverse=reverse)) |
|
329 | 329 | |
|
330 | 330 | def _get_branches(self): |
|
331 | 331 | return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True) |
|
332 | 332 | |
|
333 | 333 | @CachedProperty |
|
334 | 334 | def branches(self): |
|
335 | 335 | return self._get_branches() |
|
336 | 336 | |
|
337 | 337 | @CachedProperty |
|
338 | 338 | def branches_closed(self): |
|
339 | 339 | return {} |
|
340 | 340 | |
|
341 | 341 | @CachedProperty |
|
342 | 342 | def bookmarks(self): |
|
343 | 343 | return {} |
|
344 | 344 | |
|
345 | 345 | @CachedProperty |
|
346 | 346 | def branches_all(self): |
|
347 | 347 | all_branches = {} |
|
348 | 348 | all_branches.update(self.branches) |
|
349 | 349 | all_branches.update(self.branches_closed) |
|
350 | 350 | return all_branches |
|
351 | 351 | |
|
352 | 352 | @CachedProperty |
|
353 | 353 | def tags(self): |
|
354 | 354 | return self._get_tags() |
|
355 | 355 | |
|
356 | 356 | def _get_tags(self): |
|
357 | 357 | return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True) |
|
358 | 358 | |
|
359 | 359 | def tag(self, name, user, commit_id=None, message=None, date=None, |
|
360 | 360 | **kwargs): |
|
361 | 361 | # TODO: fix this method to apply annotated tags correctly with message
|
362 | 362 | """ |
|
363 | 363 | Creates and returns a tag for the given ``commit_id``. |
|
364 | 364 | |
|
365 | 365 | :param name: name for new tag |
|
366 | 366 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
367 | 367 | :param commit_id: commit id for which new tag would be created |
|
368 | 368 | :param message: message of the tag's commit |
|
369 | 369 | :param date: date of tag's commit |
|
370 | 370 | |
|
371 | 371 | :raises TagAlreadyExistError: if tag with same name already exists |
|
372 | 372 | """ |
|
373 | 373 | if name in self.tags: |
|
374 | 374 | raise TagAlreadyExistError("Tag %s already exists" % name) |
|
375 | 375 | commit = self.get_commit(commit_id=commit_id) |
|
376 | 376 | message = message or f"Added tag {name} for commit {commit.raw_id}" |
|
377 | 377 | |
|
378 | 378 | self._remote.set_refs('refs/tags/%s' % name, commit.raw_id) |
|
379 | 379 | |
|
380 | 380 | self._invalidate_prop_cache('tags') |
|
381 | 381 | self._invalidate_prop_cache('_refs') |
|
382 | 382 | |
|
383 | 383 | return commit |
|
384 | 384 | |
|
385 | 385 | def remove_tag(self, name, user, message=None, date=None): |
|
386 | 386 | """ |
|
387 | 387 | Removes tag with the given ``name``. |
|
388 | 388 | |
|
389 | 389 | :param name: name of the tag to be removed |
|
390 | 390 | :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>" |
|
391 | 391 | :param message: message of the tag's removal commit |
|
392 | 392 | :param date: date of tag's removal commit |
|
393 | 393 | |
|
394 | 394 | :raises TagDoesNotExistError: if tag with given name does not exist
|
395 | 395 | """ |
|
396 | 396 | if name not in self.tags: |
|
397 | 397 | raise TagDoesNotExistError("Tag %s does not exist" % name) |
|
398 | 398 | |
|
399 | 399 | self._remote.tag_remove(name) |
|
400 | 400 | self._invalidate_prop_cache('tags') |
|
401 | 401 | self._invalidate_prop_cache('_refs') |
|
402 | 402 | |
|
403 | 403 | def _get_refs(self): |
|
404 | 404 | return self._remote.get_refs() |
|
405 | 405 | |
|
406 | 406 | @CachedProperty |
|
407 | 407 | def _refs(self): |
|
408 | 408 | return self._get_refs() |
|
409 | 409 | |
|
410 | 410 | @property |
|
411 | 411 | def _ref_tree(self): |
|
412 | 412 | node = tree = {} |
|
413 | 413 | for ref, sha in self._refs.items(): |
|
414 | 414 | path = ref.split('/') |
|
415 | 415 | for bit in path[:-1]: |
|
416 | 416 | node = node.setdefault(bit, {}) |
|
417 | 417 | node[path[-1]] = sha |
|
418 | 418 | node = tree |
|
419 | 419 | return tree |
|
420 | 420 | |
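The _ref_tree property nests the flat refs mapping by path segment; a self-contained reproduction of the loop above, with illustrative shas.

    refs = {'refs/heads/master': 'abc123', 'refs/tags/v1.0': 'def456'}
    node = tree = {}
    for ref, sha in refs.items():
        path = ref.split('/')
        for bit in path[:-1]:
            node = node.setdefault(bit, {})
        node[path[-1]] = sha
        node = tree
    assert tree == {'refs': {'heads': {'master': 'abc123'},
                             'tags': {'v1.0': 'def456'}}}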
|
421 | 421 | def get_remote_ref(self, ref_name): |
|
422 | 422 | ref_key = f'refs/remotes/origin/{safe_str(ref_name)}' |
|
423 | 423 | try: |
|
424 | 424 | return self._refs[ref_key] |
|
425 | 425 | except Exception: |
|
426 | 426 | return |
|
427 | 427 | |
|
428 | 428 | def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, |
|
429 | 429 | translate_tag=True, maybe_unreachable=False, reference_obj=None): |
|
430 | 430 | """ |
|
431 | 431 | Returns `GitCommit` object representing commit from git repository |
|
432 | 432 | at the given `commit_id` or head (most recent commit) if None given. |
|
433 | 433 | """ |
|
434 | 434 | |
|
435 | 435 | if self.is_empty(): |
|
436 | 436 | raise EmptyRepositoryError("There are no commits yet") |
|
437 | 437 | |
|
438 | 438 | if commit_id is not None: |
|
439 | 439 | self._validate_commit_id(commit_id) |
|
440 | 440 | try: |
|
441 | 441 | # we have cached idx, use it without contacting the remote |
|
442 | 442 | idx = self._commit_ids[commit_id] |
|
443 | 443 | return GitCommit(self, commit_id, idx, pre_load=pre_load) |
|
444 | 444 | except KeyError: |
|
445 | 445 | pass |
|
446 | 446 | |
|
447 | 447 | elif commit_idx is not None: |
|
448 | 448 | self._validate_commit_idx(commit_idx) |
|
449 | 449 | try: |
|
450 | 450 | _commit_id = self.commit_ids[commit_idx] |
|
451 | 451 | if commit_idx < 0: |
|
452 | 452 | commit_idx = self.commit_ids.index(_commit_id) |
|
453 | 453 | return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load) |
|
454 | 454 | except IndexError: |
|
455 | 455 | commit_id = commit_idx |
|
456 | 456 | else: |
|
457 | 457 | commit_id = "tip" |
|
458 | 458 | |
|
459 | 459 | if translate_tag: |
|
460 | 460 | commit_id = self._lookup_commit( |
|
461 | 461 | commit_id, maybe_unreachable=maybe_unreachable, |
|
462 | 462 | reference_obj=reference_obj) |
|
463 | 463 | |
|
464 | 464 | try: |
|
465 | 465 | idx = self._commit_ids[commit_id] |
|
466 | 466 | except KeyError: |
|
467 | 467 | idx = -1 |
|
468 | 468 | |
|
469 | 469 | return GitCommit(self, commit_id, idx, pre_load=pre_load) |
|
470 | 470 | |
|
471 | 471 | def get_commits( |
|
472 | 472 | self, start_id=None, end_id=None, start_date=None, end_date=None, |
|
473 | 473 | branch_name=None, show_hidden=False, pre_load=None, translate_tags=True): |
|
474 | 474 | """ |
|
475 | 475 | Returns generator of `GitCommit` objects from start to end (both |
|
476 | 476 | are inclusive), in ascending date order. |
|
477 | 477 | |
|
478 | 478 | :param start_id: None, str(commit_id) |
|
479 | 479 | :param end_id: None, str(commit_id) |
|
480 | 480 | :param start_date: if specified, commits with commit date less than |
|
481 | 481 | ``start_date`` would be filtered out from returned set |
|
482 | 482 | :param end_date: if specified, commits with commit date greater than |
|
483 | 483 | ``end_date`` would be filtered out from returned set |
|
484 | 484 | :param branch_name: if specified, commits not reachable from given |
|
485 | 485 | branch would be filtered out from returned set |
|
486 | 486 | :param show_hidden: Show hidden commits such as obsolete or hidden from |
|
487 | 487 | Mercurial evolve |
|
488 | 488 | :raise BranchDoesNotExistError: If given `branch_name` does not |
|
489 | 489 | exist. |
|
490 | 490 | :raise CommitDoesNotExistError: If commits for given `start` or |
|
491 | 491 | `end` could not be found. |
|
492 | 492 | |
|
493 | 493 | """ |
|
494 | 494 | if self.is_empty(): |
|
495 | 495 | raise EmptyRepositoryError("There are no commits yet") |
|
496 | 496 | |
|
497 | 497 | self._validate_branch_name(branch_name) |
|
498 | 498 | |
|
499 | 499 | if start_id is not None: |
|
500 | 500 | self._validate_commit_id(start_id) |
|
501 | 501 | if end_id is not None: |
|
502 | 502 | self._validate_commit_id(end_id) |
|
503 | 503 | |
|
504 | 504 | start_raw_id = self._lookup_commit(start_id) |
|
505 | 505 | start_pos = self._commit_ids[start_raw_id] if start_id else None |
|
506 | 506 | end_raw_id = self._lookup_commit(end_id) |
|
507 | 507 | end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None |
|
508 | 508 | |
|
509 | 509 | if None not in [start_id, end_id] and start_pos > end_pos: |
|
510 | 510 | raise RepositoryError( |
|
511 | 511 | "Start commit '%s' cannot be after end commit '%s'" % |
|
512 | 512 | (start_id, end_id)) |
|
513 | 513 | |
|
514 | 514 | if end_pos is not None: |
|
515 | 515 | end_pos += 1 |
|
516 | 516 | |
|
517 | 517 | filter_ = [] |
|
518 | 518 | if branch_name: |
|
519 | 519 | filter_.append({'branch_name': branch_name}) |
|
520 | 520 | if start_date and not end_date: |
|
521 | 521 | filter_.append({'since': start_date}) |
|
522 | 522 | if end_date and not start_date: |
|
523 | 523 | filter_.append({'until': end_date}) |
|
524 | 524 | if start_date and end_date: |
|
525 | 525 | filter_.append({'since': start_date}) |
|
526 | 526 | filter_.append({'until': end_date}) |
|
527 | 527 | |
|
528 | 528 | # if start_pos or end_pos: |
|
529 | 529 | # filter_.append({'start': start_pos}) |
|
530 | 530 | # filter_.append({'end': end_pos}) |
|
531 | 531 | |
|
532 | 532 | if filter_: |
|
533 | 533 | revfilters = { |
|
534 | 534 | 'branch_name': branch_name, |
|
535 | 535 | 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None, |
|
536 | 536 | 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None, |
|
537 | 537 | 'start': start_pos, |
|
538 | 538 | 'end': end_pos, |
|
539 | 539 | } |
|
540 | 540 | commit_ids = self._get_commit_ids(filters=revfilters) |
|
541 | 541 | |
|
542 | 542 | else: |
|
543 | 543 | commit_ids = self.commit_ids |
|
544 | 544 | |
|
545 | 545 | if start_pos or end_pos: |
|
546 | 546 | commit_ids = commit_ids[start_pos: end_pos] |
|
547 | 547 | |
|
548 | 548 | return CollectionGenerator(self, commit_ids, pre_load=pre_load, |
|
549 | 549 | translate_tag=translate_tags) |
|
550 | 550 | |
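A usage sketch for get_commits, assuming a populated `repo`; the branch name and dates are illustrative. Date bounds are passed to git rev-list as --since/--until, so both are applied when given.

    import datetime

    commits = repo.get_commits(
        branch_name='master',
        start_date=datetime.datetime(2023, 1, 1),
        end_date=datetime.datetime(2023, 6, 30))
    for commit in commits:  # CollectionGenerator yields GitCommit objects
        print(commit.raw_id)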
|
551 | 551 | def get_diff( |
|
552 | 552 | self, commit1, commit2, path='', ignore_whitespace=False, |
|
553 | 553 | context=3, path1=None): |
|
554 | 554 | """ |
|
555 | 555 | Returns (git like) *diff*, as plain text. Shows changes introduced by |
|
556 | 556 | ``commit2`` since ``commit1``. |
|
557 | 557 | |
|
558 | 558 | :param commit1: Entry point from which diff is shown. Can be

559 | 559 | ``self.EMPTY_COMMIT`` - in this case, the patch shows all

560 | 560 | the changes from the empty state of the repository up to ``commit2``

561 | 561 | :param commit2: Until which commit changes should be shown.
|
562 | 562 | :param path: |
|
563 | 563 | :param ignore_whitespace: If set to ``True``, would not show whitespace |
|
564 | 564 | changes. Defaults to ``False``. |
|
565 | 565 | :param context: How many lines before/after changed lines should be |
|
566 | 566 | shown. Defaults to ``3``. |
|
567 | 567 | :param path1: |
|
568 | 568 | """ |
|
569 | 569 | self._validate_diff_commits(commit1, commit2) |
|
570 | 570 | if path1 is not None and path1 != path: |
|
571 | 571 | raise ValueError("Diff of two different paths not supported.") |
|
572 | 572 | |
|
573 | 573 | if path: |
|
574 | 574 | file_filter = path |
|
575 | 575 | else: |
|
576 | 576 | file_filter = None |
|
577 | 577 | |
|
578 | 578 | diff = self._remote.diff( |
|
579 | 579 | commit1.raw_id, commit2.raw_id, file_filter=file_filter, |
|
580 | 580 | opt_ignorews=ignore_whitespace, |
|
581 | 581 | context=context) |
|
582 | 582 | |
|
583 | 583 | return GitDiff(diff) |
|
584 | 584 | |
|
585 | 585 | def strip(self, commit_id, branch_name): |
|
586 | 586 | commit = self.get_commit(commit_id=commit_id) |
|
587 | 587 | if commit.merge: |
|
588 | 588 | raise Exception('Cannot reset to merge commit') |
|
589 | 589 | |
|
590 | 590 | # parent is going to be the new head now |
|
591 | 591 | commit = commit.parents[0] |
|
592 | 592 | self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id) |
|
593 | 593 | |
|
594 | 594 | # clear cached properties |
|
595 | 595 | self._invalidate_prop_cache('commit_ids') |
|
596 | 596 | self._invalidate_prop_cache('_refs') |
|
597 | 597 | self._invalidate_prop_cache('branches') |
|
598 | 598 | |
|
599 | 599 | return len(self.commit_ids) |
|
600 | 600 | |
|
601 | 601 | def get_common_ancestor(self, commit_id1, commit_id2, repo2): |
|
602 | 602 | log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s', |
|
603 | 603 | self, commit_id1, repo2, commit_id2) |
|
604 | 604 | |
|
605 | 605 | if commit_id1 == commit_id2: |
|
606 | 606 | return commit_id1 |
|
607 | 607 | |
|
608 | 608 | if self != repo2: |
|
609 | 609 | commits = self._remote.get_missing_revs( |
|
610 | 610 | commit_id1, commit_id2, repo2.path) |
|
611 | 611 | if commits: |
|
612 | 612 | commit = repo2.get_commit(commits[-1]) |
|
613 | 613 | if commit.parents: |
|
614 | 614 | ancestor_id = commit.parents[0].raw_id |
|
615 | 615 | else: |
|
616 | 616 | ancestor_id = None |
|
617 | 617 | else: |
|
618 | 618 | # no commits from the other repo; ancestor_id is commit_id2
|
619 | 619 | ancestor_id = commit_id2 |
|
620 | 620 | else: |
|
621 | 621 | output, __ = self.run_git_command( |
|
622 | 622 | ['merge-base', commit_id1, commit_id2]) |
|
623 | 623 | ancestor_id = self.COMMIT_ID_PAT.findall(output)[0] |
|
624 | 624 | |
|
625 | 625 | log.debug('Found common ancestor with sha: %s', ancestor_id) |
|
626 | 626 | |
|
627 | 627 | return ancestor_id |
|
628 | 628 | |
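For two commits in the same repository, get_common_ancestor boils down to git merge-base; a rough equivalent of that branch, where `sha1` and `sha2` stand for the two commit ids.

    output, _ = repo.run_git_command(['merge-base', sha1, sha2])
    ancestor_id = repo.COMMIT_ID_PAT.findall(output)[0]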
|
629 | 629 | def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None): |
|
630 | 630 | repo1 = self |
|
631 | 631 | ancestor_id = None |
|
632 | 632 | |
|
633 | 633 | if commit_id1 == commit_id2: |
|
634 | 634 | commits = [] |
|
635 | 635 | elif repo1 != repo2: |
|
636 | 636 | missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2, |
|
637 | 637 | repo2.path) |
|
638 | 638 | commits = [ |
|
639 | 639 | repo2.get_commit(commit_id=commit_id, pre_load=pre_load) |
|
640 | 640 | for commit_id in reversed(missing_ids)] |
|
641 | 641 | else: |
|
642 | 642 | output, __ = repo1.run_git_command( |
|
643 | 643 | ['log', '--reverse', '--pretty=format: %H', '-s', |
|
644 | 644 | f'{commit_id1}..{commit_id2}']) |
|
645 | 645 | commits = [ |
|
646 | 646 | repo1.get_commit(commit_id=commit_id, pre_load=pre_load) |
|
647 | 647 | for commit_id in self.COMMIT_ID_PAT.findall(output)] |
|
648 | 648 | |
|
649 | 649 | return commits |
|
650 | 650 | |
|
651 | 651 | @LazyProperty |
|
652 | 652 | def in_memory_commit(self): |
|
653 | 653 | """ |
|
654 | 654 | Returns ``GitInMemoryCommit`` object for this repository. |
|
655 | 655 | """ |
|
656 | 656 | return GitInMemoryCommit(self) |
|
657 | 657 | |
|
658 | 658 | def pull(self, url, commit_ids=None, update_after=False): |
|
659 | 659 | """ |
|
660 | 660 | Pull changes from an external location. Pull is different in GIT

661 | 661 | from fetch, since it does a checkout
|
662 | 662 | |
|
663 | 663 | :param commit_ids: Optional. Can be set to a list of commit ids |
|
664 | 664 | which shall be pulled from the other repository. |
|
665 | 665 | """ |
|
666 | 666 | refs = None |
|
667 | 667 | if commit_ids is not None: |
|
668 | 668 | remote_refs = self._remote.get_remote_refs(url) |
|
669 | 669 | refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids] |
|
670 | 670 | self._remote.pull(url, refs=refs, update_after=update_after) |
|
671 | 671 | self._remote.invalidate_vcs_cache() |
|
672 | 672 | |
|
673 | 673 | def fetch(self, url, commit_ids=None): |
|
674 | 674 | """ |
|
675 | 675 | Fetch all git objects from external location. |
|
676 | 676 | """ |
|
677 | 677 | self._remote.sync_fetch(url, refs=commit_ids) |
|
678 | 678 | self._remote.invalidate_vcs_cache() |
|
679 | 679 | |
|
680 | 680 | def push(self, url): |
|
681 | 681 | refs = None |
|
682 | 682 | self._remote.sync_push(url, refs=refs) |
|
683 | 683 | |
|
684 | 684 | def set_refs(self, ref_name, commit_id): |
|
685 | 685 | self._remote.set_refs(ref_name, commit_id) |
|
686 | 686 | self._invalidate_prop_cache('_refs') |
|
687 | 687 | |
|
688 | 688 | def remove_ref(self, ref_name): |
|
689 | 689 | self._remote.remove_ref(ref_name) |
|
690 | 690 | self._invalidate_prop_cache('_refs') |
|
691 | 691 | |
|
692 | 692 | def run_gc(self, prune=True): |
|
693 | 693 | cmd = ['gc', '--aggressive'] |
|
694 | 694 | if prune: |
|
695 | 695 | cmd += ['--prune=now'] |
|
696 | 696 | _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False) |
|
697 | 697 | return stderr |
|
698 | 698 | |
|
699 | 699 | def _update_server_info(self): |
|
700 | 700 | """ |
|
701 | 701 | runs git's update-server-info command on this repo instance
|
702 | 702 | """ |
|
703 | 703 | self._remote.update_server_info() |
|
704 | 704 | |
|
705 | 705 | def _current_branch(self): |
|
706 | 706 | """ |
|
707 | 707 | Return the name of the current branch. |
|
708 | 708 | |
|
709 | 709 | It only works for non-bare repositories (i.e. repositories with a
|
710 | 710 | working copy) |
|
711 | 711 | """ |
|
712 | 712 | if self.bare: |
|
713 | 713 | raise RepositoryError('Bare git repos do not have active branches') |
|
714 | 714 | |
|
715 | 715 | if self.is_empty(): |
|
716 | 716 | return None |
|
717 | 717 | |
|
718 | 718 | stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD']) |
|
719 | 719 | return stdout.strip() |
|
720 | 720 | |
|
721 | 721 | def _checkout(self, branch_name, create=False, force=False): |
|
722 | 722 | """ |
|
723 | 723 | Checkout a branch in the working directory. |
|
724 | 724 | |
|
725 | 725 | It tries to create the branch if create is True, failing if the branch |
|
726 | 726 | already exists. |
|
727 | 727 | |
|
728 | 728 | It only works for non-bare repositories (i.e. repositories with a
|
729 | 729 | working copy) |
|
730 | 730 | """ |
|
731 | 731 | if self.bare: |
|
732 | 732 | raise RepositoryError('Cannot checkout branches in a bare git repo') |
|
733 | 733 | |
|
734 | 734 | cmd = ['checkout'] |
|
735 | 735 | if force: |
|
736 | 736 | cmd.append('-f') |
|
737 | 737 | if create: |
|
738 | 738 | cmd.append('-b') |
|
739 | 739 | cmd.append(branch_name) |
|
740 | 740 | self.run_git_command(cmd, fail_on_stderr=False) |
|
741 | 741 | |
|
742 | 742 | def _create_branch(self, branch_name, commit_id): |
|
743 | 743 | """ |
|
744 | 744 | creates a branch in a GIT repo |
|
745 | 745 | """ |
|
746 | 746 | self._remote.create_branch(branch_name, commit_id) |
|
747 | 747 | |
|
748 | 748 | def _identify(self): |
|
749 | 749 | """ |
|
750 | 750 | Return the current state of the working directory. |
|
751 | 751 | """ |
|
752 | 752 | if self.bare: |
|
753 | 753 | raise RepositoryError('Bare git repos do not have active branches') |
|
754 | 754 | |
|
755 | 755 | if self.is_empty(): |
|
756 | 756 | return None |
|
757 | 757 | |
|
758 | 758 | stdout, _ = self.run_git_command(['rev-parse', 'HEAD']) |
|
759 | 759 | return stdout.strip() |
|
760 | 760 | |
|
761 | 761 | def _local_clone(self, clone_path, branch_name, source_branch=None): |
|
762 | 762 | """ |
|
763 | 763 | Create a local clone of the current repo. |
|
764 | 764 | """ |
|
765 | 765 | # N.B.(skreft): the --branch option is required as otherwise the shallow |
|
766 | 766 | # clone will only fetch the active branch. |
|
767 | 767 | cmd = ['clone', '--branch', branch_name, |
|
768 | 768 | self.path, os.path.abspath(clone_path)] |
|
769 | 769 | |
|
770 | 770 | self.run_git_command(cmd, fail_on_stderr=False) |
|
771 | 771 | |
|
772 | 772 | # if we are given a different source branch, make sure we also fetch it for
|
773 | 773 | # merge conditions |
|
774 | 774 | if source_branch and source_branch != branch_name: |
|
775 | 775 | # check if the ref exists. |
|
776 | 776 | shadow_repo = GitRepository(os.path.abspath(clone_path)) |
|
777 | 777 | if shadow_repo.get_remote_ref(source_branch): |
|
778 | 778 | cmd = ['fetch', self.path, source_branch] |
|
779 | 779 | self.run_git_command(cmd, fail_on_stderr=False) |
|
780 | 780 | |
|
781 | 781 | def _local_fetch(self, repository_path, branch_name, use_origin=False): |
|
782 | 782 | """ |
|
783 | 783 | Fetch a branch from a local repository. |
|
784 | 784 | """ |
|
785 | 785 | repository_path = os.path.abspath(repository_path) |
|
786 | 786 | if repository_path == self.path: |
|
787 | 787 | raise ValueError('Cannot fetch from the same repository') |
|
788 | 788 | |
|
789 | 789 | if use_origin: |
|
790 | 790 | branch_name = '+{branch}:refs/heads/{branch}'.format( |
|
791 | 791 | branch=branch_name) |
|
792 | 792 | |
|
793 | 793 | cmd = ['fetch', '--no-tags', '--update-head-ok', |
|
794 | 794 | repository_path, branch_name] |
|
795 | 795 | self.run_git_command(cmd, fail_on_stderr=False) |
|
796 | 796 | |
|
797 | 797 | def _local_reset(self, branch_name): |
|
798 | 798 | branch_name = f'{branch_name}' |
|
799 | 799 | cmd = ['reset', '--hard', branch_name, '--'] |
|
800 | 800 | self.run_git_command(cmd, fail_on_stderr=False) |
|
801 | 801 | |
|
802 | 802 | def _last_fetch_heads(self): |
|
803 | 803 | """ |
|
804 | 804 | Return the last fetched heads that need merging. |
|
805 | 805 | |
|
806 | 806 | The algorithm is defined at |
|
807 | 807 | https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283 |
|
808 | 808 | """ |
|
809 | 809 | if not self.bare: |
|
810 | 810 | fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD') |
|
811 | 811 | else: |
|
812 | 812 | fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD') |
|
813 | 813 | |
|
814 | 814 | heads = [] |
|
815 | 815 | with open(fetch_heads_path) as f: |
|
816 | 816 | for line in f: |
|
817 | 817 | if ' not-for-merge ' in line: |
|
818 | 818 | continue |
|
819 | 819 | line = re.sub('\t.*', '', line, flags=re.DOTALL) |
|
820 | 820 | heads.append(line) |
|
821 | 821 | |
|
822 | 822 | return heads |
|
823 | 823 | |
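A mini-demo of the FETCH_HEAD parsing loop above; the file contents are illustrative. Lines carrying the not-for-merge marker are skipped, and everything from the first tab onwards is stripped from the kept lines.

    import io
    import re

    fetch_head = io.StringIO(
        "a1b2c3\t\tbranch 'master' of /srv/repo\n"
        "d4e5f6\t not-for-merge \tbranch 'dev' of /srv/repo\n")

    heads = []
    for line in fetch_head:
        if ' not-for-merge ' in line:
            continue
        line = re.sub('\t.*', '', line, flags=re.DOTALL)
        heads.append(line)

    assert heads == ['a1b2c3']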
|
824 | 824 | def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False): |
|
825 | 825 | return GitRepository(shadow_repository_path, with_wire={"cache": cache}) |
|
826 | 826 | |
|
827 | 827 | def _local_pull(self, repository_path, branch_name, ff_only=True): |
|
828 | 828 | """ |
|
829 | 829 | Pull a branch from a local repository. |
|
830 | 830 | """ |
|
831 | 831 | if self.bare: |
|
832 | 832 | raise RepositoryError('Cannot pull into a bare git repository') |
|
833 | 833 | # N.B.(skreft): The --ff-only option is to make sure this is a |
|
834 | 834 | # fast-forward (i.e., we are only pulling new changes and there are no |
|
835 | 835 | # conflicts with our current branch) |
|
836 | 836 | # Additionally, that option needs to go before --no-tags, otherwise git |
|
837 | 837 | # pull complains about it being an unknown flag. |
|
838 | 838 | cmd = ['pull'] |
|
839 | 839 | if ff_only: |
|
840 | 840 | cmd.append('--ff-only') |
|
841 | 841 | cmd.extend(['--no-tags', repository_path, branch_name]) |
|
842 | 842 | self.run_git_command(cmd, fail_on_stderr=False) |
|
843 | 843 | |
|
844 | 844 | def _local_merge(self, merge_message, user_name, user_email, heads): |
|
845 | 845 | """ |
|
846 | 846 | Merge the given head into the checked out branch. |
|
847 | 847 | |
|
848 | 848 | It will force a merge commit. |
|
849 | 849 | |
|
850 | 850 | Currently it raises an error if the repo is empty, as it is not possible |
|
851 | 851 | to create a merge commit in an empty repo. |
|
852 | 852 | |
|
853 | 853 | :param merge_message: The message to use for the merge commit. |
|
854 | 854 | :param heads: the heads to merge. |
|
855 | 855 | """ |
|
856 | 856 | if self.bare: |
|
857 | 857 | raise RepositoryError('Cannot merge into a bare git repository') |
|
858 | 858 | |
|
859 | 859 | if not heads: |
|
860 | 860 | return |
|
861 | 861 | |
|
862 | 862 | if self.is_empty(): |
|
863 | 863 | # TODO(skreft): do something more robust in this case. |
|
864 | 864 | raise RepositoryError('Do not know how to merge into empty repositories yet') |
|
865 | 865 | unresolved = None |
|
866 | 866 | |
|
867 | 867 | # N.B.(skreft): the --no-ff option is used to enforce the creation of a |
|
868 | 868 | # commit message. We also specify the user who is doing the merge. |
|
869 | 869 | cmd = ['-c', f'user.name="{user_name}"', |
|
870 | 870 | '-c', f'user.email={user_email}', |
|
871 | 871 | 'merge', '--no-ff', '-m', safe_str(merge_message)] |
|
872 | 872 | |
|
873 | 873 | merge_cmd = cmd + heads |
|
874 | 874 | |
|
875 | 875 | try: |
|
876 | 876 | self.run_git_command(merge_cmd, fail_on_stderr=False) |
|
877 | 877 | except RepositoryError: |
|
878 | 878 | files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'], |
|
879 | 879 | fail_on_stderr=False)[0].splitlines() |
|
880 | 880 | # NOTE(marcink): we add U notation for consistency with HG backend output
|
881 | 881 | unresolved = [f'U {f}' for f in files] |
|
882 | 882 | |
|
883 | 883 | # Cleanup any merge leftovers |
|
884 | 884 | self._remote.invalidate_vcs_cache() |
|
885 | 885 | self.run_git_command(['merge', '--abort'], fail_on_stderr=False) |
|
886 | 886 | |
|
887 | 887 | if unresolved: |
|
888 | 888 | raise UnresolvedFilesInRepo(unresolved) |
|
889 | 889 | else: |
|
890 | 890 | raise |
|
891 | 891 | |
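A failure-path sketch for _local_merge, with hypothetical names: a conflicting merge surfaces as UnresolvedFilesInRepo, each entry prefixed with 'U ' to mirror the HG backend output, after the merge has been aborted and the workspace cleaned up.

    try:
        shadow_repo._local_merge('merge message', 'Jane Doe',
                                 'jane@example.com', [source_commit_id])
    except UnresolvedFilesInRepo as e:
        conflicted = e.args[0]  # e.g. ['U src/conflicted.py']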
|
892 | 892 | def _local_push( |
|
893 | 893 | self, source_branch, repository_path, target_branch, |
|
894 | 894 | enable_hooks=False, rc_scm_data=None): |
|
895 | 895 | """ |
|
896 | 896 | Push the source_branch to the given repository and target_branch. |
|
897 | 897 | |
|
898 | 898 | Currently it if the target_branch is not master and the target repo is |
|
899 | 899 | empty, the push will work, but then GitRepository won't be able to find |
|
900 | 900 | the pushed branch or the commits. As the HEAD will be corrupted (i.e., |
|
901 | 901 | pointing to master, which does not exist). |
|
902 | 902 | |
|
903 | 903 | It does not run the hooks in the target repo. |
|
904 | 904 | """ |
|
905 | 905 | # TODO(skreft): deal with the case in which the target repo is empty, |
|
906 | 906 | # and the target_branch is not master. |
|
907 | 907 | target_repo = GitRepository(repository_path) |
|
908 | 908 | if (not target_repo.bare and |
|
909 | 909 | target_repo._current_branch() == target_branch): |
|
910 | 910 | # Git prevents pushing to the checked out branch, so simulate it by |
|
911 | 911 | # pulling into the target repository. |
|
912 | 912 | target_repo._local_pull(self.path, source_branch) |
|
913 | 913 | else: |
|
914 | 914 | cmd = ['push', os.path.abspath(repository_path), |
|
915 | 915 | f'{source_branch}:{target_branch}'] |
|
916 | 916 | gitenv = {} |
|
917 | 917 | if rc_scm_data: |
|
918 | 918 | gitenv.update({'RC_SCM_DATA': rc_scm_data}) |
|
919 | 919 | |
|
920 | 920 | if not enable_hooks: |
|
921 | 921 | gitenv['RC_SKIP_HOOKS'] = '1' |
|
922 | 922 | self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv) |
|
923 | 923 | |
|
924 | 924 | def _get_new_pr_branch(self, source_branch, target_branch): |
|
925 | 925 | prefix = f'pr_{source_branch}-{target_branch}_' |
|
926 | 926 | pr_branches = [] |
|
927 | 927 | for branch in self.branches: |
|
928 | 928 | if branch.startswith(prefix): |
|
929 | 929 | pr_branches.append(int(branch[len(prefix):])) |
|
930 | 930 | |
|
931 | 931 | if not pr_branches: |
|
932 | 932 | branch_id = 0 |
|
933 | 933 | else: |
|
934 | 934 | branch_id = max(pr_branches) + 1 |
|
935 | 935 | |
|
936 | 936 | return '%s%d' % (prefix, branch_id) |
|
937 | 937 | |
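A naming sketch for _get_new_pr_branch with illustrative branch names: the next id is one past the highest existing suffix for the same source/target pair.

    prefix = 'pr_feat-master_'
    existing = ['pr_feat-master_0', 'pr_feat-master_1', 'master']
    ids = [int(b[len(prefix):]) for b in existing if b.startswith(prefix)]
    assert '%s%d' % (prefix, max(ids) + 1 if ids else 0) == 'pr_feat-master_2'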
|
938 | 938 | def _maybe_prepare_merge_workspace( |
|
939 | 939 | self, repo_id, workspace_id, target_ref, source_ref): |
|
940 | 940 | shadow_repository_path = self._get_shadow_repository_path( |
|
941 | 941 | self.path, repo_id, workspace_id) |
|
942 | 942 | if not os.path.exists(shadow_repository_path): |
|
943 | 943 | self._local_clone( |
|
944 | 944 | shadow_repository_path, target_ref.name, source_ref.name) |
|
945 | 945 | log.debug('Prepared %s shadow repository in %s', |
|
946 | 946 | self.alias, shadow_repository_path) |
|
947 | 947 | |
|
948 | 948 | return shadow_repository_path |
|
949 | 949 | |
|
950 | 950 | def _merge_repo(self, repo_id, workspace_id, target_ref, |
|
951 | 951 | source_repo, source_ref, merge_message, |
|
952 | 952 | merger_name, merger_email, dry_run=False, |
|
953 | 953 | use_rebase=False, close_branch=False): |
|
954 | 954 | |
|
955 | 955 | log.debug('Executing merge_repo with %s strategy, dry_run mode:%s', |
|
956 | 956 | 'rebase' if use_rebase else 'merge', dry_run) |
|
957 | ||
|
957 | 958 | if target_ref.commit_id != self.branches[target_ref.name]: |
|
958 | 959 | log.warning('Target ref %s commit mismatch %s vs %s', target_ref, |
|
959 | 960 | target_ref.commit_id, self.branches[target_ref.name]) |
|
960 | 961 | return MergeResponse( |
|
961 | 962 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD, |
|
962 | 963 | metadata={'target_ref': target_ref}) |
|
963 | 964 | |
|
964 | 965 | shadow_repository_path = self._maybe_prepare_merge_workspace( |
|
965 | 966 | repo_id, workspace_id, target_ref, source_ref) |
|
966 | 967 | shadow_repo = self.get_shadow_instance(shadow_repository_path) |
|
967 | 968 | |
|
968 | 969 | # checkout source, if it's different. Otherwise we could not |
|
969 | 970 | # fetch proper commits for merge testing |
|
970 | 971 | if source_ref.name != target_ref.name: |
|
971 | 972 | if shadow_repo.get_remote_ref(source_ref.name): |
|
972 | 973 | shadow_repo._checkout(source_ref.name, force=True) |
|
973 | 974 | |
|
974 | 975 | # checkout target, and fetch changes |
|
975 | 976 | shadow_repo._checkout(target_ref.name, force=True) |
|
976 | 977 | |
|
977 | 978 | # fetch/reset pulls the target, in case it has changed;

978 | 979 | # this handles even force-pushed changes
|
979 | 980 | shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True) |
|
980 | 981 | shadow_repo._local_reset(target_ref.name) |
|
981 | 982 | |
|
982 | 983 | # Need to reload repo to invalidate the cache, or otherwise we cannot |
|
983 | 984 | # retrieve the last target commit. |
|
984 | 985 | shadow_repo = self.get_shadow_instance(shadow_repository_path) |
|
985 | 986 | if target_ref.commit_id != shadow_repo.branches[target_ref.name]: |
|
986 | 987 | log.warning('Shadow Target ref %s commit mismatch %s vs %s', |
|
987 | 988 | target_ref, target_ref.commit_id, |
|
988 | 989 | shadow_repo.branches[target_ref.name]) |
|
989 | 990 | return MergeResponse( |
|
990 | 991 | False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD, |
|
991 | 992 | metadata={'target_ref': target_ref}) |
|
992 | 993 | |
|
993 | 994 | # calculate new branch |
|
994 | 995 | pr_branch = shadow_repo._get_new_pr_branch( |
|
995 | 996 | source_ref.name, target_ref.name) |
|
996 | 997 | log.debug('using pull-request merge branch: `%s`', pr_branch) |
|
997 | 998 | # checkout to temp branch, and fetch changes |
|
998 | 999 | shadow_repo._checkout(pr_branch, create=True) |
|
999 | 1000 | try: |
|
1000 | 1001 | shadow_repo._local_fetch(source_repo.path, source_ref.name) |
|
1001 | 1002 | except RepositoryError: |
|
1002 | 1003 | log.exception('Failure when doing local fetch on ' |
|
1003 | 1004 | 'shadow repo: %s', shadow_repo) |
|
1004 | 1005 | return MergeResponse( |
|
1005 | 1006 | False, False, None, MergeFailureReason.MISSING_SOURCE_REF, |
|
1006 | 1007 | metadata={'source_ref': source_ref}) |
|
1007 | 1008 | |
|
1008 | 1009 | merge_ref = None |
|
1009 | 1010 | merge_failure_reason = MergeFailureReason.NONE |
|
1010 | 1011 | metadata = {} |
|
1011 | 1012 | try: |
|
1012 | 1013 | shadow_repo._local_merge(merge_message, merger_name, merger_email, |
|
1013 | 1014 | [source_ref.commit_id]) |
|
1014 | 1015 | merge_possible = True |
|
1015 | 1016 | |
|
1016 | 1017 | # Need to invalidate the cache, or otherwise we |
|
1017 | 1018 | # cannot retrieve the merge commit. |
|
1018 | 1019 | shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path) |
|
1019 | 1020 | merge_commit_id = shadow_repo.branches[pr_branch] |
|
1020 | 1021 | |
|
1021 | 1022 | # Set a reference pointing to the merge commit. This reference may |
|
1022 | 1023 | # be used to easily identify the last successful merge commit in |
|
1023 | 1024 | # the shadow repository. |
|
1024 | 1025 | shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id) |
|
1025 | 1026 | merge_ref = Reference('branch', 'pr-merge', merge_commit_id) |
|
1026 | 1027 | except RepositoryError as e: |
|
1027 | 1028 | log.exception('Failure when doing local merge on git shadow repo') |
|
1028 | 1029 | if isinstance(e, UnresolvedFilesInRepo): |
|
1029 | 1030 | metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0])) |
|
1030 | 1031 | |
|
1031 | 1032 | merge_possible = False |
|
1032 | 1033 | merge_failure_reason = MergeFailureReason.MERGE_FAILED |
|
1033 | 1034 | |
|
1034 | 1035 | if merge_possible and not dry_run: |
|
1035 | 1036 | try: |
|
1036 | 1037 | shadow_repo._local_push( |
|
1037 | 1038 | pr_branch, self.path, target_ref.name, enable_hooks=True, |
|
1038 | 1039 | rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA')) |
|
1039 | 1040 | merge_succeeded = True |
|
1040 | 1041 | except RepositoryError: |
|
1041 | 1042 | log.exception( |
|
1042 | 1043 | 'Failure when doing local push from the shadow ' |
|
1043 | 1044 | 'repository to the target repository at %s.', self.path) |
|
1044 | 1045 | merge_succeeded = False |
|
1045 | 1046 | merge_failure_reason = MergeFailureReason.PUSH_FAILED |
|
1046 | 1047 | metadata['target'] = 'git shadow repo' |
|
1047 | 1048 | metadata['merge_commit'] = pr_branch |
|
1048 | 1049 | else: |
|
1049 | 1050 | merge_succeeded = False |
|
1050 | 1051 | |
|
1051 | 1052 | return MergeResponse( |
|
1052 | 1053 | merge_possible, merge_succeeded, merge_ref, merge_failure_reason, |
|
1053 | 1054 | metadata=metadata) |
@@ -1,427 +1,428 b'' | |||
|
1 | 1 | |
|
2 | 2 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
3 | 3 | # |
|
4 | 4 | # This program is free software: you can redistribute it and/or modify |
|
5 | 5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | 6 | # (only), as published by the Free Software Foundation. |
|
7 | 7 | # |
|
8 | 8 | # This program is distributed in the hope that it will be useful, |
|
9 | 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | 11 | # GNU General Public License for more details. |
|
12 | 12 | # |
|
13 | 13 | # You should have received a copy of the GNU Affero General Public License |
|
14 | 14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | 15 | # |
|
16 | 16 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | 19 | |
|
20 | 20 | """ |
|
21 | 21 | Helpers for fixture generation |
|
22 | 22 | """ |
|
23 | 23 | |
|
24 | 24 | import os |
|
25 | 25 | import time |
|
26 | 26 | import tempfile |
|
27 | 27 | import shutil |
|
28 | 28 | import configparser |
|
29 | 29 | |
|
30 | 30 | from rhodecode.model.settings import SettingsModel |
|
31 | from rhodecode.tests import * | |
|
32 | 31 | from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist, UserEmailMap |
|
33 | 32 | from rhodecode.model.meta import Session |
|
34 | 33 | from rhodecode.model.repo import RepoModel |
|
35 | 34 | from rhodecode.model.user import UserModel |
|
36 | 35 | from rhodecode.model.repo_group import RepoGroupModel |
|
37 | 36 | from rhodecode.model.user_group import UserGroupModel |
|
38 | 37 | from rhodecode.model.gist import GistModel |
|
39 | 38 | from rhodecode.model.auth_token import AuthTokenModel |
|
40 | 39 | from rhodecode.model.scm import ScmModel |
|
41 | 40 | from rhodecode.authentication.plugins.auth_rhodecode import \ |
|
42 | 41 | RhodeCodeAuthPlugin |
|
43 | 42 | |
|
43 | from rhodecode.tests import TEST_USER_ADMIN_LOGIN | |
|
44 | ||
|
44 | 45 | dn = os.path.dirname |
|
45 | 46 | FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures') |
|
46 | 47 | |
|
47 | 48 | |
|
48 | 49 | def error_function(*args, **kwargs): |
|
49 | 50 | raise Exception('Total Crash !') |
|
50 | 51 | |
|
51 | 52 | |
|
52 | 53 | class TestINI(object): |
|
53 | 54 | """ |
|
54 | 55 | Allows to create a new test.ini file as a copy of existing one with edited |
|
55 | 56 | data. Example usage:: |
|
56 | 57 | |
|
57 | 58 | with TestINI('test.ini', [{'section':{'key':val'}]) as new_test_ini_path: |
|
58 | 59 | print('paster server %s' % new_test_ini) |
|
59 | 60 | """ |
|
60 | 61 | |
|
61 | 62 | def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT', |
|
62 | 63 | destroy=True, dir=None): |
|
63 | 64 | self.ini_file_path = ini_file_path |
|
64 | 65 | self.ini_params = ini_params |
|
65 | 66 | self.new_path = None |
|
66 | 67 | self.new_path_prefix = new_file_prefix |
|
67 | 68 | self._destroy = destroy |
|
68 | 69 | self._dir = dir |
|
69 | 70 | |
|
70 | 71 | def __enter__(self): |
|
71 | 72 | return self.create() |
|
72 | 73 | |
|
73 | 74 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
74 | 75 | self.destroy() |
|
75 | 76 | |
|
76 | 77 | def create(self): |
|
77 | 78 | parser = configparser.ConfigParser() |
|
78 | 79 | parser.read(self.ini_file_path) |
|
79 | 80 | |
|
80 | 81 | for data in self.ini_params: |
|
81 | 82 | section, ini_params = list(data.items())[0] |
|
82 | 83 | |
|
83 | 84 | for key, val in ini_params.items(): |
|
84 | 85 | parser[section][key] = str(val) |
|
85 | 86 | |
|
86 | 87 | with tempfile.NamedTemporaryFile( |
|
87 | 88 | mode='w', |
|
88 | 89 | prefix=self.new_path_prefix, suffix='.ini', dir=self._dir, |
|
89 | 90 | delete=False) as new_ini_file: |
|
90 | 91 | parser.write(new_ini_file) |
|
91 | 92 | self.new_path = new_ini_file.name |
|
92 | 93 | |
|
93 | 94 | return self.new_path |
|
94 | 95 | |
|
95 | 96 | def destroy(self): |
|
96 | 97 | if self._destroy: |
|
97 | 98 | os.remove(self.new_path) |
|
98 | 99 | |
|
99 | 100 | |
|
100 | 101 | class Fixture(object): |
|
101 | 102 | |
|
102 | 103 | def anon_access(self, status): |
|
103 | 104 | """ |
|
104 | 105 | Context process for disabling anonymous access. use like: |
|
105 | 106 | fixture = Fixture() |
|
106 | 107 | with fixture.anon_access(False): |
|
107 | 108 | #tests |
|
108 | 109 | |
|
109 | 110 | after this block anon access will be set to `not status` |
|
110 | 111 | """ |
|
111 | 112 | |
|
112 | 113 | class context(object): |
|
113 | 114 | def __enter__(self): |
|
114 | 115 | anon = User.get_default_user() |
|
115 | 116 | anon.active = status |
|
116 | 117 | Session().add(anon) |
|
117 | 118 | Session().commit() |
|
118 | 119 | time.sleep(1.5) # must sleep for cache (1s to expire) |
|
119 | 120 | |
|
120 | 121 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
121 | 122 | anon = User.get_default_user() |
|
122 | 123 | anon.active = not status |
|
123 | 124 | Session().add(anon) |
|
124 | 125 | Session().commit() |
|
125 | 126 | |
|
126 | 127 | return context() |
|
127 | 128 | |
|
128 | 129 | def auth_restriction(self, registry, auth_restriction): |
|
129 | 130 | """ |
|
130 | 131 | Context process for changing the builtin rhodecode plugin auth restrictions. |
|
131 | 132 | Use like: |
|
132 | 133 | fixture = Fixture() |
|
133 | 134 | with fixture.auth_restriction('super_admin'): |
|
134 | 135 | #tests |
|
135 | 136 | |
|
136 | 137 | after this block auth restriction will be taken off |
|
137 | 138 | """ |
|
138 | 139 | |
|
139 | 140 | class context(object): |
|
140 | 141 | def _get_plugin(self): |
|
141 | 142 | plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid) |
|
142 | 143 | plugin = RhodeCodeAuthPlugin(plugin_id) |
|
143 | 144 | return plugin |
|
144 | 145 | |
|
145 | 146 | def __enter__(self): |
|
146 | 147 | |
|
147 | 148 | plugin = self._get_plugin() |
|
148 | 149 | plugin.create_or_update_setting('auth_restriction', auth_restriction) |
|
149 | 150 | Session().commit() |
|
150 | 151 | SettingsModel().invalidate_settings_cache(hard=True) |
|
151 | 152 | |
|
152 | 153 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
153 | 154 | |
|
154 | 155 | plugin = self._get_plugin() |
|
155 | 156 | plugin.create_or_update_setting( |
|
156 | 157 | 'auth_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE) |
|
157 | 158 | Session().commit() |
|
158 | 159 | SettingsModel().invalidate_settings_cache(hard=True) |
|
159 | 160 | |
|
160 | 161 | return context() |
|
161 | 162 | |
|
162 | 163 | def scope_restriction(self, registry, scope_restriction): |
|
163 | 164 | """ |
|
164 | 165 | Context process for changing the builtin rhodecode plugin scope restrictions. |
|
165 | 166 | Use like: |
|
166 | 167 | fixture = Fixture() |
|
167 | 168 | with fixture.scope_restriction('scope_http'): |
|
168 | 169 | #tests |
|
169 | 170 | |
|
170 | 171 | after this block scope restriction will be taken off |
|
171 | 172 | """ |
|
172 | 173 | |
|
173 | 174 | class context(object): |
|
174 | 175 | def _get_plugin(self): |
|
175 | 176 | plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid) |
|
176 | 177 | plugin = RhodeCodeAuthPlugin(plugin_id) |
|
177 | 178 | return plugin |
|
178 | 179 | |
|
179 | 180 | def __enter__(self): |
|
180 | 181 | plugin = self._get_plugin() |
|
181 | 182 | plugin.create_or_update_setting('scope_restriction', scope_restriction) |
|
182 | 183 | Session().commit() |
|
183 | 184 | SettingsModel().invalidate_settings_cache(hard=True) |
|
184 | 185 | |
|
185 | 186 | def __exit__(self, exc_type, exc_val, exc_tb): |
|
186 | 187 | plugin = self._get_plugin() |
|
187 | 188 | plugin.create_or_update_setting( |
|
188 | 189 | 'scope_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL) |
|
189 | 190 | Session().commit() |
|
190 | 191 | SettingsModel().invalidate_settings_cache(hard=True) |
|
191 | 192 | |
|
192 | 193 | return context() |
|
193 | 194 | |
|
194 | 195 | def _get_repo_create_params(self, **custom): |
|
195 | 196 | repo_type = custom.get('repo_type') or 'hg' |
|
196 | 197 | |
|
197 | 198 | default_landing_ref, landing_ref_lbl = ScmModel.backend_landing_ref(repo_type) |
|
198 | 199 | |
|
199 | 200 | defs = { |
|
200 | 201 | 'repo_name': None, |
|
201 | 202 | 'repo_type': repo_type, |
|
202 | 203 | 'clone_uri': '', |
|
203 | 204 | 'push_uri': '', |
|
204 | 205 | 'repo_group': '-1', |
|
205 | 206 | 'repo_description': 'DESC', |
|
206 | 207 | 'repo_private': False, |
|
207 | 208 | 'repo_landing_commit_ref': default_landing_ref, |
|
208 | 209 | 'repo_copy_permissions': False, |
|
209 | 210 | 'repo_state': Repository.STATE_CREATED, |
|
210 | 211 | } |
|
211 | 212 | defs.update(custom) |
|
212 | 213 | if 'repo_name_full' not in custom: |
|
213 | 214 | defs.update({'repo_name_full': defs['repo_name']}) |
|
214 | 215 | |
|
215 | 216 | # fix the repo name if passed as repo_name_full |
|
216 | 217 | if defs['repo_name']: |
|
217 | 218 | defs['repo_name'] = defs['repo_name'].split('/')[-1] |
|
218 | 219 | |
|
219 | 220 | return defs |
|
220 | 221 | |
|
221 | 222 | def _get_group_create_params(self, **custom): |
|
222 | 223 | defs = { |
|
223 | 224 | 'group_name': None, |
|
224 | 225 | 'group_description': 'DESC', |
|
225 | 226 | 'perm_updates': [], |
|
226 | 227 | 'perm_additions': [], |
|
227 | 228 | 'perm_deletions': [], |
|
228 | 229 | 'group_parent_id': -1, |
|
229 | 230 | 'enable_locking': False, |
|
230 | 231 | 'recursive': False, |
|
231 | 232 | } |
|
232 | 233 | defs.update(custom) |
|
233 | 234 | |
|
234 | 235 | return defs |
|
235 | 236 | |
|
236 | 237 | def _get_user_create_params(self, name, **custom): |
|
237 | 238 | defs = { |
|
238 | 239 | 'username': name, |
|
239 | 240 | 'password': 'qweqwe', |
|
240 | 241 | 'email': '%s+test@rhodecode.org' % name, |
|
241 | 242 | 'firstname': 'TestUser', |
|
242 | 243 | 'lastname': 'Test', |
|
243 | 244 | 'description': 'test description', |
|
244 | 245 | 'active': True, |
|
245 | 246 | 'admin': False, |
|
246 | 247 | 'extern_type': 'rhodecode', |
|
247 | 248 | 'extern_name': None, |
|
248 | 249 | } |
|
249 | 250 | defs.update(custom) |
|
250 | 251 | |
|
251 | 252 | return defs |
|
252 | 253 | |
|
253 | 254 | def _get_user_group_create_params(self, name, **custom): |
|
254 | 255 | defs = { |
|
255 | 256 | 'users_group_name': name, |
|
256 | 257 | 'user_group_description': 'DESC', |
|
257 | 258 | 'users_group_active': True, |
|
258 | 259 | 'user_group_data': {}, |
|
259 | 260 | } |
|
260 | 261 | defs.update(custom) |
|
261 | 262 | |
|
262 | 263 | return defs |
|
263 | 264 | |
|
264 | 265 | def create_repo(self, name, **kwargs): |
|
265 | 266 | repo_group = kwargs.get('repo_group') |
|
266 | 267 | if isinstance(repo_group, RepoGroup): |
|
267 | 268 | kwargs['repo_group'] = repo_group.group_id |
|
268 | 269 | name = name.split(Repository.NAME_SEP)[-1] |
|
269 | 270 | name = Repository.NAME_SEP.join((repo_group.group_name, name)) |
|
270 | 271 | |
|
271 | 272 | if 'skip_if_exists' in kwargs: |
|
272 | 273 | del kwargs['skip_if_exists'] |
|
273 | 274 | r = Repository.get_by_repo_name(name) |
|
274 | 275 | if r: |
|
275 | 276 | return r |
|
276 | 277 | |
|
277 | 278 | form_data = self._get_repo_create_params(repo_name=name, **kwargs) |
|
278 | 279 | cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
279 | 280 | RepoModel().create(form_data, cur_user) |
|
280 | 281 | Session().commit() |
|
281 | 282 | repo = Repository.get_by_repo_name(name) |
|
282 | 283 | assert repo |
|
283 | 284 | return repo |
|
284 | 285 | |
|
285 | 286 | def create_fork(self, repo_to_fork, fork_name, **kwargs): |
|
286 | 287 | repo_to_fork = Repository.get_by_repo_name(repo_to_fork) |
|
287 | 288 | |
|
288 | 289 | form_data = self._get_repo_create_params( |
|
289 | 290 | repo_name=fork_name, |
|
290 | 291 | fork_parent_id=repo_to_fork.repo_id, |
|
291 | 292 | repo_type=repo_to_fork.repo_type, |
|
292 | 293 | **kwargs) |
|
293 | 294 | |
|
294 | #TODO: fix it !! | |
|
295 | # TODO: fix it !! | |
|
295 | 296 | form_data['description'] = form_data['repo_description'] |
|
296 | 297 | form_data['private'] = form_data['repo_private'] |
|
297 | 298 | form_data['landing_rev'] = form_data['repo_landing_commit_ref'] |
|
298 | 299 | |
|
299 | 300 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
300 | 301 | RepoModel().create_fork(form_data, cur_user=owner) |
|
301 | 302 | Session().commit() |
|
302 | 303 | r = Repository.get_by_repo_name(fork_name) |
|
303 | 304 | assert r |
|
304 | 305 | return r |
|
305 | 306 | |
|
306 | 307 | def destroy_repo(self, repo_name, **kwargs): |
|
307 | 308 | RepoModel().delete(repo_name, pull_requests='delete', **kwargs) |
|
308 | 309 | Session().commit() |
|
309 | 310 | |
|
310 | 311 | def destroy_repo_on_filesystem(self, repo_name): |
|
311 | 312 | rm_path = os.path.join(RepoModel().repos_path, repo_name) |
|
312 | 313 | if os.path.isdir(rm_path): |
|
313 | 314 | shutil.rmtree(rm_path) |
|
314 | 315 | |
|
315 | 316 | def create_repo_group(self, name, **kwargs): |
|
316 | 317 | if 'skip_if_exists' in kwargs: |
|
317 | 318 | del kwargs['skip_if_exists'] |
|
318 | 319 | gr = RepoGroup.get_by_group_name(group_name=name) |
|
319 | 320 | if gr: |
|
320 | 321 | return gr |
|
321 | 322 | form_data = self._get_group_create_params(group_name=name, **kwargs) |
|
322 | 323 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
323 | 324 | gr = RepoGroupModel().create( |
|
324 | 325 | group_name=form_data['group_name'], |
|
325 | 326 | group_description=form_data['group_name'], |
|
326 | 327 | owner=owner) |
|
327 | 328 | Session().commit() |
|
328 | 329 | gr = RepoGroup.get_by_group_name(gr.group_name) |
|
329 | 330 | return gr |
|
330 | 331 | |
|
331 | 332 | def destroy_repo_group(self, repogroupid): |
|
332 | 333 | RepoGroupModel().delete(repogroupid) |
|
333 | 334 | Session().commit() |
|
334 | 335 | |
|
335 | 336 | def create_user(self, name, **kwargs): |
|
336 | 337 | if 'skip_if_exists' in kwargs: |
|
337 | 338 | del kwargs['skip_if_exists'] |
|
338 | 339 | user = User.get_by_username(name) |
|
339 | 340 | if user: |
|
340 | 341 | return user |
|
341 | 342 | form_data = self._get_user_create_params(name, **kwargs) |
|
342 | 343 | user = UserModel().create(form_data) |
|
343 | 344 | |
|
344 | 345 | # create token for user |
|
345 | 346 | AuthTokenModel().create( |
|
346 | 347 | user=user, description=u'TEST_USER_TOKEN') |
|
347 | 348 | |
|
348 | 349 | Session().commit() |
|
349 | 350 | user = User.get_by_username(user.username) |
|
350 | 351 | return user |
|
351 | 352 | |
|
352 | 353 | def destroy_user(self, userid): |
|
353 | 354 | UserModel().delete(userid) |
|
354 | 355 | Session().commit() |
|
355 | 356 | |
|
356 | 357 | def create_additional_user_email(self, user, email): |
|
357 | 358 | uem = UserEmailMap() |
|
358 | 359 | uem.user = user |
|
359 | 360 | uem.email = email |
|
360 | 361 | Session().add(uem) |
|
361 | 362 | return uem |
|
362 | 363 | |
|
363 | 364 | def destroy_users(self, userid_iter): |
|
364 | 365 | for user_id in userid_iter: |
|
365 | 366 | if User.get_by_username(user_id): |
|
366 | 367 | UserModel().delete(user_id) |
|
367 | 368 | Session().commit() |
|
368 | 369 | |
|
369 | 370 | def create_user_group(self, name, **kwargs): |
|
370 | 371 | if 'skip_if_exists' in kwargs: |
|
371 | 372 | del kwargs['skip_if_exists'] |
|
372 | 373 | gr = UserGroup.get_by_group_name(group_name=name) |
|
373 | 374 | if gr: |
|
374 | 375 | return gr |
|
375 | 376 | # map the 'active' flag to the real attribute, for API consistency of the fixtures
|
376 | 377 | if 'active' in kwargs: |
|
377 | 378 | kwargs['users_group_active'] = kwargs['active'] |
|
378 | 379 | del kwargs['active'] |
|
379 | 380 | form_data = self._get_user_group_create_params(name, **kwargs) |
|
380 | 381 | owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN) |
|
381 | 382 | user_group = UserGroupModel().create( |
|
382 | 383 | name=form_data['users_group_name'], |
|
383 | 384 | description=form_data['user_group_description'], |
|
384 | 385 | owner=owner, active=form_data['users_group_active'], |
|
385 | 386 | group_data=form_data['user_group_data']) |
|
386 | 387 | Session().commit() |
|
387 | 388 | user_group = UserGroup.get_by_group_name(user_group.users_group_name) |
|
388 | 389 | return user_group |
|
389 | 390 | |
|
390 | 391 | def destroy_user_group(self, usergroupid): |
|
391 | 392 | UserGroupModel().delete(user_group=usergroupid, force=True) |
|
392 | 393 | Session().commit() |
|
393 | 394 | |
|
394 | 395 | def create_gist(self, **kwargs): |
|
395 | 396 | form_data = { |
|
396 | 397 | 'description': 'new-gist', |
|
397 | 398 | 'owner': TEST_USER_ADMIN_LOGIN, |
|
398 | 399 | 'gist_type': GistModel.cls.GIST_PUBLIC, |
|
399 | 400 | 'lifetime': -1, |
|
400 | 401 | 'acl_level': Gist.ACL_LEVEL_PUBLIC, |
|
401 | 402 | 'gist_mapping': {b'filename1.txt': {'content': b'hello world'},} |
|
402 | 403 | } |
|
403 | 404 | form_data.update(kwargs) |
|
404 | 405 | gist = GistModel().create( |
|
405 | 406 | description=form_data['description'], owner=form_data['owner'], |
|
406 | 407 | gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'], |
|
407 | 408 | lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level'] |
|
408 | 409 | ) |
|
409 | 410 | Session().commit() |
|
410 | 411 | return gist |
|
411 | 412 | |
|
412 | 413 | def destroy_gists(self, gistid=None): |
|
413 | 414 | for g in GistModel.cls.get_all(): |
|
414 | 415 | if gistid: |
|
415 | 416 | if gistid == g.gist_access_id: |
|
416 | 417 | GistModel().delete(g) |
|
417 | 418 | else: |
|
418 | 419 | GistModel().delete(g) |
|
419 | 420 | Session().commit() |
|
420 | 421 | |
|
421 | 422 | def load_resource(self, resource_name, strip=False): |
|
422 | 423 | with open(os.path.join(FIXTURES, resource_name), 'rb') as f: |
|
423 | 424 | source = f.read() |
|
424 | 425 | if strip: |
|
425 | 426 | source = source.strip() |
|
426 | 427 | |
|
427 | 428 | return source |
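For orientation, a sketch of the typical create/destroy pairing these helpers support in a test (repo name and type are arbitrary, illustrative values):

    fixture = Fixture()
    repo = fixture.create_repo('sandbox-repo', repo_type='git',
                               skip_if_exists=True)
    try:
        assert repo.repo_name == 'sandbox-repo'
        # ... exercise code that needs a real Repository row ...
    finally:
        fixture.destroy_repo(repo.repo_name)
        fixture.destroy_repo_on_filesystem('sandbox-repo')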
@@ -1,825 +1,826 b'' | |||
|
1 | 1 | |
|
2 | 2 | # Copyright (C) 2010-2023 RhodeCode GmbH |
|
3 | 3 | # |
|
4 | 4 | # This program is free software: you can redistribute it and/or modify |
|
5 | 5 | # it under the terms of the GNU Affero General Public License, version 3 |
|
6 | 6 | # (only), as published by the Free Software Foundation. |
|
7 | 7 | # |
|
8 | 8 | # This program is distributed in the hope that it will be useful, |
|
9 | 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
10 | 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
11 | 11 | # GNU General Public License for more details. |
|
12 | 12 | # |
|
13 | 13 | # You should have received a copy of the GNU Affero General Public License |
|
14 | 14 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
|
15 | 15 | # |
|
16 | 16 | # This program is dual-licensed. If you wish to learn more about the |
|
17 | 17 | # RhodeCode Enterprise Edition, including its added features, Support services, |
|
18 | 18 | # and proprietary license terms, please see https://rhodecode.com/licenses/ |
|
19 | 19 | |
|
20 | 20 | import textwrap |
|
21 | 21 | |
|
22 | 22 | import mock |
|
23 | 23 | import pytest |
|
24 | 24 | |
|
25 | 25 | from rhodecode.lib.codeblocks import DiffSet |
|
26 | 26 | from rhodecode.lib.diffs import ( |
|
27 | 27 | DiffProcessor, |
|
28 | 28 | NEW_FILENODE, DEL_FILENODE, MOD_FILENODE, RENAMED_FILENODE, |
|
29 | 29 | CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE) |
|
30 | from rhodecode.lib.str_utils import safe_bytes | |
|
30 | ||
|
31 | 31 | from rhodecode.lib.utils2 import AttributeDict |
|
32 | 32 | from rhodecode.lib.vcs.backends.git import GitCommit |
|
33 | from rhodecode.tests.fixture import Fixture ||

33 | from rhodecode.tests.fixture import Fixture | |

34 | from rhodecode.tests import no_newline_id_generator | |
|
34 | 35 | from rhodecode.lib.vcs.backends.git.repository import GitDiff |
|
35 | 36 | from rhodecode.lib.vcs.backends.hg.repository import MercurialDiff |
|
36 | 37 | from rhodecode.lib.vcs.backends.svn.repository import SubversionDiff |
|
37 | 38 | |
|
38 | 39 | fixture = Fixture() |
|
39 | 40 | |
|
40 | 41 | |
|
41 | 42 | class TestMixedFilenameEncodings(object): |
|
42 | 43 | |
|
43 | 44 | @pytest.fixture(scope="class") |
|
44 | 45 | def raw_diff(self): |
|
45 | 46 | return fixture.load_resource( |
|
46 | 47 | 'hg_diff_mixed_filename_encodings.diff') |
|
47 | 48 | |
|
48 | 49 | @pytest.fixture() |
|
49 | 50 | def processor(self, raw_diff): |
|
50 | 51 | diff = MercurialDiff(raw_diff) |
|
51 | 52 | processor = DiffProcessor(diff, diff_format='newdiff') |
|
52 | 53 | return processor |
|
53 | 54 | |
|
54 | 55 | def test_filenames_are_decoded_to_unicode(self, processor): |
|
55 | 56 | diff_data = processor.prepare() |
|
56 | 57 | filenames = [item['filename'] for item in diff_data] |
|
57 | 58 | assert filenames == [ |
|
58 | 59 | u'spΓ€cial-utf8.txt', u'spοΏ½cial-cp1252.txt', u'spοΏ½cial-latin1.txt']
|
59 | 60 | |
|
60 | 61 | def test_raw_diff_is_decoded_to_unicode(self, processor): |
|
61 | 62 | diff_data = processor.prepare() |
|
62 | 63 | raw_diffs = [item['raw_diff'] for item in diff_data] |
|
63 | 64 | new_file_message = u'\nnew file mode 100644\n' |
|
64 | 65 | expected_raw_diffs = [ |
|
65 | 66 | u' a/spΓ€cial-utf8.txt b/spΓ€cial-utf8.txt' + new_file_message,

66 | 67 | u' a/spοΏ½cial-cp1252.txt b/spοΏ½cial-cp1252.txt' + new_file_message,

67 | 68 | u' a/spοΏ½cial-latin1.txt b/spοΏ½cial-latin1.txt' + new_file_message]
|
68 | 69 | assert raw_diffs == expected_raw_diffs |
|
69 | 70 | |
|
70 | 71 | def test_as_raw_preserves_the_encoding(self, processor, raw_diff): |
|
71 | 72 | assert processor.as_raw() == raw_diff |
|
72 | 73 | |
|
73 | 74 | |
|
74 | 75 | # TODO: mikhail: format the following data structure properly |
|
75 | 76 | DIFF_FIXTURES = [ |
|
76 | 77 | ('hg', |
|
77 | 78 | 'hg_diff_add_single_binary_file.diff', |
|
78 | 79 | [('US Warszawa.jpg', 'A', |
|
79 | 80 | {'added': 0, |
|
80 | 81 | 'deleted': 0, |
|
81 | 82 | 'binary': True, |
|
82 | 83 | 'old_mode': '', |
|
83 | 84 | 'new_mode': '100755', |
|
84 | 85 | 'ops': {NEW_FILENODE: 'new file 100755', |
|
85 | 86 | BIN_FILENODE: 'binary diff hidden'}}), |
|
86 | 87 | ]), |
|
87 | 88 | ('hg', |
|
88 | 89 | 'hg_diff_mod_single_binary_file.diff', |
|
89 | 90 | [('US Warszawa.jpg', 'M', |
|
90 | 91 | {'added': 0, |
|
91 | 92 | 'deleted': 0, |
|
92 | 93 | 'binary': True, |
|
93 | 94 | 'old_mode': '', |
|
94 | 95 | 'new_mode': '', |
|
95 | 96 | 'ops': {MOD_FILENODE: 'modified file', |
|
96 | 97 | BIN_FILENODE: 'binary diff hidden'}}), |
|
97 | 98 | ]), |
|
98 | 99 | ('hg', |
|
99 | 100 | 'hg_diff_mod_single_file_and_rename_and_chmod.diff', |
|
100 | 101 | [('README', 'M', |
|
101 | 102 | {'added': 3, |
|
102 | 103 | 'deleted': 0, |
|
103 | 104 | 'binary': False, |
|
104 | 105 | 'old_mode': '100755', |
|
105 | 106 | 'new_mode': '100644', |
|
106 | 107 | 'renamed': ('README.rst', 'README'), |
|
107 | 108 | 'ops': {MOD_FILENODE: 'modified file', |
|
108 | 109 | RENAMED_FILENODE: 'file renamed from README.rst to README', |
|
109 | 110 | CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}), |
|
110 | 111 | ]), |
|
111 | 112 | ('hg', |
|
112 | 113 | 'hg_diff_no_newline.diff', |
|
113 | 114 | [('server.properties', 'M', |
|
114 | 115 | {'added': 2, |
|
115 | 116 | 'deleted': 1, |
|
116 | 117 | 'binary': False, |
|
117 | 118 | 'old_mode': '', |
|
118 | 119 | 'new_mode': '', |
|
119 | 120 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
120 | 121 | ]), |
|
121 | 122 | ('hg', |
|
122 | 123 | 'hg_diff_mod_file_and_rename.diff', |
|
123 | 124 | [('README.rst', 'M', |
|
124 | 125 | {'added': 3, |
|
125 | 126 | 'deleted': 0, |
|
126 | 127 | 'binary': False, |
|
127 | 128 | 'old_mode': '', |
|
128 | 129 | 'new_mode': '', |
|
129 | 130 | 'renamed': ('README', 'README.rst'), |
|
130 | 131 | 'ops': {MOD_FILENODE: 'modified file', |
|
131 | 132 | RENAMED_FILENODE: 'file renamed from README to README.rst'}}), |
|
132 | 133 | ]), |
|
133 | 134 | ('hg', |
|
134 | 135 | 'hg_diff_del_single_binary_file.diff', |
|
135 | 136 | [('US Warszawa.jpg', 'D', |
|
136 | 137 | {'added': 0, |
|
137 | 138 | 'deleted': 0, |
|
138 | 139 | 'binary': True, |
|
139 | 140 | 'old_mode': '', |
|
140 | 141 | 'new_mode': '', |
|
141 | 142 | 'ops': {DEL_FILENODE: 'deleted file', |
|
142 | 143 | BIN_FILENODE: 'binary diff hidden'}}), |
|
143 | 144 | ]), |
|
144 | 145 | ('hg', |
|
145 | 146 | 'hg_diff_chmod_and_mod_single_binary_file.diff', |
|
146 | 147 | [('gravatar.png', 'M', |
|
147 | 148 | {'added': 0, |
|
148 | 149 | 'deleted': 0, |
|
149 | 150 | 'binary': True, |
|
150 | 151 | 'old_mode': '100644', |
|
151 | 152 | 'new_mode': '100755', |
|
152 | 153 | 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755', |
|
153 | 154 | BIN_FILENODE: 'binary diff hidden'}}), |
|
154 | 155 | ]), |
|
155 | 156 | ('hg', |
|
156 | 157 | 'hg_diff_chmod.diff', |
|
157 | 158 | [('file', 'M', |
|
158 | 159 | {'added': 0, |
|
159 | 160 | 'deleted': 0, |
|
160 | 161 | 'binary': True, |
|
161 | 162 | 'old_mode': '100755', |
|
162 | 163 | 'new_mode': '100644', |
|
163 | 164 | 'ops': {CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}), |
|
164 | 165 | ]), |
|
165 | 166 | ('hg', |
|
166 | 167 | 'hg_diff_rename_file.diff', |
|
167 | 168 | [('file_renamed', 'M', |
|
168 | 169 | {'added': 0, |
|
169 | 170 | 'deleted': 0, |
|
170 | 171 | 'binary': True, |
|
171 | 172 | 'old_mode': '', |
|
172 | 173 | 'new_mode': '', |
|
173 | 174 | 'renamed': ('file', 'file_renamed'), |
|
174 | 175 | 'ops': {RENAMED_FILENODE: 'file renamed from file to file_renamed'}}), |
|
175 | 176 | ]), |
|
176 | 177 | ('hg', |
|
177 | 178 | 'hg_diff_rename_and_chmod_file.diff', |
|
178 | 179 | [('README', 'M', |
|
179 | 180 | {'added': 0, |
|
180 | 181 | 'deleted': 0, |
|
181 | 182 | 'binary': True, |
|
182 | 183 | 'old_mode': '100644', |
|
183 | 184 | 'new_mode': '100755', |
|
184 | 185 | 'renamed': ('README.rst', 'README'), |
|
185 | 186 | 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755', |
|
186 | 187 | RENAMED_FILENODE: 'file renamed from README.rst to README'}}), |
|
187 | 188 | ]), |
|
188 | 189 | ('hg', |
|
189 | 190 | 'hg_diff_binary_and_normal.diff', |
|
190 | 191 | [('img/baseline-10px.png', 'A', |
|
191 | 192 | {'added': 0, |
|
192 | 193 | 'deleted': 0, |
|
193 | 194 | 'binary': True, |
|
194 | 195 | 'new_mode': '100644', |
|
195 | 196 | 'old_mode': '', |
|
196 | 197 | 'ops': {NEW_FILENODE: 'new file 100644', |
|
197 | 198 | BIN_FILENODE: 'binary diff hidden'}}), |
|
198 | 199 | ('js/jquery/hashgrid.js', 'A', |
|
199 | 200 | {'added': 340, |
|
200 | 201 | 'deleted': 0, |
|
201 | 202 | 'binary': False, |
|
202 | 203 | 'new_mode': '100755', |
|
203 | 204 | 'old_mode': '', |
|
204 | 205 | 'ops': {NEW_FILENODE: 'new file 100755'}}), |
|
205 | 206 | ('index.html', 'M', |
|
206 | 207 | {'added': 3, |
|
207 | 208 | 'deleted': 2, |
|
208 | 209 | 'binary': False, |
|
209 | 210 | 'new_mode': '', |
|
210 | 211 | 'old_mode': '', |
|
211 | 212 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
212 | 213 | ('less/docs.less', 'M', |
|
213 | 214 | {'added': 34, |
|
214 | 215 | 'deleted': 0, |
|
215 | 216 | 'binary': False, |
|
216 | 217 | 'new_mode': '', |
|
217 | 218 | 'old_mode': '', |
|
218 | 219 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
219 | 220 | ('less/scaffolding.less', 'M', |
|
220 | 221 | {'added': 1, |
|
221 | 222 | 'deleted': 3, |
|
222 | 223 | 'binary': False, |
|
223 | 224 | 'new_mode': '', |
|
224 | 225 | 'old_mode': '', |
|
225 | 226 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
226 | 227 | ('readme.markdown', 'M', |
|
227 | 228 | {'added': 1, |
|
228 | 229 | 'deleted': 10, |
|
229 | 230 | 'binary': False, |
|
230 | 231 | 'new_mode': '', |
|
231 | 232 | 'old_mode': '', |
|
232 | 233 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
233 | 234 | ('img/baseline-20px.png', 'D', |
|
234 | 235 | {'added': 0, |
|
235 | 236 | 'deleted': 0, |
|
236 | 237 | 'binary': True, |
|
237 | 238 | 'new_mode': '', |
|
238 | 239 | 'old_mode': '', |
|
239 | 240 | 'ops': {DEL_FILENODE: 'deleted file', |
|
240 | 241 | BIN_FILENODE: 'binary diff hidden'}}), |
|
241 | 242 | ('js/global.js', 'D', |
|
242 | 243 | {'added': 0, |
|
243 | 244 | 'deleted': 75, |
|
244 | 245 | 'binary': False, |
|
245 | 246 | 'new_mode': '', |
|
246 | 247 | 'old_mode': '', |
|
247 | 248 | 'ops': {DEL_FILENODE: 'deleted file'}}) |
|
248 | 249 | ]), |
|
249 | 250 | ('git', |
|
250 | 251 | 'git_diff_chmod.diff', |
|
251 | 252 | [('work-horus.xls', 'M', |
|
252 | 253 | {'added': 0, |
|
253 | 254 | 'deleted': 0, |
|
254 | 255 | 'binary': True, |
|
255 | 256 | 'old_mode': '100644', |
|
256 | 257 | 'new_mode': '100755', |
|
257 | 258 | 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}}) |
|
258 | 259 | ]), |
|
259 | 260 | ('git', |
|
260 | 261 | 'git_diff_js_chars.diff', |
|
261 | 262 | [('\\"><img src=x onerror=prompt(0)>/\\"><img src=x onerror=prompt(1)>.txt', 'M', |
|
262 | 263 | {'added': 1, |
|
263 | 264 | 'deleted': 0, |
|
264 | 265 | 'binary': False, |
|
265 | 266 | 'old_mode': '', |
|
266 | 267 | 'new_mode': '100644', |
|
267 | 268 | 'ops': {MOD_FILENODE: 'modified file'}}) |
|
268 | 269 | ]), |
|
269 | 270 | ('git', |
|
270 | 271 | 'git_diff_rename_file.diff', |
|
271 | 272 | [('file.xls', 'M', |
|
272 | 273 | {'added': 0, |
|
273 | 274 | 'deleted': 0, |
|
274 | 275 | 'binary': True, |
|
275 | 276 | 'old_mode': '', |
|
276 | 277 | 'new_mode': '', |
|
277 | 278 | 'renamed': ('work-horus.xls', 'file.xls'), |
|
278 | 279 | 'ops': { |
|
279 | 280 | RENAMED_FILENODE: 'file renamed from work-horus.xls to file.xls'}}) |
|
280 | 281 | ]), |
|
281 | 282 | ('git', |
|
282 | 283 | 'git_diff_mod_single_binary_file.diff', |
|
283 | 284 | [('US Warszawa.jpg', 'M', |
|
284 | 285 | {'added': 0, |
|
285 | 286 | 'deleted': 0, |
|
286 | 287 | 'binary': True, |
|
287 | 288 | 'old_mode': '', |
|
288 | 289 | 'new_mode': '', |
|
289 | 290 | 'ops': {MOD_FILENODE: 'modified file', |
|
290 | 291 | BIN_FILENODE: 'binary diff hidden'}}) |
|
291 | 292 | ]), |
|
292 | 293 | ('git', |
|
293 | 294 | 'git_diff_binary_and_normal.diff', |
|
294 | 295 | [('img/baseline-10px.png', 'A', |
|
295 | 296 | {'added': 0, |
|
296 | 297 | 'deleted': 0, |
|
297 | 298 | 'binary': True, |
|
298 | 299 | 'old_mode': '', |
|
299 | 300 | 'new_mode': '100644', |
|
300 | 301 | 'ops': {NEW_FILENODE: 'new file 100644', |
|
301 | 302 | BIN_FILENODE: 'binary diff hidden'}}), |
|
302 | 303 | ('js/jquery/hashgrid.js', 'A', |
|
303 | 304 | {'added': 340, |
|
304 | 305 | 'deleted': 0, |
|
305 | 306 | 'binary': False, |
|
306 | 307 | 'old_mode': '', |
|
307 | 308 | 'new_mode': '100755', |
|
308 | 309 | 'ops': {NEW_FILENODE: 'new file 100755'}}), |
|
309 | 310 | ('index.html', 'M', |
|
310 | 311 | {'added': 3, |
|
311 | 312 | 'deleted': 2, |
|
312 | 313 | 'binary': False, |
|
313 | 314 | 'old_mode': '', |
|
314 | 315 | 'new_mode': '100644', |
|
315 | 316 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
316 | 317 | ('less/docs.less', 'M', |
|
317 | 318 | {'added': 34, |
|
318 | 319 | 'deleted': 0, |
|
319 | 320 | 'binary': False, |
|
320 | 321 | 'old_mode': '', |
|
321 | 322 | 'new_mode': '100644', |
|
322 | 323 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
323 | 324 | ('less/scaffolding.less', 'M', |
|
324 | 325 | {'added': 1, |
|
325 | 326 | 'deleted': 3, |
|
326 | 327 | 'binary': False, |
|
327 | 328 | 'old_mode': '', |
|
328 | 329 | 'new_mode': '100644', |
|
329 | 330 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
330 | 331 | ('readme.markdown', 'M', |
|
331 | 332 | {'added': 1, |
|
332 | 333 | 'deleted': 10, |
|
333 | 334 | 'binary': False, |
|
334 | 335 | 'old_mode': '', |
|
335 | 336 | 'new_mode': '100644', |
|
336 | 337 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
337 | 338 | ('img/baseline-20px.png', 'D', |
|
338 | 339 | {'added': 0, |
|
339 | 340 | 'deleted': 0, |
|
340 | 341 | 'binary': True, |
|
341 | 342 | 'old_mode': '', |
|
342 | 343 | 'new_mode': '', |
|
343 | 344 | 'ops': {DEL_FILENODE: 'deleted file', |
|
344 | 345 | BIN_FILENODE: 'binary diff hidden'}}), |
|
345 | 346 | ('js/global.js', 'D', |
|
346 | 347 | {'added': 0, |
|
347 | 348 | 'deleted': 75, |
|
348 | 349 | 'binary': False, |
|
349 | 350 | 'old_mode': '', |
|
350 | 351 | 'new_mode': '', |
|
351 | 352 | 'ops': {DEL_FILENODE: 'deleted file'}}), |
|
352 | 353 | ]), |
|
353 | 354 | ('hg', |
|
354 | 355 | 'diff_with_diff_data.diff', |
|
355 | 356 | [('vcs/backends/base.py', 'M', |
|
356 | 357 | {'added': 18, |
|
357 | 358 | 'deleted': 2, |
|
358 | 359 | 'binary': False, |
|
359 | 360 | 'old_mode': '', |
|
360 | 361 | 'new_mode': '100644', |
|
361 | 362 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
362 | 363 | ('vcs/backends/git/repository.py', 'M', |
|
363 | 364 | {'added': 46, |
|
364 | 365 | 'deleted': 15, |
|
365 | 366 | 'binary': False, |
|
366 | 367 | 'old_mode': '', |
|
367 | 368 | 'new_mode': '100644', |
|
368 | 369 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
369 | 370 | ('vcs/backends/hg.py', 'M', |
|
370 | 371 | {'added': 22, |
|
371 | 372 | 'deleted': 3, |
|
372 | 373 | 'binary': False, |
|
373 | 374 | 'old_mode': '', |
|
374 | 375 | 'new_mode': '100644', |
|
375 | 376 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
376 | 377 | ('vcs/tests/test_git.py', 'M', |
|
377 | 378 | {'added': 5, |
|
378 | 379 | 'deleted': 5, |
|
379 | 380 | 'binary': False, |
|
380 | 381 | 'old_mode': '', |
|
381 | 382 | 'new_mode': '100644', |
|
382 | 383 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
383 | 384 | ('vcs/tests/test_repository.py', 'M', |
|
384 | 385 | {'added': 174, |
|
385 | 386 | 'deleted': 2, |
|
386 | 387 | 'binary': False, |
|
387 | 388 | 'old_mode': '', |
|
388 | 389 | 'new_mode': '100644', |
|
389 | 390 | 'ops': {MOD_FILENODE: 'modified file'}}), |
|
390 | 391 | ]), |
|
391 | 392 | ('hg', |
|
392 | 393 | 'hg_diff_copy_file.diff', |
|
393 | 394 | [('file2', 'M', |
|
394 | 395 | {'added': 0, |
|
395 | 396 | 'deleted': 0, |
|
396 | 397 | 'binary': True, |
|
397 | 398 | 'old_mode': '', |
|
398 | 399 | 'new_mode': '', |
|
399 | 400 | 'copied': ('file1', 'file2'), |
|
400 | 401 | 'ops': {COPIED_FILENODE: 'file copied from file1 to file2'}}), |
|
401 | 402 | ]), |
|
402 | 403 | ('hg', |
|
403 | 404 | 'hg_diff_copy_and_modify_file.diff', |
|
404 | 405 | [('file3', 'M', |
|
405 | 406 | {'added': 1, |
|
406 | 407 | 'deleted': 0, |
|
407 | 408 | 'binary': False, |
|
408 | 409 | 'old_mode': '', |
|
409 | 410 | 'new_mode': '', |
|
410 | 411 | 'copied': ('file2', 'file3'), |
|
411 | 412 | 'ops': {COPIED_FILENODE: 'file copied from file2 to file3', |
|
412 | 413 | MOD_FILENODE: 'modified file'}}), |
|
413 | 414 | ]), |
|
414 | 415 | ('hg', |
|
415 | 416 | 'hg_diff_copy_and_chmod_file.diff', |
|
416 | 417 | [('file4', 'M', |
|
417 | 418 | {'added': 0, |
|
418 | 419 | 'deleted': 0, |
|
419 | 420 | 'binary': True, |
|
420 | 421 | 'old_mode': '100644', |
|
421 | 422 | 'new_mode': '100755', |
|
422 | 423 | 'copied': ('file3', 'file4'), |
|
423 | 424 | 'ops': {COPIED_FILENODE: 'file copied from file3 to file4', |
|
424 | 425 | CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}}), |
|
425 | 426 | ]), |
|
426 | 427 | ('hg', |
|
427 | 428 | 'hg_diff_copy_chmod_and_edit_file.diff', |
|
428 | 429 | [('file5', 'M', |
|
429 | 430 | {'added': 2, |
|
430 | 431 | 'deleted': 1, |
|
431 | 432 | 'binary': False, |
|
432 | 433 | 'old_mode': '100755', |
|
433 | 434 | 'new_mode': '100644', |
|
434 | 435 | 'copied': ('file4', 'file5'), |
|
435 | 436 | 'ops': {COPIED_FILENODE: 'file copied from file4 to file5', |
|
436 | 437 | CHMOD_FILENODE: 'modified file chmod 100755 => 100644', |
|
437 | 438 | MOD_FILENODE: 'modified file'}})]), |
|
438 | 439 | |
|
439 | 440 | # Diffs to validate rename and copy file with space in its name |
|
440 | 441 | ('git', |
|
441 | 442 | 'git_diff_rename_file_with_spaces.diff', |
|
442 | 443 | [('file_with_ two spaces.txt', 'M', |
|
443 | 444 | {'added': 0, |
|
444 | 445 | 'deleted': 0, |
|
445 | 446 | 'binary': True, |
|
446 | 447 | 'old_mode': '', |
|
447 | 448 | 'new_mode': '', |
|
448 | 449 | 'renamed': ('file_with_ spaces.txt', 'file_with_ two spaces.txt'), |
|
449 | 450 | 'ops': { |
|
450 | 451 | RENAMED_FILENODE: ( |
|
451 | 452 | 'file renamed from file_with_ spaces.txt to file_with_ ' |
|
452 | 453 | ' two spaces.txt')} |
|
453 | 454 | }), ]), |
|
454 | 455 | ('hg', |
|
455 | 456 | 'hg_diff_rename_file_with_spaces.diff', |
|
456 | 457 | [('file_changed _.txt', 'M', |
|
457 | 458 | {'added': 0, |
|
458 | 459 | 'deleted': 0, |
|
459 | 460 | 'binary': True, |
|
460 | 461 | 'old_mode': '', |
|
461 | 462 | 'new_mode': '', |
|
462 | 463 | 'renamed': ('file_ with update.txt', 'file_changed _.txt'), |
|
463 | 464 | 'ops': { |
|
464 | 465 | RENAMED_FILENODE: ( |
|
465 | 466 | 'file renamed from file_ with update.txt to file_changed _.txt')} |
|
466 | 467 | }), ]), |
|
467 | 468 | ('hg', |
|
468 | 469 | 'hg_diff_copy_file_with_spaces.diff', |
|
469 | 470 | [('file_copied_ with spaces.txt', 'M', |
|
470 | 471 | {'added': 0, |
|
471 | 472 | 'deleted': 0, |
|
472 | 473 | 'binary': True, |
|
473 | 474 | 'old_mode': '', |
|
474 | 475 | 'new_mode': '', |
|
475 | 476 | 'copied': ('file_changed_without_spaces.txt', 'file_copied_ with spaces.txt'), |
|
476 | 477 | 'ops': { |
|
477 | 478 | COPIED_FILENODE: ( |
|
478 | 479 | 'file copied from file_changed_without_spaces.txt to' |
|
479 | 480 | ' file_copied_ with spaces.txt')} |
|
480 | 481 | }), |
|
481 | 482 | ]), |
|
482 | 483 | |
|
483 | 484 | # special signs from git |
|
484 | 485 | ('git', |
|
485 | 486 | 'git_diff_binary_special_files.diff', |
|
486 | 487 | [('css/_Icon\\r', 'A', |
|
487 | 488 | {'added': 0, |
|
488 | 489 | 'deleted': 0, |
|
489 | 490 | 'binary': True, |
|
490 | 491 | 'old_mode': '', |
|
491 | 492 | 'new_mode': '100644', |
|
492 | 493 | 'ops': {NEW_FILENODE: 'new file 100644', |
|
493 | 494 | BIN_FILENODE: 'binary diff hidden'} |
|
494 | 495 | }), |
|
495 | 496 | ]), |
|
496 | 497 | ('git', |
|
497 | 498 | 'git_diff_binary_special_files_2.diff', |
|
498 | 499 | [('css/Icon\\r', 'A', |
|
499 | 500 | {'added': 0, |
|
500 | 501 | 'deleted': 0, |
|
501 | 502 | 'binary': True, |
|
502 | 503 | 'old_mode': '', |
|
503 | 504 | 'new_mode': '100644', |
|
504 | 505 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
505 | 506 | }), |
|
506 | 507 | ]), |
|
507 | 508 | |
|
508 | 509 | ('svn', |
|
509 | 510 | 'svn_diff_binary_add_file.diff', |
|
510 | 511 | [('intl.dll', 'A', |
|
511 | 512 | {'added': 0, |
|
512 | 513 | 'deleted': 0, |
|
513 | 514 | 'binary': False, |
|
514 | 515 | 'old_mode': '', |
|
515 | 516 | 'new_mode': '10644', |
|
516 | 517 | 'ops': {NEW_FILENODE: 'new file 10644', |
|
517 | 518 | #TODO(Marcink): depends on binary detection on svn patches |
|
518 | 519 | # BIN_FILENODE: 'binary diff hidden' |
|
519 | 520 | } |
|
520 | 521 | }), |
|
521 | 522 | ]), |
|
522 | 523 | |
|
523 | 524 | ('svn', |
|
524 | 525 | 'svn_diff_multiple_changes.diff', |
|
525 | 526 | [('trunk/doc/images/SettingsOverlay.png', 'M', |
|
526 | 527 | {'added': 0, |
|
527 | 528 | 'deleted': 0, |
|
528 | 529 | 'binary': False, |
|
529 | 530 | 'old_mode': '', |
|
530 | 531 | 'new_mode': '', |
|
531 | 532 | 'ops': {MOD_FILENODE: 'modified file', |
|
532 | 533 | #TODO(Marcink): depends on binary detection on svn patches |
|
533 | 534 | # BIN_FILENODE: 'binary diff hidden' |
|
534 | 535 | } |
|
535 | 536 | }), |
|
536 | 537 | ('trunk/doc/source/de/tsvn_ch04.xml', 'M', |
|
537 | 538 | {'added': 89, |
|
538 | 539 | 'deleted': 34, |
|
539 | 540 | 'binary': False, |
|
540 | 541 | 'old_mode': '', |
|
541 | 542 | 'new_mode': '', |
|
542 | 543 | 'ops': {MOD_FILENODE: 'modified file'} |
|
543 | 544 | }), |
|
544 | 545 | ('trunk/doc/source/en/tsvn_ch04.xml', 'M', |
|
545 | 546 | {'added': 66, |
|
546 | 547 | 'deleted': 21, |
|
547 | 548 | 'binary': False, |
|
548 | 549 | 'old_mode': '', |
|
549 | 550 | 'new_mode': '', |
|
550 | 551 | 'ops': {MOD_FILENODE: 'modified file'} |
|
551 | 552 | }), |
|
552 | 553 | ('trunk/src/Changelog.txt', 'M', |
|
553 | 554 | {'added': 2, |
|
554 | 555 | 'deleted': 0, |
|
555 | 556 | 'binary': False, |
|
556 | 557 | 'old_mode': '', |
|
557 | 558 | 'new_mode': '', |
|
558 | 559 | 'ops': {MOD_FILENODE: 'modified file'} |
|
559 | 560 | }), |
|
560 | 561 | ('trunk/src/Resources/TortoiseProcENG.rc', 'M', |
|
561 | 562 | {'added': 19, |
|
562 | 563 | 'deleted': 13, |
|
563 | 564 | 'binary': False, |
|
564 | 565 | 'old_mode': '', |
|
565 | 566 | 'new_mode': '', |
|
566 | 567 | 'ops': {MOD_FILENODE: 'modified file'} |
|
567 | 568 | }), |
|
568 | 569 | ('trunk/src/TortoiseProc/SetOverlayPage.cpp', 'M', |
|
569 | 570 | {'added': 16, |
|
570 | 571 | 'deleted': 1, |
|
571 | 572 | 'binary': False, |
|
572 | 573 | 'old_mode': '', |
|
573 | 574 | 'new_mode': '', |
|
574 | 575 | 'ops': {MOD_FILENODE: 'modified file'} |
|
575 | 576 | }), |
|
576 | 577 | ('trunk/src/TortoiseProc/SetOverlayPage.h', 'M', |
|
577 | 578 | {'added': 3, |
|
578 | 579 | 'deleted': 0, |
|
579 | 580 | 'binary': False, |
|
580 | 581 | 'old_mode': '', |
|
581 | 582 | 'new_mode': '', |
|
582 | 583 | 'ops': {MOD_FILENODE: 'modified file'} |
|
583 | 584 | }), |
|
584 | 585 | ('trunk/src/TortoiseProc/resource.h', 'M', |
|
585 | 586 | {'added': 2, |
|
586 | 587 | 'deleted': 0, |
|
587 | 588 | 'binary': False, |
|
588 | 589 | 'old_mode': '', |
|
589 | 590 | 'new_mode': '', |
|
590 | 591 | 'ops': {MOD_FILENODE: 'modified file'} |
|
591 | 592 | }), |
|
592 | 593 | ('trunk/src/TortoiseShell/ShellCache.h', 'M', |
|
593 | 594 | {'added': 50, |
|
594 | 595 | 'deleted': 1, |
|
595 | 596 | 'binary': False, |
|
596 | 597 | 'old_mode': '', |
|
597 | 598 | 'new_mode': '', |
|
598 | 599 | 'ops': {MOD_FILENODE: 'modified file'} |
|
599 | 600 | }), |
|
600 | 601 | ]), |
|
601 | 602 | |
|
602 | 603 | ] |
|
603 | 604 | |
|
604 | 605 | DIFF_FIXTURES_WITH_CONTENT = [ |
|
605 | 606 | ( |
|
606 | 607 | 'hg', 'hg_diff_single_file_change_newline.diff', |
|
607 | 608 | [ |
|
608 | 609 | ( |
|
609 | 610 | 'file_b', # filename |
|
610 | 611 | 'A', # change |
|
611 | 612 | { # stats |
|
612 | 613 | 'added': 1, |
|
613 | 614 | 'deleted': 0, |
|
614 | 615 | 'binary': False, |
|
615 | 616 | 'old_mode': '', |
|
616 | 617 | 'new_mode': '100644', |
|
617 | 618 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
618 | 619 | }, |
|
619 | 620 | '@@ -0,0 +1 @@\n+test_content b\n' # diff |
|
620 | 621 | ), |
|
621 | 622 | ], |
|
622 | 623 | ), |
|
623 | 624 | ( |
|
624 | 625 | 'hg', 'hg_diff_double_file_change_newline.diff', |
|
625 | 626 | [ |
|
626 | 627 | ( |
|
627 | 628 | 'file_b', # filename |
|
628 | 629 | 'A', # change |
|
629 | 630 | { # stats |
|
630 | 631 | 'added': 1, |
|
631 | 632 | 'deleted': 0, |
|
632 | 633 | 'binary': False, |
|
633 | 634 | 'old_mode': '', |
|
634 | 635 | 'new_mode': '100644', |
|
635 | 636 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
636 | 637 | }, |
|
637 | 638 | '@@ -0,0 +1 @@\n+test_content b\n' # diff |
|
638 | 639 | ), |
|
639 | 640 | ( |
|
640 | 641 | 'file_c', # filename |
|
641 | 642 | 'A', # change |
|
642 | 643 | { # stats |
|
643 | 644 | 'added': 1, |
|
644 | 645 | 'deleted': 0, |
|
645 | 646 | 'binary': False, |
|
646 | 647 | 'old_mode': '', |
|
647 | 648 | 'new_mode': '100644', |
|
648 | 649 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
649 | 650 | }, |
|
650 | 651 | '@@ -0,0 +1 @@\n+test_content c\n' # diff |
|
651 | 652 | ), |
|
652 | 653 | ], |
|
653 | 654 | ), |
|
654 | 655 | ( |
|
655 | 656 | 'hg', 'hg_diff_double_file_change_double_newline.diff', |
|
656 | 657 | [ |
|
657 | 658 | ( |
|
658 | 659 | 'file_b', # filename |
|
659 | 660 | 'A', # change |
|
660 | 661 | { # stats |
|
661 | 662 | 'added': 1, |
|
662 | 663 | 'deleted': 0, |
|
663 | 664 | 'binary': False, |
|
664 | 665 | 'old_mode': '', |
|
665 | 666 | 'new_mode': '100644', |
|
666 | 667 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
667 | 668 | }, |
|
668 | 669 | '@@ -0,0 +1 @@\n+test_content b\n\n' # diff |
|
669 | 670 | ), |
|
670 | 671 | ( |
|
671 | 672 | 'file_c', # filename |
|
672 | 673 | 'A', # change |
|
673 | 674 | { # stats |
|
674 | 675 | 'added': 1, |
|
675 | 676 | 'deleted': 0, |
|
676 | 677 | 'binary': False, |
|
677 | 678 | 'old_mode': '', |
|
678 | 679 | 'new_mode': '100644', |
|
679 | 680 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
680 | 681 | }, |
|
681 | 682 | '@@ -0,0 +1 @@\n+test_content c\n' # diff |
|
682 | 683 | ), |
|
683 | 684 | ], |
|
684 | 685 | ), |
|
685 | 686 | ( |
|
686 | 687 | 'hg', 'hg_diff_four_file_change_newline.diff', |
|
687 | 688 | [ |
|
688 | 689 | ( |
|
689 | 690 | 'file', # filename |
|
690 | 691 | 'A', # change |
|
691 | 692 | { # stats |
|
692 | 693 | 'added': 1, |
|
693 | 694 | 'deleted': 0, |
|
694 | 695 | 'binary': False, |
|
695 | 696 | 'old_mode': '', |
|
696 | 697 | 'new_mode': '100644', |
|
697 | 698 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
698 | 699 | }, |
|
699 | 700 | '@@ -0,0 +1,1 @@\n+file\n' # diff |
|
700 | 701 | ), |
|
701 | 702 | ( |
|
702 | 703 | 'file2', # filename |
|
703 | 704 | 'A', # change |
|
704 | 705 | { # stats |
|
705 | 706 | 'added': 1, |
|
706 | 707 | 'deleted': 0, |
|
707 | 708 | 'binary': False, |
|
708 | 709 | 'old_mode': '', |
|
709 | 710 | 'new_mode': '100644', |
|
710 | 711 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
711 | 712 | }, |
|
712 | 713 | '@@ -0,0 +1,1 @@\n+another line\n' # diff |
|
713 | 714 | ), |
|
714 | 715 | ( |
|
715 | 716 | 'file3', # filename |
|
716 | 717 | 'A', # change |
|
717 | 718 | { # stats |
|
718 | 719 | 'added': 1, |
|
719 | 720 | 'deleted': 0, |
|
720 | 721 | 'binary': False, |
|
721 | 722 | 'old_mode': '', |
|
722 | 723 | 'new_mode': '100644', |
|
723 | 724 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
724 | 725 | }, |
|
725 | 726 | '@@ -0,0 +1,1 @@\n+newline\n' # diff |
|
726 | 727 | ), |
|
727 | 728 | ( |
|
728 | 729 | 'file4', # filename |
|
729 | 730 | 'A', # change |
|
730 | 731 | { # stats |
|
731 | 732 | 'added': 1, |
|
732 | 733 | 'deleted': 0, |
|
733 | 734 | 'binary': False, |
|
734 | 735 | 'old_mode': '', |
|
735 | 736 | 'new_mode': '100644', |
|
736 | 737 | 'ops': {NEW_FILENODE: 'new file 100644', } |
|
737 | 738 | }, |
|
738 | 739 | '@@ -0,0 +1,1 @@\n+fil4\n\\ No newline at end of file' # diff |
|
739 | 740 | ), |
|
740 | 741 | ], |
|
741 | 742 | ), |
|
742 | 743 | |
|
743 | 744 | ] |
|
744 | 745 | |
|
745 | 746 | |
|
746 | 747 | diff_class = { |
|
747 | 748 | 'git': GitDiff, |
|
748 | 749 | 'hg': MercurialDiff, |
|
749 | 750 | 'svn': SubversionDiff, |
|
750 | 751 | } |
|
751 | 752 | |
|
752 | 753 | |
|
753 | 754 | @pytest.mark.parametrize('vcs_type, diff_file, expected_data', DIFF_FIXTURES) |
|
754 | 755 | def test_diff_lib(vcs_type, diff_file, expected_data): |
|
755 | 756 | diff_txt = fixture.load_resource(diff_file) |
|
756 | 757 | diff = diff_class[vcs_type](diff_txt) |
|
757 | 758 | |
|
758 | 759 | diff_proc = DiffProcessor(diff, diff_format='newdiff') |
|
759 | 760 | diff_proc_d = diff_proc.prepare() |
|
760 | 761 | data = [(x['filename'], x['operation'], x['stats']) |
|
761 | 762 | for x in diff_proc_d] |
|
762 | 763 | assert expected_data == data |
|
763 | 764 | |
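Condensed, for readers: what one DIFF_FIXTURES entry drives through the code under test (the fixture file name is one of those listed above; printing stands in for the assertion):

    raw = fixture.load_resource('git_diff_chmod.diff')   # stored diff, as bytes
    vcs_diff = diff_class['git'](raw)                    # wrap in GitDiff
    prepared = DiffProcessor(vcs_diff, diff_format='newdiff').prepare()
    for item in prepared:
        print(item['filename'], item['operation'], item['stats'])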
|
764 | 765 | |
|
765 | 766 | @pytest.mark.parametrize('vcs_type, diff_file, expected_data', DIFF_FIXTURES_WITH_CONTENT) |
|
766 | 767 | def test_diff_lib_newlines(vcs_type, diff_file, expected_data): |
|
767 | 768 | diff_txt = fixture.load_resource(diff_file) |
|
768 | 769 | diff = diff_class[vcs_type](diff_txt) |
|
769 | 770 | |
|
770 | 771 | diff_proc = DiffProcessor(diff, diff_format='newdiff') |
|
771 | 772 | diff_proc_d = diff_proc.prepare() |
|
772 | 773 | data = [(x['filename'], x['operation'], x['stats'], x['raw_diff']) |
|
773 | 774 | for x in diff_proc_d] |
|
774 | 775 | assert expected_data == data |
|
775 | 776 | |
|
776 | 777 | |
|
777 | 778 | @pytest.mark.parametrize('input_str', [ |
|
778 | 779 | b'', |
|
779 | 780 | b'\n', |
|
780 | 781 | b'\n\n', |
|
781 | 782 | b'First\n+second', |
|
782 | 783 | b'First\n+second\n', |
|
783 | 784 | |
|
784 | 785 | b'\n\n\n Multi \n\n\n', |
|
785 | 786 | b'\n\n\n Multi beginning', |
|
786 | 787 | b'Multi end \n\n\n', |
|
787 | 788 | b'Multi end', |
|
788 | 789 | b'@@ -0,0 +1 @@\n+test_content \n\n b\n' |
|
789 | 790 | ], ids=no_newline_id_generator) |
|
790 | 791 | def test_splitlines(input_str): |
|
791 | 792 | result = DiffProcessor.diff_splitter(input_str) |
|
792 | 793 | assert list(result) == input_str.splitlines(True) |
|
793 | 794 | |
|
794 | 795 | |
|
795 | 796 | def test_diff_over_limit(request): |
|
796 | 797 | |
|
797 | 798 | diff_limit = 1024 |
|
798 | 799 | file_limit = 1024 |
|
799 | 800 | |
|
800 | 801 | raw_diff = fixture.load_resource('large_diff.diff') |
|
801 | 802 | vcs_diff = GitDiff(raw_diff) |
|
802 | 803 | diff_processor = DiffProcessor(vcs_diff, diff_format='newdiff', |
|
803 | 804 | diff_limit=diff_limit, file_limit=file_limit, |
|
804 | 805 | show_full_diff=False) |
|
805 | 806 | |
|
806 | 807 | _parsed = diff_processor.prepare() |
|
807 | 808 | |
|
808 | 809 | commit1 = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1) |
|
809 | 810 | commit2 = GitCommit(repository=mock.Mock(), raw_id='abcdef34', idx=2) |
|
810 | 811 | |
|
811 | 812 | diffset = DiffSet( |
|
812 | 813 | repo_name='repo_name', |
|
813 | 814 | source_node_getter=lambda *a, **kw: AttributeDict({'commit': commit1}), |
|
814 | 815 | target_node_getter=lambda *a, **kw: AttributeDict({'commit': commit2}) |
|
815 | 816 | ) |
|
816 | 817 | |
|
817 | 818 | diffset = diffset.render_patchset(_parsed, commit1, commit2) |
|
818 | 819 | |
|
819 | 820 | assert len(diffset.files) == 2 |
|
820 | 821 | assert diffset.limited_diff is True |
|
821 | 822 | assert diffset.files[0].patch['filename'] == 'example.go' |
|
822 | 823 | assert diffset.files[0].limited_diff is True |
|
823 | 824 | |
|
824 | 825 | assert diffset.files[1].patch['filename'] == 'README.md' |
|
825 | 826 | assert diffset.files[1].limited_diff is False |