##// END OF EJS Templates
code: import fix/pep8
super-admin -
r5178:30100c99 default
parent child Browse files
Show More
@@ -1,2534 +1,2533 b''
1 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import logging
19 import logging
20 import time
20 import time
21
21
22 import rhodecode
22 import rhodecode
23 from rhodecode.api import (
23 from rhodecode.api import (
24 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
24 jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
25 from rhodecode.api.utils import (
25 from rhodecode.api.utils import (
26 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
26 has_superadmin_permission, Optional, OAttr, get_repo_or_error,
27 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
27 get_user_group_or_error, get_user_or_error, validate_repo_permissions,
28 get_perm_or_error, parse_args, get_origin, build_commit_data,
28 get_perm_or_error, parse_args, get_origin, build_commit_data,
29 validate_set_owner_permissions)
29 validate_set_owner_permissions)
30 from rhodecode.lib import audit_logger, rc_cache, channelstream
30 from rhodecode.lib import audit_logger, rc_cache, channelstream
31 from rhodecode.lib import repo_maintenance
31 from rhodecode.lib import repo_maintenance
32 from rhodecode.lib.auth import (
32 from rhodecode.lib.auth import (
33 HasPermissionAnyApi, HasUserGroupPermissionAnyApi,
33 HasPermissionAnyApi, HasUserGroupPermissionAnyApi,
34 HasRepoPermissionAnyApi)
34 HasRepoPermissionAnyApi)
35 from rhodecode.lib.celerylib.utils import get_task_id
35 from rhodecode.lib.celerylib.utils import get_task_id
36 from rhodecode.lib.utils2 import (
36 from rhodecode.lib.utils2 import (
37 str2bool, time_to_datetime, safe_str, safe_int)
37 str2bool, time_to_datetime, safe_str, safe_int)
38 from rhodecode.lib.ext_json import json
38 from rhodecode.lib.ext_json import json
39 from rhodecode.lib.exceptions import (
39 from rhodecode.lib.exceptions import (
40 StatusChangeOnClosedPullRequestError, CommentVersionMismatch)
40 StatusChangeOnClosedPullRequestError, CommentVersionMismatch)
41 from rhodecode.lib.vcs import RepositoryError
41 from rhodecode.lib.vcs import RepositoryError
42 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
42 from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
43 from rhodecode.model.changeset_status import ChangesetStatusModel
43 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.comment import CommentsModel
44 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.db import (
45 from rhodecode.model.db import (
46 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
46 Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
47 ChangesetComment)
47 ChangesetComment)
48 from rhodecode.model.permission import PermissionModel
48 from rhodecode.model.permission import PermissionModel
49 from rhodecode.model.pull_request import PullRequestModel
49 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.repo import RepoModel
50 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.scm import ScmModel, RepoList
51 from rhodecode.model.scm import ScmModel, RepoList
52 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
52 from rhodecode.model.settings import SettingsModel, VcsSettingsModel
53 from rhodecode.model import validation_schema
53 from rhodecode.model import validation_schema
54 from rhodecode.model.validation_schema.schemas import repo_schema
54 from rhodecode.model.validation_schema.schemas import repo_schema
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
@jsonrpc_method()
def get_repo(request, apiuser, repoid, cache=Optional(True)):
    """
    Gets an existing repository by its name or repository_id.

    The members section so the output returns users groups or users
    associated with that repository.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param cache: use the cached value for last changeset
    :type cache: Optional(bool)

    Example output:

    .. code-block:: bash

        {
          "error": null,
          "id": <repo_id>,
          "result": {
            "clone_uri": null,
            "created_on": "timestamp",
            "description": "repo description",
            "enable_downloads": false,
            "enable_locking": false,
            "enable_statistics": false,
            "followers": [
              {
                "active": true,
                "admin": false,
                "api_key": "****************************************",
                "api_keys": [
                  "****************************************"
                ],
                "email": "user@example.com",
                "emails": [
                  "user@example.com"
                ],
                "extern_name": "rhodecode",
                "extern_type": "rhodecode",
                "firstname": "username",
                "ip_addresses": [],
                "language": null,
                "last_login": "2015-09-16T17:16:35.854",
                "lastname": "surname",
                "user_id": <user_id>,
                "username": "name"
              }
            ],
            "fork_of": "parent-repo",
            "landing_rev": [
              "rev",
              "tip"
            ],
            "last_changeset": {
              "author": "User <user@example.com>",
              "branch": "default",
              "date": "timestamp",
              "message": "last commit message",
              "parents": [
                {
                  "raw_id": "commit-id"
                }
              ],
              "raw_id": "commit-id",
              "revision": <revision number>,
              "short_id": "short id"
            },
            "lock_reason": null,
            "locked_by": null,
            "locked_date": null,
            "owner": "owner-name",
            "permissions": [
              {
                "name": "super-admin-name",
                "origin": "super-admin",
                "permission": "repository.admin",
                "type": "user"
              },
              {
                "name": "owner-name",
                "origin": "owner",
                "permission": "repository.admin",
                "type": "user"
              },
              {
                "name": "user-group-name",
                "origin": "permission",
                "permission": "repository.write",
                "type": "user_group"
              }
            ],
            "private": true,
            "repo_id": 676,
            "repo_name": "user-group/repo-name",
            "repo_type": "hg"
          }
        }
    """

    repo = get_repo_or_error(repoid)
    cache = Optional.extract(cache)

    # super-admins get secret values (e.g. api keys) included in the payload;
    # everyone else must hold at least read permission on this repo.
    include_secrets = False
    if has_superadmin_permission(apiuser):
        include_secrets = True
    else:
        # check if we have at least read permission for this repo !
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # collect direct user permissions attached to the repo
    permissions = []
    for _user in repo.permissions():
        user_data = {
            'name': _user.username,
            'permission': _user.permission,
            'origin': get_origin(_user),
            'type': "user",
        }
        permissions.append(user_data)

    # collect user-group permissions attached to the repo
    for _user_group in repo.permission_user_groups():
        user_group_data = {
            'name': _user_group.users_group_name,
            'permission': _user_group.permission,
            'origin': get_origin(_user_group),
            'type': "user_group",
        }
        permissions.append(user_group_data)

    following_users = [
        user.user.get_api_data(include_secrets=include_secrets)
        for user in repo.followers]

    if not cache:
        # caller asked for fresh data: refresh the cached last-changeset first
        repo.update_commit_cache()
    data = repo.get_api_data(include_secrets=include_secrets)
    data['permissions'] = permissions
    data['followers'] = following_users

    return data
207
207
208
208
@jsonrpc_method()
def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
    """
    Lists all existing repositories.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param root: specify root repository group to fetch repositories.
        filters the returned repositories to be members of given root group.
    :type root: Optional(None)
    :param traverse: traverse given root into subrepositories. With this flag
        set to False, it will only return top-level repositories from `root`.
        if root is empty it will return just top-level repositories.
    :type traverse: Optional(True)


    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: [
          {
            "repo_id" : "<repo_id>",
            "repo_name" : "<reponame>"
            "repo_type" : "<repo_type>",
            "clone_uri" : "<clone_uri>",
            "private": : "<bool>",
            "created_on" : "<datetimecreated>",
            "description" : "<description>",
            "landing_rev": "<landing_rev>",
            "owner": "<repo_owner>",
            "fork_of": "<name_of_fork_parent>",
            "enable_downloads": "<bool>",
            "enable_locking": "<bool>",
            "enable_statistics": "<bool>",
          },
          ...
        ]
        error: null
    """

    include_secrets = has_superadmin_permission(apiuser)
    _perms = ('repository.read', 'repository.write', 'repository.admin',)
    extras = {'user': apiuser}

    root = Optional.extract(root)
    traverse = Optional.extract(traverse, binary=True)

    repo_model = RepoModel()
    if not root:
        # no root group requested: everything, or only the top level
        if traverse:
            found_repos = repo_model.get_all()
        else:
            found_repos = repo_model.get_repos_for_root(root=None)
    else:
        # the requested root group must exist, otherwise report an error
        root_group = RepoGroup.get_by_group_name(root)
        if not root_group:
            raise JSONRPCError(
                f'Root repository group `{root}` does not exist')

        if traverse:
            found_repos = repo_model.get_repos_for_root(
                root=root, traverse=traverse)
        else:
            found_repos = repo_model.get_repos_for_root(root=root_group)

    # RepoList filters the raw result down to what apiuser may actually read
    repo_list = RepoList(found_repos, perm_set=_perms, extra_kwargs=extras)
    return [
        repo.get_api_data(include_secrets=include_secrets)
        for repo in repo_list]
282
282
283
283
@jsonrpc_method()
def get_repo_changeset(request, apiuser, repoid, revision,
                       details=Optional('basic')):
    """
    Returns information about a changeset.

    Additionally parameters define the amount of details returned by
    this function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id
    :type repoid: str or int
    :param revision: revision for which listing should be done
    :type revision: str
    :param details: details can be 'basic|extended|full' full gives diff
        info details like the diff itself, and number of changed files etc.
    :type details: Optional(str)

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        # report against the actual parameter being validated (`details`);
        # the old message blamed `ret_type`, which this call does not take
        allowed = ','.join(_changes_details_types)
        raise JSONRPCError(f'details must be one of {allowed}')

    vcs_repo = repo.scm_instance()
    # pre-load the attributes needed for the JSON payload in one pass
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    try:
        commit = repo.get_commit(commit_id=revision, pre_load=pre_load)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))
    _cs_json = commit.__json__()
    _cs_json['diff'] = build_commit_data(vcs_repo, commit, changes_details)
    if changes_details == 'full':
        # 'full' additionally exposes branch/bookmark/tag refs of the commit
        _cs_json['refs'] = commit._get_refs()
    return _cs_json
332
332
333
333
@jsonrpc_method()
def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
                        details=Optional('basic')):
    """
    Returns a set of commits limited by the number starting
    from the `start_rev` option.

    Additional parameters define the amount of details returned by this
    function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param start_rev: The starting revision from where to get changesets.
    :type start_rev: str
    :param limit: Limit the number of commits to this amount.
        A value of -1 means no limit.
    :type limit: str or int
    :param details: Set the level of detail returned. Valid option are:
        ``basic``, ``extended`` and ``full``.
    :type details: Optional(str)

    .. note::

       Setting the parameter `details` to the value ``full`` is extensive
       and returns details like the diff itself, and the number
       of changed files.

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        # report against the actual parameter being validated (`details`);
        # the old message blamed `ret_type`, which this call does not take
        allowed = ','.join(_changes_details_types)
        raise JSONRPCError(f'details must be one of {allowed}')

    limit = int(limit)
    # pre-load the attributes needed for the JSON payload in one pass
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    vcs_repo = repo.scm_instance()
    # SVN needs a special case to distinguish its index and commit id
    if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
        start_rev = vcs_repo.commit_ids[0]

    try:
        commits = vcs_repo.get_commits(
            start_id=start_rev, pre_load=pre_load, translate_tags=False)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))
    except Exception:
        log.exception('Fetching of commits failed')
        raise JSONRPCError('Error occurred during commit fetching')

    ret = []
    for cnt, commit in enumerate(commits):
        # stop once the limit is reached; -1 means unlimited
        # (explicit form of the former chained comparison `cnt >= limit != -1`)
        if limit != -1 and cnt >= limit:
            break
        _cs_json = commit.__json__()
        _cs_json['diff'] = build_commit_data(vcs_repo, commit, changes_details)
        if changes_details == 'full':
            _cs_json['refs'] = {
                'branches': [commit.branch],
                'bookmarks': getattr(commit, 'bookmarks', []),
                'tags': commit.tags
            }
        ret.append(_cs_json)
    return ret
410
410
411
411
@jsonrpc_method()
def get_repo_nodes(request, apiuser, repoid, revision, root_path,
                   ret_type=Optional('all'), details=Optional('basic'),
                   max_file_bytes=Optional(None)):
    """
    Returns a list of nodes and children in a flat list for a given
    path at given revision.

    It's possible to specify ret_type to show only `files` or `dirs`.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision for which listing should be done.
    :type revision: str
    :param root_path: The path from which to start displaying.
    :type root_path: str
    :param ret_type: Set the return type. Valid options are
        ``all`` (default), ``files`` and ``dirs``.
    :type ret_type: Optional(str)
    :param details: Returns extended information about nodes, such as
        md5, binary, and or content.
        The valid options are ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type max_file_bytes: Optional(int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: [
          {
            "binary": false,
            "content": "File line",
            "extension": "md",
            "lines": 2,
            "md5": "059fa5d29b19c0657e384749480f6422",
            "mimetype": "text/x-minidsrc",
            "name": "file.md",
            "size": 580,
            "type": "file"
          },
          ...
        ]
        error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    ret_type = Optional.extract(ret_type)
    details = Optional.extract(details)
    max_file_bytes = Optional.extract(max_file_bytes)

    _extended_types = ['basic', 'full']
    if details not in _extended_types:
        # this validates `details`, not `ret_type` — the old message blamed
        # `ret_type`, which has a different set of valid values (all/files/dirs)
        allowed_details = ','.join(_extended_types)
        raise JSONRPCError(f'details must be one of {allowed_details}')

    # 'basic' adds node metadata (md5, binary flag, ...); 'full' also
    # embeds file content, subject to the max_file_bytes cap
    extended_info = False
    content = False
    if details == 'basic':
        extended_info = True

    if details == 'full':
        extended_info = content = True

    _map = {}
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []

        _d, _f = ScmModel().get_nodes(
            repo, revision, root_path, flat=False,
            extended_info=extended_info, content=content,
            max_file_bytes=max_file_bytes)

        _map = {
            'all': _d + _f,
            'files': _f,
            'dirs': _d,
        }

        return _map[ret_type]
    except KeyError:
        # invalid ret_type requested — raised by the _map lookup above
        keys = ','.join(sorted(_map.keys()))
        raise JSONRPCError(f'ret_type must be one of {keys}')
    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` nodes')
512
512
513
513
@jsonrpc_method()
def get_repo_file(request, apiuser, repoid, commit_id, file_path,
                  max_file_bytes=Optional(0), details=Optional('basic'),
                  cache=Optional(True)):
    """
    Returns a single file from repository at given revision.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param commit_id: The revision for which listing should be done.
    :type commit_id: str
    :param file_path: The path from which to start displaying.
    :type file_path: str
    :param details: Returns different set of information about nodes.
        The valid options are ``minimal`` ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content under this file size bytes
    :type max_file_bytes: Optional(int)
    :param cache: Use internal caches for fetching files. If disabled fetching
        files is slower but more memory efficient
    :type cache: Optional(bool)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "binary": false,
            "extension": "py",
            "lines": 35,
            "content": "....",
            "md5": "76318336366b0f17ee249e11b0c99c41",
            "mimetype": "text/x-python",
            "name": "python.py",
            "size": 817,
            "type": "file",
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    cache = Optional.extract(cache, binary=True)
    details = Optional.extract(details)
    max_file_bytes = Optional.extract(max_file_bytes)

    _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
    if details not in _extended_types:
        ret_types = ','.join(_extended_types)
        # BUGFIX: the message previously mixed a leftover `%s` placeholder with
        # an f-string and passed `details` as a stray positional argument to
        # JSONRPCError; interpolate both values properly instead.
        raise JSONRPCError(f'ret_type must be one of {ret_types}, got {details}')
    extended_info = False
    content = False

    if details == 'minimal':
        extended_info = False

    elif details == 'basic':
        extended_info = True

    elif details == 'full':
        extended_info = content = True

    file_path = safe_str(file_path)
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return None

        node = ScmModel().get_node(
            repo, commit_id, file_path, extended_info=extended_info,
            content=content, max_file_bytes=max_file_bytes, cache=cache)

    except NodeDoesNotExistError:
        raise JSONRPCError(
            f'There is no file in repo: `{repo.repo_name}` at path `{file_path}` for commit: `{commit_id}`')
    except Exception:
        log.exception("Exception occurred while trying to get repo %s file",
                      repo.repo_name)
        raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` file at path {file_path}')

    return node
605
605
606
606
@jsonrpc_method()
def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
    """
    Returns a list of tree nodes for path at given revision. This api is built
    strictly for usage in full text search building, and shouldn't be consumed

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param commit_id: The revision for which the tree should be computed.
    :type commit_id: str
    :param root_path: The path from which the tree walk starts.
    :type root_path: str
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    repo_id = repo.repo_id
    cache_seconds = rhodecode.ConfigGet().get_int('rc_cache.cache_repo.expiration_time')
    cache_on = cache_seconds > 0

    cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}'
    rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

    def compute_fts_tree(repo_id, commit_id, root_path):
        # thin wrapper so the computation can be wired into the cache region
        return ScmModel().get_fts_data(repo_id, commit_id, root_path)

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if not _scm or _scm.is_empty():
            return []
    except RepositoryError:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` nodes')

    try:
        # we need to resolve commit_id to a FULL sha for cache to work correctly.
        # sending 'master' is a pointer that needs to be translated to current commit.
        commit_id = _scm.get_commit(commit_id=commit_id).raw_id
        # CHANGED: pass format args lazily to the logger instead of eagerly
        # %-formatting the string; avoids the work when DEBUG is disabled.
        log.debug(
            'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
            'with caching: %s[TTL: %ss]',
            repo_id, commit_id, cache_on, cache_seconds or 0)

        tree_files = compute_fts_tree(repo_id, commit_id, root_path)

        return tree_files

    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        # f-string for consistency with the rest of this module (same message)
        raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` nodes')
658
658
659
659
@jsonrpc_method()
def get_repo_refs(request, apiuser, repoid):
    """
    Returns a dictionary of current references. It returns
    bookmarks, branches, closed_branches, and tags for given repository

    It's possible to specify ret_type to show only `files` or `dirs`.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        "result": {
            "bookmarks": {
                "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
                "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
            },
            "branches": {
                "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
                "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
            },
            "branches_closed": {},
            "tags": {
                "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
                "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
                "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
                "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
            }
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        required_perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, required_perms)

    try:
        # empty repos answer this too; the backend just yields empty ref maps
        return repo.scm_instance().refs()
    except Exception:
        log.exception("Exception occurred while trying to get repo refs")
        raise JSONRPCError(
            'failed to get repo: `%s` references' % repo.repo_name
        )
716
716
717
717
@jsonrpc_method()
def create_repo(
        request, apiuser, repo_name, repo_type,
        owner=Optional(OAttr('apiuser')),
        description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None),
        push_uri=Optional(None),
        landing_rev=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False),
        copy_permissions=Optional(False)):
    """
    Creates a repository.

    * If the repository name contains "/", repository will be created inside
      a repository group or nested repository groups

      For example "foo/bar/repo1" will create |repo| called "repo1" inside
      group "foo/bar". You have to have permissions to access and write to
      the last repository group ("bar" in this example)

    This command can only be run using an |authtoken| with at least
    permissions to create repositories, or write permissions to
    parent repository groups.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repo_name: Set the repository name.
    :type repo_name: str
    :param repo_type: Set the repository type; 'hg','git', or 'svn'.
    :type repo_type: str
    :param owner: user_id or username
    :type owner: Optional(str)
    :param description: Set the repository description.
    :type description: Optional(str)
    :param private: set repository as private
    :type private: bool
    :param clone_uri: set clone_uri
    :type clone_uri: str
    :param push_uri: set push_uri
    :type push_uri: str
    :param landing_rev: <rev_type>:<rev>, e.g branch:default, book:dev, rev:abcd
    :type landing_rev: str
    :param enable_locking:
    :type enable_locking: bool
    :param enable_downloads:
    :type enable_downloads: bool
    :param enable_statistics:
    :type enable_statistics: bool
    :param copy_permissions: Copy permission from group in which the
        repository is being created.
    :type copy_permissions: bool


    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Created new repository `<reponame>`",
            "success": true,
            "task": "<celery task id or None if done sync>"
        }
        error: null


    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'failed to create repository `<repo_name>`'
        }

    """

    # resolves the owner (defaults to apiuser) and asserts the caller may
    # assign ownership; raises if not permitted
    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)
    push_uri = Optional.extract(push_uri)

    # fill every parameter the caller left as Optional(...) from the
    # instance-wide default repo settings
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    if isinstance(private, Optional):
        private = defs.get('repo_private') or Optional.extract(private)
    if isinstance(repo_type, Optional):
        repo_type = defs.get('repo_type')
    if isinstance(enable_statistics, Optional):
        enable_statistics = defs.get('repo_enable_statistics')
    if isinstance(enable_locking, Optional):
        enable_locking = defs.get('repo_enable_locking')
    if isinstance(enable_downloads, Optional):
        enable_downloads = defs.get('repo_enable_downloads')

    # backend default landing ref must always be a valid choice, even if the
    # repo's landing-rev list doesn't include it yet
    landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
    ref_choices = list(set(ref_choices + [landing_ref]))

    landing_commit_ref = Optional.extract(landing_rev) or landing_ref

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser)

    try:
        # validates name/group placement/permissions; also splits repo_name
        # into group + short name used below
        schema_data = schema.deserialize(dict(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_push_uri=push_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions,
            repo_enable_statistics=enable_statistics,
            repo_enable_downloads=enable_downloads,
            repo_enable_locking=enable_locking))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        # translate schema output into the form_data dict RepoModel expects
        data = {
            'owner': owner,
            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'repo_description': schema_data['repo_description'],
            'repo_private': schema_data['repo_private'],
            'clone_uri': schema_data['repo_clone_uri'],
            'push_uri': schema_data['repo_push_uri'],
            'repo_landing_rev': schema_data['repo_landing_commit_ref'],
            'enable_statistics': schema_data['repo_enable_statistics'],
            'enable_locking': schema_data['repo_enable_locking'],
            'enable_downloads': schema_data['repo_enable_downloads'],
            'repo_copy_permissions': schema_data['repo_copy_permissions'],
        }

        # may run synchronously or dispatch to celery; task_id is None when sync
        task = RepoModel().create(form_data=data, cur_user=owner.user_id)
        task_id = get_task_id(task)
        # no commit, it's done in RepoModel, or async via celery
        return {
            'msg': "Created new repository `{}`".format(schema_data['repo_name']),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            "Exception while trying to create the repository %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to create repository `{}`'.format(schema_data['repo_name']))
881
881
882
882
@jsonrpc_method()
def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
                      description=Optional('')):
    """
    Adds an extra field to a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository id.
    :type repoid: str or int
    :param key: Create a unique field key for this repository.
    :type key: str
    :param label:
    :type label: Optional(str)
    :param description:
    :type description: Optional(str)
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    # the key doubles as the label when none was supplied
    label = Optional.extract(label) or key
    description = Optional.extract(description)

    existing_field = RepositoryField.get_by_key_name(key, repo)
    if existing_field:
        raise JSONRPCError(f'Field with key `{key}` exists for repo `{repoid}`')

    try:
        RepoModel().add_repo_field(
            repo, key, field_label=label, field_desc=description)
        Session().commit()
    except Exception:
        log.exception("Exception occurred while trying to add field to repo")
        raise JSONRPCError(
            f'failed to create new field for repository `{repoid}`')
    return {
        'msg': f"Added new repository field `{key}`",
        'success': True,
    }
928
927
929
928
@jsonrpc_method()
def remove_field_from_repo(request, apiuser, repoid, key):
    """
    Removes an extra field from a repository.

    This command can only be run using an |authtoken| with at least
    write permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param key: Set the unique field key for this repository.
    :type key: str
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    field = RepositoryField.get_by_key_name(key, repo)
    if not field:
        # BUGFIX: grammar in the user-facing message ("does not exists" ->
        # "does not exist"); also uses an f-string to match add_field_to_repo.
        raise JSONRPCError(
            f'Field with key `{key}` does not exist for repo `{repoid}`')

    try:
        RepoModel().delete_repo_field(repo, field_key=key)
        Session().commit()
        return {
            'msg': f"Deleted repository field `{key}`",
            'success': True,
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to delete field from repo")
        raise JSONRPCError(
            f'failed to delete field for repository `{repoid}`')
968
967
969
968
@jsonrpc_method()
def update_repo(
        request, apiuser, repoid, repo_name=Optional(None),
        owner=Optional(OAttr('apiuser')), description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None), push_uri=Optional(None),
        landing_rev=Optional(None), fork_of=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False), fields=Optional('')):
    r"""
    Updates a repository with the given information.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    * If the repository name contains "/", repository will be updated
      accordingly with a repository group or nested repository groups

      For example repoid=repo-test name="foo/bar/repo-test" will update |repo|
      called "repo-test" and place it inside group "foo/bar".
      You have to have permissions to access and write to the last repository
      group ("bar" in this example)

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: repository name or repository ID.
    :type repoid: str or int
    :param repo_name: Update the |repo| name, including the
        repository group it's in.
    :type repo_name: str
    :param owner: Set the |repo| owner.
    :type owner: str
    :param fork_of: Set the |repo| as fork of another |repo|.
    :type fork_of: str
    :param description: Update the |repo| description.
    :type description: str
    :param private: Set the |repo| as private. (True | False)
    :type private: bool
    :param clone_uri: Update the |repo| clone URI.
    :type clone_uri: str
    :param landing_rev: Set the |repo| landing revision. e.g branch:default, book:dev, rev:abcd
    :type landing_rev: str
    :param enable_statistics: Enable statistics on the |repo|, (True | False).
    :type enable_statistics: bool
    :param enable_locking: Enable |repo| locking.
    :type enable_locking: bool
    :param enable_downloads: Enable downloads from the |repo|, (True | False).
    :type enable_downloads: bool
    :param fields: Add extra fields to the |repo|. Use the following
        example format: ``field_key=field_val,field_key2=fieldval2``.
        Escape ', ' with \,
    :type fields: str
    """

    repo = get_repo_or_error(repoid)

    # super-admins may also read/return secret repo data in the response
    include_secrets = False
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)
    else:
        include_secrets = True

    # For every parameter: use the caller-supplied value if given, otherwise
    # fall back to the repo's current value (Optional marks "not provided").
    updates = dict(
        repo_name=repo_name
        if not isinstance(repo_name, Optional) else repo.repo_name,

        fork_id=fork_of
        if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,

        user=owner
        if not isinstance(owner, Optional) else repo.user.username,

        repo_description=description
        if not isinstance(description, Optional) else repo.description,

        repo_private=private
        if not isinstance(private, Optional) else repo.private,

        clone_uri=clone_uri
        if not isinstance(clone_uri, Optional) else repo.clone_uri,

        push_uri=push_uri
        if not isinstance(push_uri, Optional) else repo.push_uri,

        repo_landing_rev=landing_rev
        if not isinstance(landing_rev, Optional) else repo._landing_revision,

        repo_enable_statistics=enable_statistics
        if not isinstance(enable_statistics, Optional) else repo.enable_statistics,

        repo_enable_locking=enable_locking
        if not isinstance(enable_locking, Optional) else repo.enable_locking,

        repo_enable_downloads=enable_downloads
        if not isinstance(enable_downloads, Optional) else repo.enable_downloads)

    # make sure the backend's default landing ref is always a valid choice
    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(
        request.translate, repo=repo)
    ref_choices = list(set(ref_choices + [landing_ref]))

    old_values = repo.get_api_data()
    repo_type = repo.repo_type
    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser,
        old_values=old_values)
    try:
        schema_data = schema.deserialize(dict(
            # we save old value, users cannot change type
            repo_type=repo_type,

            repo_name=updates['repo_name'],
            repo_owner=updates['user'],
            repo_description=updates['repo_description'],
            repo_clone_uri=updates['clone_uri'],
            repo_push_uri=updates['push_uri'],
            repo_fork_of=updates['fork_id'],
            repo_private=updates['repo_private'],
            repo_landing_commit_ref=updates['repo_landing_rev'],
            repo_enable_statistics=updates['repo_enable_statistics'],
            repo_enable_downloads=updates['repo_enable_downloads'],
            repo_enable_locking=updates['repo_enable_locking']))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    # save validated data back into the updates dict
    validated_updates = dict(
        repo_name=schema_data['repo_group']['repo_name_without_group'],
        repo_group=schema_data['repo_group']['repo_group_id'],

        user=schema_data['repo_owner'],
        repo_description=schema_data['repo_description'],
        repo_private=schema_data['repo_private'],
        clone_uri=schema_data['repo_clone_uri'],
        push_uri=schema_data['repo_push_uri'],
        repo_landing_rev=schema_data['repo_landing_commit_ref'],
        repo_enable_statistics=schema_data['repo_enable_statistics'],
        repo_enable_locking=schema_data['repo_enable_locking'],
        repo_enable_downloads=schema_data['repo_enable_downloads'],
    )

    # fork target is resolved to a concrete repo id only after validation
    if schema_data['repo_fork_of']:
        fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
        validated_updates['fork_id'] = fork_repo.repo_id

    # extra fields
    fields = parse_args(Optional.extract(fields), key_prefix='ex_')
    if fields:
        validated_updates.update(fields)

    try:
        RepoModel().update(repo, **validated_updates)
        audit_logger.store_api(
            'repo.edit', action_data={'old_data': old_values},
            user=apiuser, repo=repo)
        Session().commit()
        return {
            'msg': f'updated repo ID:{repo.repo_id} {repo.repo_name}',
            'repository': repo.get_api_data(include_secrets=include_secrets)
        }
    except Exception:
        log.exception(
            "Exception while trying to update the repository %s",
            repoid)
        raise JSONRPCError('failed to update repo `%s`' % repoid)
1141
1140
1142
1141
@jsonrpc_method()
def fork_repo(request, apiuser, repoid, fork_name,
              owner=Optional(OAttr('apiuser')),
              description=Optional(''),
              private=Optional(False),
              clone_uri=Optional(None),
              landing_rev=Optional(None),
              copy_permissions=Optional(False)):
    """
    Creates a fork of the specified |repo|.

    * If the fork_name contains "/", fork will be created inside
      a repository group or nested repository groups

      For example "foo/bar/fork-repo" will create fork called "fork-repo"
      inside group "foo/bar". You have to have permissions to access and
      write to the last repository group ("bar" in this example)

    This command can only be run using an |authtoken| with minimum
    read permissions of the forked repo, create fork permissions for an user.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set repository name or repository ID.
    :type repoid: str or int
    :param fork_name: Set the fork name, including it's repository group membership.
    :type fork_name: str
    :param owner: Set the fork owner.
    :type owner: str
    :param description: Set the fork description.
    :type description: str
    :param copy_permissions: Copy permissions from parent |repo|. The
        default is False.
    :type copy_permissions: bool
    :param private: Make the fork private. The default is False.
    :type private: bool
    :param landing_rev: Set the landing revision. E.g branch:default, book:dev, rev:abcd

    Example output:

    .. code-block:: bash

        id : <id_for_response>
        api_key : "<api_key>"
        args: {
            "repoid" : "<reponame or repo_id>",
            "fork_name": "<forkname>",
            "owner": "<username or user_id = Optional(=apiuser)>",
            "description": "<description>",
            "copy_permissions": "<bool>",
            "private": "<bool>",
            "landing_rev": "<landing_rev>"
        }

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Created fork of `<reponame>` as `<forkname>`",
            "success": true,
            "task": "<celery task id or None if done sync>"
        }
        error: null

    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name

    if not has_superadmin_permission(apiuser):
        # check if we have at least read permission for
        # this repo that we fork !
        _perms = ('repository.admin', 'repository.write', 'repository.read')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

        # check if the regular user has at least fork permissions as well
        if not HasPermissionAnyApi(PermissionModel.FORKING_ENABLED)(user=apiuser):
            raise JSONRPCForbidden()

    # check if user can set owner parameter
    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)

    # make sure the backend default landing ref is always a valid choice;
    # fall back to it when the caller did not supply a landing_rev
    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
    ref_choices = list(set(ref_choices + [landing_ref]))
    landing_commit_ref = Optional.extract(landing_rev) or landing_ref

    private = Optional.extract(private)

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo.repo_type,
        # user caller
        user=apiuser)

    try:
        schema_data = schema.deserialize(dict(
            repo_name=fork_name,
            repo_type=repo.repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        data = {
            'fork_parent_id': repo.repo_id,

            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'description': schema_data['repo_description'],
            'private': schema_data['repo_private'],
            'copy_permissions': schema_data['repo_copy_permissions'],
            'landing_rev': schema_data['repo_landing_commit_ref'],
        }

        task = RepoModel().create_fork(data, cur_user=owner.user_id)
        # no commit, it's done in RepoModel, or async via celery
        task_id = get_task_id(task)

        return {
            'msg': 'Created fork of `{}` as `{}`'.format(
                repo.repo_name, schema_data['repo_name']),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            "Exception while trying to create fork %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to fork repository `{}` as `{}`'.format(
                repo_name, schema_data['repo_name']))
1290
1289
1291
1290
@jsonrpc_method()
def delete_repo(request, apiuser, repoid, forks=Optional('')):
    """
    Deletes a repository.

    * When the `forks` parameter is set it's possible to detach or delete
      forks of deleted repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param forks: Set to `detach` or `delete` forks from the |repo|.
    :type forks: Optional(str)

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Deleted repository `<reponame>`",
            "success": true
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    # remember the name: `repo` is re-bound to an audit wrapper below
    repo_name = repo.repo_name
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        handle_forks = Optional.extract(forks)
        _forks_msg = ''
        _forks = [f for f in repo.forks]
        if handle_forks == 'detach':
            _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
        elif handle_forks == 'delete':
            _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
        elif _forks:
            # attached forks and no explicit instruction -> refuse to delete
            raise JSONRPCError(
                'Cannot delete `%s` it still contains attached forks' %
                (repo.repo_name,)
            )
        old_data = repo.get_api_data()
        # NOTE(review): passes the raw `forks` argument (still Optional-wrapped
        # when not provided) rather than the extracted `handle_forks` —
        # confirm RepoModel.delete treats the Optional default as "no action"
        RepoModel().delete(repo, forks=forks)

        repo = audit_logger.RepoWrap(repo_id=None,
                                     repo_name=repo.repo_name)

        audit_logger.store_api(
            'repo.delete', action_data={'old_data': old_data},
            user=apiuser, repo=repo)

        ScmModel().mark_for_invalidation(repo_name, delete=True)
        Session().commit()
        return {
            'msg': f'Deleted repository `{repo_name}`{_forks_msg}',
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying to delete repo")
        raise JSONRPCError(
            f'failed to delete repository `{repo_name}`'
        )
1362
1361
1363
1362
# TODO(marcink): consider renaming this method
@jsonrpc_method()
def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
    """
    Invalidates the cache for the specified repository.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param delete_keys: This deletes the invalidated keys instead of
        just flagging them.
    :type delete_keys: Optional(``True`` | ``False``)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            'msg': Cache for repository `<repository name>` was invalidated,
            'repository': <repository name>
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'Error occurred during cache invalidation action'
        }

    """
    target_repo = get_repo_or_error(repoid)

    # super-admins bypass the per-repository permission check
    if not has_superadmin_permission(apiuser):
        required_perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, target_repo, required_perms)

    remove_keys = Optional.extract(delete_keys)
    try:
        ScmModel().mark_for_invalidation(
            target_repo.repo_name, delete=remove_keys)
    except Exception:
        log.exception(
            "Exception occurred while trying to invalidate repo cache")
        raise JSONRPCError(
            'Error occurred during cache invalidation action'
        )

    return {
        'msg': f'Cache for repository `{repoid}` was invalidated',
        'repository': target_repo.repo_name
    }
1424
1423
1425
1424
# TODO: marcink, change name ?
@jsonrpc_method()
def lock(request, apiuser, repoid, locked=Optional(None),
         userid=Optional(OAttr('apiuser'))):
    """
    Sets the lock state of the specified |repo| by the given user.
    For more information, see :ref:`repo-locking`.

    * If the ``userid`` option is not set, the repository is locked to the
      user who called the method.
    * If the ``locked`` parameter is not set, the current lock state of the
      repository is displayed.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param locked: Sets the lock state.
    :type locked: Optional(``True`` | ``False``)
    :param userid: Set the repository lock to this user.
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
          'repo': '<reponame>',
          'locked': <bool: lock state>,
          'locked_since': <int: lock timestamp>,
          'locked_by': <username of person who made the lock>,
          'lock_reason': <str: reason for locking>,
          'lock_state_changed': <bool: True if lock state has been changed in this request>,
          'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
          or
          'msg': 'Repo `<repository name>` not locked.'
          or
          'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'Error occurred locking repository `<reponame>`'
        }
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least write permission for this repo !
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # make sure normal user does not pass someone else userid,
    # he is not allowed to do that
    if not isinstance(userid, Optional) and userid != apiuser.user_id:
        raise JSONRPCError('userid is not the same as your user')

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)

    if isinstance(locked, Optional):
        # no explicit lock state requested: report the current state
        lockobj = repo.locked

        if lockobj[0] is None:
            _d = {
                'repo': repo.repo_name,
                'locked': False,
                'locked_since': None,
                'locked_by': None,
                'lock_reason': None,
                'lock_state_changed': False,
                'msg': 'Repo `%s` not locked.' % repo.repo_name
            }
            return _d
        else:
            _user_id, _time, _reason = lockobj
            # BUG FIX: resolve the user who actually holds the lock
            # (_user_id from the lock object), not the caller-supplied
            # userid — otherwise `locked_by` reports the wrong user.
            lock_user = get_user_or_error(_user_id)
            _d = {
                'repo': repo.repo_name,
                'locked': True,
                'locked_since': _time,
                'locked_by': lock_user.username,
                'lock_reason': _reason,
                'lock_state_changed': False,
                'msg': ('Repo `%s` locked by `%s` on `%s`.'
                        % (repo.repo_name, lock_user.username,
                           json.dumps(time_to_datetime(_time))))
            }
            return _d

    # force locked state through a flag
    else:
        locked = str2bool(locked)
        lock_reason = Repository.LOCK_API
        try:
            if locked:
                lock_time = time.time()
                Repository.lock(repo, user.user_id, lock_time, lock_reason)
            else:
                lock_time = None
                Repository.unlock(repo)
            _d = {
                'repo': repo.repo_name,
                'locked': locked,
                'locked_since': lock_time,
                'locked_by': user.username,
                'lock_reason': lock_reason,
                'lock_state_changed': True,
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
                        % (user.username, repo.repo_name, locked))
            }
            return _d
        except Exception:
            log.exception(
                "Exception occurred while trying to lock repository")
            raise JSONRPCError(
                'Error occurred locking repository `%s`' % repo.repo_name
            )
1558
1557
1559
1558
@jsonrpc_method()
def comment_commit(
        request, apiuser, repoid, commit_id, message, status=Optional(None),
        comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
        resolves_comment_id=Optional(None), extra_recipients=Optional([]),
        userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
    """
    Set a commit comment, and optionally change the status of the commit.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Specify the commit_id for which to set a comment.
    :type commit_id: str
    :param message: The comment text.
    :type message: str
    :param status: (**Optional**) status of commit, one of: 'not_reviewed',
        'approved', 'rejected', 'under_review'
    :type status: str
    :param comment_type: Comment type, one of: 'note', 'todo'
    :type comment_type: Optional(str), default: 'note'
    :param resolves_comment_id: id of comment which this one will resolve
    :type resolves_comment_id: Optional(int)
    :param extra_recipients: list of user ids or usernames to add
        notifications for this comment. Acts like a CC for notification
    :type extra_recipients: Optional(list)
    :param userid: Set the user name of the comment creator.
    :type userid: Optional(str or int)
    :param send_email: Define if this comment should also send email notification
    :type send_email: Optional(bool)

    Example output:

    .. code-block:: bash

        {
          "id" : <id_given_in_input>,
          "result" : {
            "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
            "status_change": null or <status>,
            "success": true
          },
          "error" : null
        }

    """
    _ = request.translate

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.read', 'repository.write', 'repository.admin')
        validate_repo_permissions(apiuser, repoid, repo, _perms)
    db_repo_name = repo.repo_name

    # resolve the (possibly symbolic) commit reference to a raw id
    try:
        commit = repo.scm_instance().get_commit(commit_id=commit_id)
        commit_id = commit.raw_id
    except Exception as e:
        log.exception('Failed to fetch commit')
        raise JSONRPCError(safe_str(e))

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)
    status = Optional.extract(status)
    comment_type = Optional.extract(comment_type)
    resolves_comment_id = Optional.extract(resolves_comment_id)
    extra_recipients = Optional.extract(extra_recipients)
    send_email = Optional.extract(send_email, binary=True)

    allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
    if status and status not in allowed_statuses:
        # BUG FIX: message previously read "must be on of" (typo)
        raise JSONRPCError('Bad status, must be one '
                           'of %s got %s' % (allowed_statuses, status,))

    if resolves_comment_id:
        # a comment can only resolve an existing TODO comment
        comment = ChangesetComment.get(resolves_comment_id)
        if not comment:
            raise JSONRPCError(
                'Invalid resolves_comment_id `%s` for this commit.'
                % resolves_comment_id)
        if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
            raise JSONRPCError(
                'Comment `%s` is wrong type for setting status to resolved.'
                % resolves_comment_id)

    try:
        rc_config = SettingsModel().get_all_settings()
        renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
        status_change_label = ChangesetStatus.get_status_lbl(status)
        comment = CommentsModel().create(
            message, repo, user, commit_id=commit_id,
            status_change=status_change_label,
            status_change_type=status,
            renderer=renderer,
            comment_type=comment_type,
            resolves_comment_id=resolves_comment_id,
            auth_user=apiuser,
            extra_recipients=extra_recipients,
            send_email=send_email
        )
        is_inline = comment.is_inline

        if status:
            # also do a status change
            try:
                ChangesetStatusModel().set_status(
                    repo, status, user, comment, revision=commit_id,
                    dont_allow_on_closed_pull_request=True
                )
            except StatusChangeOnClosedPullRequestError:
                log.exception(
                    "Exception occurred while trying to change repo commit status")
                msg = ('Changing status on a commit associated with '
                       'a closed pull request is not allowed')
                raise JSONRPCError(msg)

        CommentsModel().trigger_commit_comment_hook(
            repo, apiuser, 'create',
            data={'comment': comment, 'commit': commit})

        Session().commit()

        # notify live viewers of the repo/commit page via channelstream
        comment_broadcast_channel = channelstream.comment_channel(
            db_repo_name, commit_obj=commit)

        comment_data = {'comment': comment, 'comment_id': comment.comment_id}
        comment_type = 'inline' if is_inline else 'general'
        channelstream.comment_channelstream_push(
            request, comment_broadcast_channel, apiuser,
            _('posted a new {} comment').format(comment_type),
            comment_data=comment_data)

        return {
            'msg': (
                'Commented on commit `{}` for repository `{}`'.format(
                    comment.revision, repo.repo_name)),
            'status_change': status,
            'success': True,
        }
    except JSONRPCError:
        # catch any inside errors, and re-raise them to prevent from
        # below global catch to silence them
        raise
    except Exception:
        log.exception("Exception occurred while trying to comment on commit")
        raise JSONRPCError(
            f'failed to set comment on repository `{repo.repo_name}`'
        )
1711
1710
1712
1711
@jsonrpc_method()
def get_repo_comments(request, apiuser, repoid,
                      commit_id=Optional(None), comment_type=Optional(None),
                      userid=Optional(None)):
    """
    Get all comments for a repository

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Optionally filter the comments by the commit_id
    :type commit_id: Optional(str), default: None
    :param comment_type: Optionally filter the comments by the comment_type
        one of: 'note', 'todo'
    :type comment_type: Optional(str), default: None
    :param userid: Optionally filter the comments by the author of comment
    :type userid: Optional(str or int), Default: None

    Example output:

    .. code-block:: bash

        {
          "id" : <id_given_in_input>,
          "result" : [
            {
              "comment_author": <USER_DETAILS>,
              "comment_created_on": "2017-02-01T14:38:16.309",
              "comment_f_path": "file.txt",
              "comment_id": 282,
              "comment_lineno": "n1",
              "comment_resolved_by": null,
              "comment_status": [],
              "comment_text": "This file needs a header",
              "comment_type": "todo",
              "comment_last_version: 0
            }
          ],
          "error" : null
        }

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.read', 'repository.write', 'repository.admin')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    commit_id = Optional.extract(commit_id)

    # resolve an optional author filter to a User object
    userid = Optional.extract(userid)
    if userid:
        user = get_user_or_error(userid)
    else:
        user = None

    comment_type = Optional.extract(comment_type)
    if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
        raise JSONRPCError(
            'comment_type must be one of `{}` got {}'.format(
                ChangesetComment.COMMENT_TYPES, comment_type)
        )

    comments = CommentsModel().get_repository_comments(
        repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
    return comments
1779
1778
1780
1779
@jsonrpc_method()
def get_comment(request, apiuser, comment_id):
    """
    Get single comment from repository or pull_request

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param comment_id: comment id found in the URL of comment
    :type comment_id: str or int

    Example output:

    .. code-block:: bash

        {
          "id" : <id_given_in_input>,
          "result" : {
            "comment_author": <USER_DETAILS>,
            "comment_created_on": "2017-02-01T14:38:16.309",
            "comment_f_path": "file.txt",
            "comment_id": 282,
            "comment_lineno": "n1",
            "comment_resolved_by": null,
            "comment_status": [],
            "comment_text": "This file needs a header",
            "comment_type": "todo",
            "comment_last_version: 0
          },
          "error" : null
        }

    """

    comment = ChangesetComment.get(comment_id)
    if not comment:
        raise JSONRPCError(f'comment `{comment_id}` does not exist')

    # the caller needs at least read access on the repository that owns
    # the comment; otherwise pretend the comment does not exist at all
    # so that comment ids cannot be probed.
    read_perms = ('repository.read', 'repository.write', 'repository.admin')
    perm_checker = HasRepoPermissionAnyApi(*read_perms)
    if not perm_checker(user=apiuser, repo_name=comment.repo.repo_name):
        raise JSONRPCError(f'comment `{comment_id}` does not exist')

    return comment
1826
1825
1827
1826
@jsonrpc_method()
def edit_comment(request, apiuser, message, comment_id, version,
                 userid=Optional(OAttr('apiuser'))):
    """
    Edit comment on the pull request or commit,
    specified by the `comment_id` and version. Initially version should be 0

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param comment_id: Specify the comment_id for editing
    :type comment_id: int
    :param version: version of the comment that will be created, starts from 0
    :type version: int
    :param message: The text content of the comment.
    :type message: str
    :param userid: Comment on the pull request as this user
    :type userid: Optional(str or int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "comment": "<comment data>",
            "version": "<Integer>",
        },
        error : null
    """

    auth_user = apiuser
    comment = ChangesetComment.get(comment_id)
    if not comment:
        raise JSONRPCError(f'comment `{comment_id}` does not exist')

    is_super_admin = has_superadmin_permission(apiuser)
    admin_checker = HasRepoPermissionAnyApi('repository.admin')
    is_repo_admin = admin_checker(
        user=apiuser, repo_name=comment.repo.repo_name)

    # acting on behalf of another user is an admin-only capability
    if not isinstance(userid, Optional):
        if not (is_super_admin or is_repo_admin):
            raise JSONRPCError('userid is not the same as your user')
        apiuser = get_user_or_error(userid)
        auth_user = apiuser.AuthUser()

    comment_author = comment.author.user_id == auth_user.user_id

    if comment.immutable:
        raise JSONRPCError("Immutable comment cannot be edited")

    if not (is_super_admin or is_repo_admin or comment_author):
        raise JSONRPCError("you don't have access to edit this comment")

    # persist the new version; the model rejects stale `version` values
    try:
        comment_history = CommentsModel().edit(
            comment_id=comment_id,
            text=message,
            auth_user=auth_user,
            version=version,
        )
        Session().commit()
    except CommentVersionMismatch:
        raise JSONRPCError(
            f'comment ({comment_id}) version ({version}) mismatch'
        )
    if not comment_history and not message:
        raise JSONRPCError(
            f"comment ({comment_id}) can't be changed with empty string"
        )

    # fire the matching edit hook depending on where the comment lives
    if comment.pull_request:
        pull_request = comment.pull_request
        PullRequestModel().trigger_pull_request_hook(
            pull_request, apiuser, 'comment_edit',
            data={'comment': comment})
    else:
        db_repo = comment.repo
        commit = db_repo.get_commit(comment.revision)
        CommentsModel().trigger_commit_comment_hook(
            db_repo, apiuser, 'edit',
            data={'comment': comment, 'commit': commit})

    return {
        'comment': comment,
        'version': comment_history.version if comment_history else None,
    }
1917
1916
1918
1917
1919 # TODO(marcink): write this with all required logic for deleting a comments in PR or commits
1918 # TODO(marcink): write this with all required logic for deleting a comments in PR or commits
1920 # @jsonrpc_method()
1919 # @jsonrpc_method()
1921 # def delete_comment(request, apiuser, comment_id):
1920 # def delete_comment(request, apiuser, comment_id):
1922 # auth_user = apiuser
1921 # auth_user = apiuser
1923 #
1922 #
1924 # comment = ChangesetComment.get(comment_id)
1923 # comment = ChangesetComment.get(comment_id)
1925 # if not comment:
1924 # if not comment:
1926 # raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1925 # raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1927 #
1926 #
1928 # is_super_admin = has_superadmin_permission(apiuser)
1927 # is_super_admin = has_superadmin_permission(apiuser)
1929 # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1928 # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1930 # (user=apiuser, repo_name=comment.repo.repo_name)
1929 # (user=apiuser, repo_name=comment.repo.repo_name)
1931 #
1930 #
1932 # comment_author = comment.author.user_id == auth_user.user_id
1931 # comment_author = comment.author.user_id == auth_user.user_id
1933 # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1932 # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1934 # raise JSONRPCError("you don't have access to edit this comment")
1933 # raise JSONRPCError("you don't have access to edit this comment")
1935
1934
@jsonrpc_method()
def grant_user_permission(request, apiuser, repoid, userid, perm):
    """
    Grant permissions for the specified user on the given repository,
    or update existing permissions if found.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name.
    :type userid: str
    :param perm: Set the user permissions, using the following format
        ``(repository.(none|read|write|admin))``
    :type perm: str

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
                "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
                "success": true
              }
      error:  null
    """

    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    perm = get_perm_or_error(perm)
    if not has_superadmin_permission(apiuser):
        # non super-admins must be admins of this particular repository
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    additions = [[user.user_id, perm.permission_name, "user"]]
    try:
        changes = RepoModel().update_permissions(
            repo=repo, perm_additions=additions, cur_user=apiuser)

        audit_logger.store_api(
            'repo.edit.permissions',
            action_data={
                'added': changes['added'],
                'updated': changes['updated'],
                'deleted': changes['deleted'],
            },
            user=apiuser, repo=repo)
        Session().commit()
        # permission changes must be flushed from per-user permission caches
        PermissionModel().flush_user_permission_caches(changes)

        granted_msg = (
            f'Granted perm: `{perm.permission_name}` for user: '
            f'`{user.username}` in repo: `{repo.repo_name}`'
        )
        return {'msg': granted_msg, 'success': True}
    except Exception:
        log.exception("Exception occurred while trying edit permissions for repo")
        raise JSONRPCError(
            f'failed to edit permission for user: `{userid}` in repo: `{repoid}`')
2002
2001
2003
2002
@jsonrpc_method()
def revoke_user_permission(request, apiuser, repoid, userid):
    """
    Revoke permission for a user on the specified repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param userid: Set the user name of revoked user.
    :type userid: str or int

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
                "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
                "success": true
              }
      error:  null
    """

    repo = get_repo_or_error(repoid)
    user = get_user_or_error(userid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    perm_deletions = [[user.user_id, None, "user"]]
    try:
        # BUGFIX: pass the API caller as `cur_user`, not the user being revoked.
        # The previous `cur_user=user` attributed the change to the revoked
        # account; grant_user_permission and both user-group variants all pass
        # `apiuser` here.
        changes = RepoModel().update_permissions(
            repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)

        action_data = {
            'added': changes['added'],
            'updated': changes['updated'],
            'deleted': changes['deleted'],
        }
        audit_logger.store_api(
            'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
        Session().commit()
        # permission changes must be flushed from per-user permission caches
        PermissionModel().flush_user_permission_caches(changes)

        return {
            'msg': 'Revoked perm for user: `{}` in repo: `{}`'.format(
                user.username, repo.repo_name
            ),
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying revoke permissions to repo")
        raise JSONRPCError(
            'failed to edit permission for user: `{}` in repo: `{}`'.format(
                userid, repoid
            )
        )
2065
2064
2066
2065
@jsonrpc_method()
def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
    """
    Grant permission for a user group on the specified repository,
    or update existing permissions.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param usergroupid: Specify the ID of the user group.
    :type usergroupid: str or int
    :param perm: Set the user group permissions using the following
        format: (repository.(none|read|write|admin))
    :type perm: str

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
        "success": true

      }
      error :  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error : {
        "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
      }

    """

    repo = get_repo_or_error(repoid)
    perm = get_perm_or_error(perm)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    user_group = get_user_group_or_error(usergroupid)
    if not has_superadmin_permission(apiuser):
        # the caller also needs at least read access on the user group itself;
        # the error deliberately reads "does not exist" rather than "no access"
        group_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
        can_see_group = HasUserGroupPermissionAnyApi(*group_perms)(
            user=apiuser, user_group_name=user_group.users_group_name)
        if not can_see_group:
            raise JSONRPCError(
                f'user group `{usergroupid}` does not exist')

    additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
    try:
        changes = RepoModel().update_permissions(
            repo=repo, perm_additions=additions, cur_user=apiuser)
        audit_logger.store_api(
            'repo.edit.permissions',
            action_data={
                'added': changes['added'],
                'updated': changes['updated'],
                'deleted': changes['deleted'],
            },
            user=apiuser, repo=repo)
        Session().commit()
        # permission changes must be flushed from per-user permission caches
        PermissionModel().flush_user_permission_caches(changes)

        granted_msg = (
            f'Granted perm: `{perm.permission_name}` for user group: '
            f'`{user_group.users_group_name}` in repo: `{repo.repo_name}`'
        )
        return {'msg': granted_msg, 'success': True}
    except Exception:
        log.exception(
            "Exception occurred while trying change permission on repo")
        raise JSONRPCError(
            f'failed to edit permission for user group: `{usergroupid}` in '
            f'repo: `{repo.repo_name}`'
        )
2156
2155
2157
2156
@jsonrpc_method()
def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
    """
    Revoke the permissions of a user group on a given repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param usergroupid: Specify the user group ID.
    :type usergroupid: str or int

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result: {
                "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
                "success": true
              }
      error:  null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    user_group = get_user_group_or_error(usergroupid)
    if not has_superadmin_permission(apiuser):
        # the caller also needs at least read access on the user group itself;
        # the error deliberately reads "does not exist" rather than "no access"
        group_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
        can_see_group = HasUserGroupPermissionAnyApi(*group_perms)(
            user=apiuser, user_group_name=user_group.users_group_name)
        if not can_see_group:
            raise JSONRPCError(
                f'user group `{usergroupid}` does not exist')

    deletions = [[user_group.users_group_id, None, "user_group"]]
    try:
        changes = RepoModel().update_permissions(
            repo=repo, perm_deletions=deletions, cur_user=apiuser)
        audit_logger.store_api(
            'repo.edit.permissions',
            action_data={
                'added': changes['added'],
                'updated': changes['updated'],
                'deleted': changes['deleted'],
            },
            user=apiuser, repo=repo)
        Session().commit()
        # permission changes must be flushed from per-user permission caches
        PermissionModel().flush_user_permission_caches(changes)

        revoked_msg = (
            f'Revoked perm for user group: `{user_group.users_group_name}` '
            f'in repo: `{repo.repo_name}`'
        )
        return {'msg': revoked_msg, 'success': True}
    except Exception:
        log.exception("Exception occurred while trying revoke "
                      "user group permission on repo")
        raise JSONRPCError(
            f'failed to edit permission for user group: '
            f'`{user_group.users_group_name}` in repo: `{repo.repo_name}`'
        )
2228
2227
2229
2228
@jsonrpc_method()
def pull(request, apiuser, repoid, remote_uri=Optional(None)):
    """
    Triggers a pull on the given repository from a remote location. You
    can use this to keep remote repositories up-to-date.

    This command can only be run using an |authtoken| with admin
    rights to the specified repository. For more information,
    see :ref:`config-token-ref`.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param remote_uri: Optional remote URI to pass in for pull
    :type remote_uri: str

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
        "repository": "<repository name>"
      }
      error :  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
         "Unable to push changes from `<remote_url>`"
      }

    """

    repo = get_repo_or_error(repoid)
    remote_uri = Optional.extract(remote_uri)
    # displayed URI falls back to the repo's (obfuscated) clone URI
    remote_uri_display = remote_uri or repo.clone_uri_hidden
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    try:
        ScmModel().pull_changes(
            repo.repo_name, apiuser.username, remote_uri=remote_uri)
        return {
            'msg': f'Pulled from url `{remote_uri_display}` on repo `{repo.repo_name}`',
            'repository': repo.repo_name,
        }
    except Exception:
        log.exception("Exception occurred while trying to "
                      "pull changes from remote location")
        raise JSONRPCError(
            'Unable to pull changes from `%s`' % remote_uri_display
        )
2293
2292
2294
2293
@jsonrpc_method()
def strip(request, apiuser, repoid, revision, branch):
    """
    Strips the given revision from the specified repository.

    * This will remove the revision and all of its descendants.

    This command can only be run using an |authtoken| with admin rights to
    the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision you wish to strip.
    :type revision: str
    :param branch: The branch from which to strip the revision.
    :type branch: str

    Example output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : {
        "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
        "repository": "<repository name>"
      }
      error :  null

    Example error output:

    .. code-block:: bash

      id : <id_given_in_input>
      result : null
      error :  {
        "Unable to strip commit <commit_hash> from repo `<repository name>`"
      }

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    try:
        ScmModel().strip(repo, revision, branch)
        # stripping rewrites history, so it is always audit-logged (and
        # committed immediately via commit=True)
        audit_logger.store_api(
            'repo.commit.strip', action_data={'commit_id': revision},
            repo=repo,
            user=apiuser, commit=True)

        return {
            'msg': f'Stripped commit {revision} from repo `{repo.repo_name}`',
            'repository': repo.repo_name,
        }
    except Exception:
        log.exception("Exception while trying to strip")
        raise JSONRPCError(
            f'Unable to strip commit {revision} from repo `{repo.repo_name}`'
        )
2362
2361
2363
2362
@jsonrpc_method()
def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
    """
    Returns all settings for a repository. If key is given it only returns the
    setting identified by the key or null.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param key: Key of the setting to return.
    :type: key: Optional(str)

    Example output:

    .. code-block:: bash

        {
            "error": null,
            "id": 237,
            "result": {
                "extensions_largefiles": true,
                "extensions_evolve": true,
                "hooks_changegroup_push_logger": true,
                "hooks_changegroup_repo_size": false,
                "hooks_outgoing_pull_logger": true,
                "phases_publish": "True",
                "rhodecode_hg_use_rebase_for_merging": true,
                "rhodecode_pr_merge_enabled": true,
                "rhodecode_use_outdated_comments": true
            }
        }
    """

    # Restrict access to this api method to super-admins, and repo admins only.
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        validate_repo_permissions(apiuser, repoid, repo, ('repository.admin',))

    try:
        settings_model = VcsSettingsModel(repo=repo)
        # start from the global settings, then let repo-level values override
        settings = settings_model.get_global_settings()
        settings.update(settings_model.get_repo_settings())

        # a single requested key narrows the result to that value (or None)
        requested_key = Optional.extract(key)
        if requested_key is not None:
            settings = settings.get(requested_key, None)
    except Exception:
        msg = f'Failed to fetch settings for repository `{repoid}`'
        log.exception(msg)
        raise JSONRPCError(msg)

    return settings
2419
2418
2420
2419
2421 @jsonrpc_method()
2420 @jsonrpc_method()
2422 def set_repo_settings(request, apiuser, repoid, settings):
2421 def set_repo_settings(request, apiuser, repoid, settings):
2423 """
2422 """
2424 Update repository settings. Returns true on success.
2423 Update repository settings. Returns true on success.
2425
2424
2426 :param apiuser: This is filled automatically from the |authtoken|.
2425 :param apiuser: This is filled automatically from the |authtoken|.
2427 :type apiuser: AuthUser
2426 :type apiuser: AuthUser
2428 :param repoid: The repository name or repository id.
2427 :param repoid: The repository name or repository id.
2429 :type repoid: str or int
2428 :type repoid: str or int
2430 :param settings: The new settings for the repository.
2429 :param settings: The new settings for the repository.
2431 :type: settings: dict
2430 :type: settings: dict
2432
2431
2433 Example output:
2432 Example output:
2434
2433
2435 .. code-block:: bash
2434 .. code-block:: bash
2436
2435
2437 {
2436 {
2438 "error": null,
2437 "error": null,
2439 "id": 237,
2438 "id": 237,
2440 "result": true
2439 "result": true
2441 }
2440 }
2442 """
2441 """
2443 # Restrict access to this api method to super-admins, and repo admins only.
2442 # Restrict access to this api method to super-admins, and repo admins only.
2444 repo = get_repo_or_error(repoid)
2443 repo = get_repo_or_error(repoid)
2445 if not has_superadmin_permission(apiuser):
2444 if not has_superadmin_permission(apiuser):
2446 _perms = ('repository.admin',)
2445 _perms = ('repository.admin',)
2447 validate_repo_permissions(apiuser, repoid, repo, _perms)
2446 validate_repo_permissions(apiuser, repoid, repo, _perms)
2448
2447
2449 if type(settings) is not dict:
2448 if type(settings) is not dict:
2450 raise JSONRPCError('Settings have to be a JSON Object.')
2449 raise JSONRPCError('Settings have to be a JSON Object.')
2451
2450
2452 try:
2451 try:
2453 settings_model = VcsSettingsModel(repo=repoid)
2452 settings_model = VcsSettingsModel(repo=repoid)
2454
2453
2455 # Merge global, repo and incoming settings.
2454 # Merge global, repo and incoming settings.
2456 new_settings = settings_model.get_global_settings()
2455 new_settings = settings_model.get_global_settings()
2457 new_settings.update(settings_model.get_repo_settings())
2456 new_settings.update(settings_model.get_repo_settings())
2458 new_settings.update(settings)
2457 new_settings.update(settings)
2459
2458
2460 # Update the settings.
2459 # Update the settings.
2461 inherit_global_settings = new_settings.get(
2460 inherit_global_settings = new_settings.get(
2462 'inherit_global_settings', False)
2461 'inherit_global_settings', False)
2463 settings_model.create_or_update_repo_settings(
2462 settings_model.create_or_update_repo_settings(
2464 new_settings, inherit_global_settings=inherit_global_settings)
2463 new_settings, inherit_global_settings=inherit_global_settings)
2465 Session().commit()
2464 Session().commit()
2466 except Exception:
2465 except Exception:
2467 msg = f'Failed to update settings for repository `{repoid}`'
2466 msg = f'Failed to update settings for repository `{repoid}`'
2468 log.exception(msg)
2467 log.exception(msg)
2469 raise JSONRPCError(msg)
2468 raise JSONRPCError(msg)
2470
2469
2471 # Indicate success.
2470 # Indicate success.
2472 return True
2471 return True
2473
2472
2474
2473
2475 @jsonrpc_method()
2474 @jsonrpc_method()
2476 def maintenance(request, apiuser, repoid):
2475 def maintenance(request, apiuser, repoid):
2477 """
2476 """
2478 Triggers a maintenance on the given repository.
2477 Triggers a maintenance on the given repository.
2479
2478
2480 This command can only be run using an |authtoken| with admin
2479 This command can only be run using an |authtoken| with admin
2481 rights to the specified repository. For more information,
2480 rights to the specified repository. For more information,
2482 see :ref:`config-token-ref`.
2481 see :ref:`config-token-ref`.
2483
2482
2484 This command takes the following options:
2483 This command takes the following options:
2485
2484
2486 :param apiuser: This is filled automatically from the |authtoken|.
2485 :param apiuser: This is filled automatically from the |authtoken|.
2487 :type apiuser: AuthUser
2486 :type apiuser: AuthUser
2488 :param repoid: The repository name or repository ID.
2487 :param repoid: The repository name or repository ID.
2489 :type repoid: str or int
2488 :type repoid: str or int
2490
2489
2491 Example output:
2490 Example output:
2492
2491
2493 .. code-block:: bash
2492 .. code-block:: bash
2494
2493
2495 id : <id_given_in_input>
2494 id : <id_given_in_input>
2496 result : {
2495 result : {
2497 "msg": "executed maintenance command",
2496 "msg": "executed maintenance command",
2498 "executed_actions": [
2497 "executed_actions": [
2499 <action_message>, <action_message2>...
2498 <action_message>, <action_message2>...
2500 ],
2499 ],
2501 "repository": "<repository name>"
2500 "repository": "<repository name>"
2502 }
2501 }
2503 error : null
2502 error : null
2504
2503
2505 Example error output:
2504 Example error output:
2506
2505
2507 .. code-block:: bash
2506 .. code-block:: bash
2508
2507
2509 id : <id_given_in_input>
2508 id : <id_given_in_input>
2510 result : null
2509 result : null
2511 error : {
2510 error : {
2512 "Unable to execute maintenance on `<reponame>`"
2511 "Unable to execute maintenance on `<reponame>`"
2513 }
2512 }
2514
2513
2515 """
2514 """
2516
2515
2517 repo = get_repo_or_error(repoid)
2516 repo = get_repo_or_error(repoid)
2518 if not has_superadmin_permission(apiuser):
2517 if not has_superadmin_permission(apiuser):
2519 _perms = ('repository.admin',)
2518 _perms = ('repository.admin',)
2520 validate_repo_permissions(apiuser, repoid, repo, _perms)
2519 validate_repo_permissions(apiuser, repoid, repo, _perms)
2521
2520
2522 try:
2521 try:
2523 maintenance = repo_maintenance.RepoMaintenance()
2522 maintenance = repo_maintenance.RepoMaintenance()
2524 executed_actions = maintenance.execute(repo)
2523 executed_actions = maintenance.execute(repo)
2525
2524
2526 return {
2525 return {
2527 'msg': 'executed maintenance command',
2526 'msg': 'executed maintenance command',
2528 'executed_actions': executed_actions,
2527 'executed_actions': executed_actions,
2529 'repository': repo.repo_name
2528 'repository': repo.repo_name
2530 }
2529 }
2531 except Exception:
2530 except Exception:
2532 log.exception("Exception occurred while trying to run maintenance")
2531 log.exception("Exception occurred while trying to run maintenance")
2533 raise JSONRPCError(
2532 raise JSONRPCError(
2534 'Unable to execute maintenance on `%s`' % repo.repo_name)
2533 'Unable to execute maintenance on `%s`' % repo.repo_name)
@@ -1,134 +1,132 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import pytest
20 import pytest
21 import mock
21 import mock
22
22
23 from rhodecode.apps._base import ADMIN_PREFIX
24 from rhodecode.lib import helpers as h
25 from rhodecode.lib.auth import check_password
23 from rhodecode.lib.auth import check_password
26 from rhodecode.model.meta import Session
24 from rhodecode.model.meta import Session
27 from rhodecode.model.user import UserModel
25 from rhodecode.model.user import UserModel
28 from rhodecode.tests import assert_session_flash
26 from rhodecode.tests import assert_session_flash, TestController
29 from rhodecode.tests.fixture import Fixture, TestController, error_function
27 from rhodecode.tests.fixture import Fixture, error_function
30 from rhodecode.tests.routes import route_path
28 from rhodecode.tests.routes import route_path
31
29
32 fixture = Fixture()
30 fixture = Fixture()
33
31
34
32
35 test_user_1 = 'testme'
33 test_user_1 = 'testme'
36 test_user_1_password = '0jd83nHNS/d23n'
34 test_user_1_password = '0jd83nHNS/d23n'
37
35
38
36
39 class TestMyAccountPassword(TestController):
37 class TestMyAccountPassword(TestController):
40 def test_valid_change_password(self, user_util):
38 def test_valid_change_password(self, user_util):
41 new_password = 'my_new_valid_password'
39 new_password = 'my_new_valid_password'
42 user = user_util.create_user(password=test_user_1_password)
40 user = user_util.create_user(password=test_user_1_password)
43 self.log_user(user.username, test_user_1_password)
41 self.log_user(user.username, test_user_1_password)
44
42
45 form_data = [
43 form_data = [
46 ('current_password', test_user_1_password),
44 ('current_password', test_user_1_password),
47 ('__start__', 'new_password:mapping'),
45 ('__start__', 'new_password:mapping'),
48 ('new_password', new_password),
46 ('new_password', new_password),
49 ('new_password-confirm', new_password),
47 ('new_password-confirm', new_password),
50 ('__end__', 'new_password:mapping'),
48 ('__end__', 'new_password:mapping'),
51 ('csrf_token', self.csrf_token),
49 ('csrf_token', self.csrf_token),
52 ]
50 ]
53 response = self.app.post(
51 response = self.app.post(
54 route_path('my_account_password_update'), form_data).follow()
52 route_path('my_account_password_update'), form_data).follow()
55 assert 'Successfully updated password' in response
53 assert 'Successfully updated password' in response
56
54
57 # check_password depends on user being in session
55 # check_password depends on user being in session
58 Session().add(user)
56 Session().add(user)
59 try:
57 try:
60 assert check_password(new_password, user.password)
58 assert check_password(new_password, user.password)
61 finally:
59 finally:
62 Session().expunge(user)
60 Session().expunge(user)
63
61
64 @pytest.mark.parametrize('current_pw, new_pw, confirm_pw', [
62 @pytest.mark.parametrize('current_pw, new_pw, confirm_pw', [
65 ('', 'abcdef123', 'abcdef123'),
63 ('', 'abcdef123', 'abcdef123'),
66 ('wrong_pw', 'abcdef123', 'abcdef123'),
64 ('wrong_pw', 'abcdef123', 'abcdef123'),
67 (test_user_1_password, test_user_1_password, test_user_1_password),
65 (test_user_1_password, test_user_1_password, test_user_1_password),
68 (test_user_1_password, '', ''),
66 (test_user_1_password, '', ''),
69 (test_user_1_password, 'abcdef123', ''),
67 (test_user_1_password, 'abcdef123', ''),
70 (test_user_1_password, '', 'abcdef123'),
68 (test_user_1_password, '', 'abcdef123'),
71 (test_user_1_password, 'not_the', 'same_pw'),
69 (test_user_1_password, 'not_the', 'same_pw'),
72 (test_user_1_password, 'short', 'short'),
70 (test_user_1_password, 'short', 'short'),
73 ])
71 ])
74 def test_invalid_change_password(self, current_pw, new_pw, confirm_pw,
72 def test_invalid_change_password(self, current_pw, new_pw, confirm_pw,
75 user_util):
73 user_util):
76 user = user_util.create_user(password=test_user_1_password)
74 user = user_util.create_user(password=test_user_1_password)
77 self.log_user(user.username, test_user_1_password)
75 self.log_user(user.username, test_user_1_password)
78
76
79 form_data = [
77 form_data = [
80 ('current_password', current_pw),
78 ('current_password', current_pw),
81 ('__start__', 'new_password:mapping'),
79 ('__start__', 'new_password:mapping'),
82 ('new_password', new_pw),
80 ('new_password', new_pw),
83 ('new_password-confirm', confirm_pw),
81 ('new_password-confirm', confirm_pw),
84 ('__end__', 'new_password:mapping'),
82 ('__end__', 'new_password:mapping'),
85 ('csrf_token', self.csrf_token),
83 ('csrf_token', self.csrf_token),
86 ]
84 ]
87 response = self.app.post(
85 response = self.app.post(
88 route_path('my_account_password_update'), form_data)
86 route_path('my_account_password_update'), form_data)
89
87
90 assert_response = response.assert_response()
88 assert_response = response.assert_response()
91 assert assert_response.get_elements('.error-block')
89 assert assert_response.get_elements('.error-block')
92
90
93 @mock.patch.object(UserModel, 'update_user', error_function)
91 @mock.patch.object(UserModel, 'update_user', error_function)
94 def test_invalid_change_password_exception(self, user_util):
92 def test_invalid_change_password_exception(self, user_util):
95 user = user_util.create_user(password=test_user_1_password)
93 user = user_util.create_user(password=test_user_1_password)
96 self.log_user(user.username, test_user_1_password)
94 self.log_user(user.username, test_user_1_password)
97
95
98 form_data = [
96 form_data = [
99 ('current_password', test_user_1_password),
97 ('current_password', test_user_1_password),
100 ('__start__', 'new_password:mapping'),
98 ('__start__', 'new_password:mapping'),
101 ('new_password', '123456'),
99 ('new_password', '123456'),
102 ('new_password-confirm', '123456'),
100 ('new_password-confirm', '123456'),
103 ('__end__', 'new_password:mapping'),
101 ('__end__', 'new_password:mapping'),
104 ('csrf_token', self.csrf_token),
102 ('csrf_token', self.csrf_token),
105 ]
103 ]
106 response = self.app.post(
104 response = self.app.post(
107 route_path('my_account_password_update'), form_data)
105 route_path('my_account_password_update'), form_data)
108 assert_session_flash(
106 assert_session_flash(
109 response, 'Error occurred during update of user password')
107 response, 'Error occurred during update of user password')
110
108
111 def test_password_is_updated_in_session_on_password_change(self, user_util):
109 def test_password_is_updated_in_session_on_password_change(self, user_util):
112 old_password = 'abcdef123'
110 old_password = 'abcdef123'
113 new_password = 'abcdef124'
111 new_password = 'abcdef124'
114
112
115 user = user_util.create_user(password=old_password)
113 user = user_util.create_user(password=old_password)
116 session = self.log_user(user.username, old_password)
114 session = self.log_user(user.username, old_password)
117 old_password_hash = session['password']
115 old_password_hash = session['password']
118
116
119 form_data = [
117 form_data = [
120 ('current_password', old_password),
118 ('current_password', old_password),
121 ('__start__', 'new_password:mapping'),
119 ('__start__', 'new_password:mapping'),
122 ('new_password', new_password),
120 ('new_password', new_password),
123 ('new_password-confirm', new_password),
121 ('new_password-confirm', new_password),
124 ('__end__', 'new_password:mapping'),
122 ('__end__', 'new_password:mapping'),
125 ('csrf_token', self.csrf_token),
123 ('csrf_token', self.csrf_token),
126 ]
124 ]
127 self.app.post(
125 self.app.post(
128 route_path('my_account_password_update'), form_data)
126 route_path('my_account_password_update'), form_data)
129
127
130 response = self.app.get(route_path('home'))
128 response = self.app.get(route_path('home'))
131 session = response.get_session_from_response()
129 session = response.get_session_from_response()
132 new_password_hash = session['rhodecode_user']['password']
130 new_password_hash = session['rhodecode_user']['password']
133
131
134 assert old_password_hash != new_password_hash No newline at end of file
132 assert old_password_hash != new_password_hash
@@ -1,847 +1,845 b''
1 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 import os
20 import os
21 import sys
21 import sys
22 import time
22 import time
23 import platform
23 import platform
24 import collections
24 import collections
25 import psutil
25 import psutil
26 from functools import wraps
26 from functools import wraps
27
27
28 import pkg_resources
28 import pkg_resources
29 import logging
29 import logging
30 import resource
30 import resource
31
31
32 import configparser
32 import configparser
33
33
34 from rc_license.models import LicenseModel
34 from rc_license.models import LicenseModel
35 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.str_utils import safe_str
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 _NA = 'NOT AVAILABLE'
40 _NA = 'NOT AVAILABLE'
41 _NA_FLOAT = 0.0
41 _NA_FLOAT = 0.0
42
42
43 STATE_OK = 'ok'
43 STATE_OK = 'ok'
44 STATE_ERR = 'error'
44 STATE_ERR = 'error'
45 STATE_WARN = 'warning'
45 STATE_WARN = 'warning'
46
46
47 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
47 STATE_OK_DEFAULT = {'message': '', 'type': STATE_OK}
48
48
49
49
50 registered_helpers = {}
50 registered_helpers = {}
51
51
52
52
53 def register_sysinfo(func):
53 def register_sysinfo(func):
54 """
54 """
55 @register_helper
55 @register_helper
56 def db_check():
56 def db_check():
57 pass
57 pass
58
58
59 db_check == registered_helpers['db_check']
59 db_check == registered_helpers['db_check']
60 """
60 """
61 global registered_helpers
61 global registered_helpers
62 registered_helpers[func.__name__] = func
62 registered_helpers[func.__name__] = func
63
63
64 @wraps(func)
64 @wraps(func)
65 def _wrapper(*args, **kwargs):
65 def _wrapper(*args, **kwargs):
66 return func(*args, **kwargs)
66 return func(*args, **kwargs)
67 return _wrapper
67 return _wrapper
68
68
69
69
70 # HELPERS
70 # HELPERS
71 def percentage(part: (int, float), whole: (int, float)):
71 def percentage(part: (int, float), whole: (int, float)):
72 whole = float(whole)
72 whole = float(whole)
73 if whole > 0:
73 if whole > 0:
74 return round(100 * float(part) / whole, 1)
74 return round(100 * float(part) / whole, 1)
75 return 0.0
75 return 0.0
76
76
77
77
78 def get_storage_size(storage_path):
78 def get_storage_size(storage_path):
79 sizes = []
79 sizes = []
80 for file_ in os.listdir(storage_path):
80 for file_ in os.listdir(storage_path):
81 storage_file = os.path.join(storage_path, file_)
81 storage_file = os.path.join(storage_path, file_)
82 if os.path.isfile(storage_file):
82 if os.path.isfile(storage_file):
83 try:
83 try:
84 sizes.append(os.path.getsize(storage_file))
84 sizes.append(os.path.getsize(storage_file))
85 except OSError:
85 except OSError:
86 log.exception('Failed to get size of storage file %s', storage_file)
86 log.exception('Failed to get size of storage file %s', storage_file)
87 pass
87 pass
88
88
89 return sum(sizes)
89 return sum(sizes)
90
90
91
91
92 def get_resource(resource_type):
92 def get_resource(resource_type):
93 try:
93 try:
94 return resource.getrlimit(resource_type)
94 return resource.getrlimit(resource_type)
95 except Exception:
95 except Exception:
96 return 'NOT_SUPPORTED'
96 return 'NOT_SUPPORTED'
97
97
98
98
99 def get_cert_path(ini_path):
99 def get_cert_path(ini_path):
100 default = '/etc/ssl/certs/ca-certificates.crt'
100 default = '/etc/ssl/certs/ca-certificates.crt'
101 control_ca_bundle = os.path.join(
101 control_ca_bundle = os.path.join(
102 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
102 os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(ini_path)))),
103 '.rccontrol-profile/etc/ca-bundle.crt')
103 '.rccontrol-profile/etc/ca-bundle.crt')
104 if os.path.isfile(control_ca_bundle):
104 if os.path.isfile(control_ca_bundle):
105 default = control_ca_bundle
105 default = control_ca_bundle
106
106
107 return default
107 return default
108
108
109
109
110 class SysInfoRes(object):
110 class SysInfoRes(object):
111 def __init__(self, value, state=None, human_value=None):
111 def __init__(self, value, state=None, human_value=None):
112 self.value = value
112 self.value = value
113 self.state = state or STATE_OK_DEFAULT
113 self.state = state or STATE_OK_DEFAULT
114 self.human_value = human_value or value
114 self.human_value = human_value or value
115
115
116 def __json__(self):
116 def __json__(self):
117 return {
117 return {
118 'value': self.value,
118 'value': self.value,
119 'state': self.state,
119 'state': self.state,
120 'human_value': self.human_value,
120 'human_value': self.human_value,
121 }
121 }
122
122
123 def get_value(self):
123 def get_value(self):
124 return self.__json__()
124 return self.__json__()
125
125
126 def __str__(self):
126 def __str__(self):
127 return f'<SysInfoRes({self.__json__()})>'
127 return f'<SysInfoRes({self.__json__()})>'
128
128
129
129
130 class SysInfo(object):
130 class SysInfo(object):
131
131
132 def __init__(self, func_name, **kwargs):
132 def __init__(self, func_name, **kwargs):
133 self.function_name = func_name
133 self.function_name = func_name
134 self.value = _NA
134 self.value = _NA
135 self.state = None
135 self.state = None
136 self.kwargs = kwargs or {}
136 self.kwargs = kwargs or {}
137
137
138 def __call__(self):
138 def __call__(self):
139 computed = self.compute(**self.kwargs)
139 computed = self.compute(**self.kwargs)
140 if not isinstance(computed, SysInfoRes):
140 if not isinstance(computed, SysInfoRes):
141 raise ValueError(
141 raise ValueError(
142 'computed value for {} is not instance of '
142 'computed value for {} is not instance of '
143 '{}, got {} instead'.format(
143 '{}, got {} instead'.format(
144 self.function_name, SysInfoRes, type(computed)))
144 self.function_name, SysInfoRes, type(computed)))
145 return computed.__json__()
145 return computed.__json__()
146
146
147 def __str__(self):
147 def __str__(self):
148 return f'<SysInfo({self.function_name})>'
148 return f'<SysInfo({self.function_name})>'
149
149
150 def compute(self, **kwargs):
150 def compute(self, **kwargs):
151 return self.function_name(**kwargs)
151 return self.function_name(**kwargs)
152
152
153
153
154 # SysInfo functions
154 # SysInfo functions
155 @register_sysinfo
155 @register_sysinfo
156 def python_info():
156 def python_info():
157 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
157 value = dict(version=f'{platform.python_version()}:{platform.python_implementation()}',
158 executable=sys.executable)
158 executable=sys.executable)
159 return SysInfoRes(value=value)
159 return SysInfoRes(value=value)
160
160
161
161
162 @register_sysinfo
162 @register_sysinfo
163 def py_modules():
163 def py_modules():
164 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
164 mods = dict([(p.project_name, {'version': p.version, 'location': p.location})
165 for p in pkg_resources.working_set])
165 for p in pkg_resources.working_set])
166
166
167 value = sorted(mods.items(), key=lambda k: k[0].lower())
167 value = sorted(mods.items(), key=lambda k: k[0].lower())
168 return SysInfoRes(value=value)
168 return SysInfoRes(value=value)
169
169
170
170
171 @register_sysinfo
171 @register_sysinfo
172 def platform_type():
172 def platform_type():
173 from rhodecode.lib.utils import generate_platform_uuid
173 from rhodecode.lib.utils import generate_platform_uuid
174
174
175 value = dict(
175 value = dict(
176 name=safe_str(platform.platform()),
176 name=safe_str(platform.platform()),
177 uuid=generate_platform_uuid()
177 uuid=generate_platform_uuid()
178 )
178 )
179 return SysInfoRes(value=value)
179 return SysInfoRes(value=value)
180
180
181
181
182 @register_sysinfo
182 @register_sysinfo
183 def locale_info():
183 def locale_info():
184 import locale
184 import locale
185
185
186 def safe_get_locale(locale_name):
186 def safe_get_locale(locale_name):
187 try:
187 try:
188 locale.getlocale(locale_name)
188 locale.getlocale(locale_name)
189 except TypeError:
189 except TypeError:
190 return f'FAILED_LOCALE_GET:{locale_name}'
190 return f'FAILED_LOCALE_GET:{locale_name}'
191
191
192 value = dict(
192 value = dict(
193 locale_default=locale.getlocale(),
193 locale_default=locale.getlocale(),
194 locale_lc_all=safe_get_locale(locale.LC_ALL),
194 locale_lc_all=safe_get_locale(locale.LC_ALL),
195 locale_lc_ctype=safe_get_locale(locale.LC_CTYPE),
195 locale_lc_ctype=safe_get_locale(locale.LC_CTYPE),
196 lang_env=os.environ.get('LANG'),
196 lang_env=os.environ.get('LANG'),
197 lc_all_env=os.environ.get('LC_ALL'),
197 lc_all_env=os.environ.get('LC_ALL'),
198 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
198 local_archive_env=os.environ.get('LOCALE_ARCHIVE'),
199 )
199 )
200 human_value = \
200 human_value = \
201 f"LANG: {value['lang_env']}, \
201 f"LANG: {value['lang_env']}, \
202 locale LC_ALL: {value['locale_lc_all']}, \
202 locale LC_ALL: {value['locale_lc_all']}, \
203 locale LC_CTYPE: {value['locale_lc_ctype']}, \
203 locale LC_CTYPE: {value['locale_lc_ctype']}, \
204 Default locales: {value['locale_default']}"
204 Default locales: {value['locale_default']}"
205
205
206 return SysInfoRes(value=value, human_value=human_value)
206 return SysInfoRes(value=value, human_value=human_value)
207
207
208
208
209 @register_sysinfo
209 @register_sysinfo
210 def ulimit_info():
210 def ulimit_info():
211 data = collections.OrderedDict([
211 data = collections.OrderedDict([
212 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
212 ('cpu time (seconds)', get_resource(resource.RLIMIT_CPU)),
213 ('file size', get_resource(resource.RLIMIT_FSIZE)),
213 ('file size', get_resource(resource.RLIMIT_FSIZE)),
214 ('stack size', get_resource(resource.RLIMIT_STACK)),
214 ('stack size', get_resource(resource.RLIMIT_STACK)),
215 ('core file size', get_resource(resource.RLIMIT_CORE)),
215 ('core file size', get_resource(resource.RLIMIT_CORE)),
216 ('address space size', get_resource(resource.RLIMIT_AS)),
216 ('address space size', get_resource(resource.RLIMIT_AS)),
217 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
217 ('locked in mem size', get_resource(resource.RLIMIT_MEMLOCK)),
218 ('heap size', get_resource(resource.RLIMIT_DATA)),
218 ('heap size', get_resource(resource.RLIMIT_DATA)),
219 ('rss size', get_resource(resource.RLIMIT_RSS)),
219 ('rss size', get_resource(resource.RLIMIT_RSS)),
220 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
220 ('number of processes', get_resource(resource.RLIMIT_NPROC)),
221 ('open files', get_resource(resource.RLIMIT_NOFILE)),
221 ('open files', get_resource(resource.RLIMIT_NOFILE)),
222 ])
222 ])
223
223
224 text = ', '.join(f'{k}:{v}' for k, v in data.items())
224 text = ', '.join(f'{k}:{v}' for k, v in data.items())
225
225
226 value = {
226 value = {
227 'limits': data,
227 'limits': data,
228 'text': text,
228 'text': text,
229 }
229 }
230 return SysInfoRes(value=value)
230 return SysInfoRes(value=value)
231
231
232
232
233 @register_sysinfo
233 @register_sysinfo
234 def uptime():
234 def uptime():
235 from rhodecode.lib.helpers import age, time_to_datetime
235 from rhodecode.lib.helpers import age, time_to_datetime
236 from rhodecode.translation import TranslationString
236 from rhodecode.translation import TranslationString
237
237
238 value = dict(boot_time=0, uptime=0, text='')
238 value = dict(boot_time=0, uptime=0, text='')
239 state = STATE_OK_DEFAULT
239 state = STATE_OK_DEFAULT
240
240
241 boot_time = psutil.boot_time()
241 boot_time = psutil.boot_time()
242 value['boot_time'] = boot_time
242 value['boot_time'] = boot_time
243 value['uptime'] = time.time() - boot_time
243 value['uptime'] = time.time() - boot_time
244
244
245 date_or_age = age(time_to_datetime(boot_time))
245 date_or_age = age(time_to_datetime(boot_time))
246 if isinstance(date_or_age, TranslationString):
246 if isinstance(date_or_age, TranslationString):
247 date_or_age = date_or_age.interpolate()
247 date_or_age = date_or_age.interpolate()
248
248
249 human_value = value.copy()
249 human_value = value.copy()
250 human_value['boot_time'] = time_to_datetime(boot_time)
250 human_value['boot_time'] = time_to_datetime(boot_time)
251 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
251 human_value['uptime'] = age(time_to_datetime(boot_time), show_suffix=False)
252
252
253 human_value['text'] = f'Server started {date_or_age}'
253 human_value['text'] = f'Server started {date_or_age}'
254 return SysInfoRes(value=value, human_value=human_value)
254 return SysInfoRes(value=value, human_value=human_value)
255
255
256
256
@register_sysinfo
def memory():
    """Collect RAM usage via psutil; warn above 70% and error above 90% used."""
    from rhodecode.lib.helpers import format_byte_size_binary

    # Baseline zeros keep the result shape stable even if psutil omits a field.
    value = {
        'available': 0, 'used': 0, 'used_real': 0, 'cached': 0, 'percent': 0,
        'percent_used': 0, 'free': 0, 'inactive': 0, 'active': 0, 'shared': 0,
        'total': 0, 'buffers': 0, 'text': '',
    }
    state = STATE_OK_DEFAULT

    value.update(dict(psutil.virtual_memory()._asdict()))
    # "real" usage excludes reclaimable caches/buffers (total - available).
    value['used_real'] = value['total'] - value['available']
    value['percent_used'] = psutil._common.usage_percent(value['used_real'], value['total'], 1)

    human_value = value.copy()
    human_value['text'] = '{}/{}, {}% used'.format(
        format_byte_size_binary(value['used_real']),
        format_byte_size_binary(value['total']),
        value['percent_used'])

    # Humanize every byte-count field; the three non-byte entries stay as-is.
    for field in value:
        if field not in ('percent', 'percent_used', 'text'):
            human_value[field] = format_byte_size_binary(value[field])

    if state['type'] == STATE_OK and value['percent_used'] > 90:
        msg = 'Critical: your available RAM memory is very low.'
        state = {'message': msg, 'type': STATE_ERR}
    elif state['type'] == STATE_OK and value['percent_used'] > 70:
        msg = 'Warning: your available RAM memory is running low.'
        state = {'message': msg, 'type': STATE_WARN}

    return SysInfoRes(value=value, state=state, human_value=human_value)
293
292
294
293
@register_sysinfo
def machine_load():
    """Report 1/5/15-minute load averages; warn when 15-min load exceeds 5.0."""
    value = {'1_min': _NA_FLOAT, '5_min': _NA_FLOAT, '15_min': _NA_FLOAT, 'text': ''}
    state = STATE_OK_DEFAULT

    # getloadavg is not available on every platform; keep N/A defaults otherwise.
    if hasattr(psutil.os, 'getloadavg'):
        load_1, load_5, load_15 = psutil.os.getloadavg()
        value['1_min'] = load_1
        value['5_min'] = load_5
        value['15_min'] = load_15

    human_value = value.copy()
    human_value['text'] = '1min: {}, 5min: {}, 15min: {}'.format(
        value['1_min'], value['5_min'], value['15_min'])

    if state['type'] == STATE_OK and value['15_min'] > 5.0:
        msg = 'Warning: your machine load is very high.'
        state = {'message': msg, 'type': STATE_WARN}

    return SysInfoRes(value=value, state=state, human_value=human_value)
315
314
316
315
@register_sysinfo
def cpu():
    """Sample overall and per-core CPU utilization over 0.5s intervals."""
    state = STATE_OK_DEFAULT
    value = {'cpu': 0, 'cpu_count': 0, 'cpu_usage': []}

    # Two blocking samples: aggregate first, then per-core.
    value['cpu'] = psutil.cpu_percent(0.5)
    value['cpu_usage'] = psutil.cpu_percent(0.5, percpu=True)
    value['cpu_count'] = psutil.cpu_count()

    human_value = dict(value)
    human_value['text'] = '{} cores at {} %'.format(value['cpu_count'], value['cpu'])

    return SysInfoRes(value=value, state=state, human_value=human_value)
331
329
332
330
@register_sysinfo
def storage():
    """Check disk usage at the repositories location; warn >70%, error >90%."""
    from rhodecode.lib.helpers import format_byte_size_binary
    from rhodecode.model.settings import VcsSettingsModel

    path = VcsSettingsModel().get_repos_location()
    value = dict(percent=0, used=0, total=0, path=path, text='')
    state = STATE_OK_DEFAULT

    try:
        value.update(dict(psutil.disk_usage(path)._asdict()))
    except Exception as e:
        log.exception('Failed to fetch disk info')
        state = {'message': str(e), 'type': STATE_ERR}

    human_value = value.copy()
    human_value['used'] = format_byte_size_binary(value['used'])
    human_value['total'] = format_byte_size_binary(value['total'])
    human_value['text'] = "{}/{}, {}% used".format(
        format_byte_size_binary(value['used']),
        format_byte_size_binary(value['total']),
        value['percent'])

    if state['type'] == STATE_OK and value['percent'] > 90:
        msg = 'Critical: your disk space is very low.'
        state = {'message': msg, 'type': STATE_ERR}
    elif state['type'] == STATE_OK and value['percent'] > 70:
        msg = 'Warning: your disk space is running low.'
        state = {'message': msg, 'type': STATE_WARN}

    return SysInfoRes(value=value, state=state, human_value=human_value)
365
363
366
364
@register_sysinfo
def storage_inodes():
    """Check inode usage at the repositories location; warn >70%, error >90%."""
    from rhodecode.model.settings import VcsSettingsModel

    path = VcsSettingsModel().get_repos_location()
    value = dict(percent=0.0, free=0, used=0, total=0, path=path, text='')
    state = STATE_OK_DEFAULT

    try:
        i_stat = os.statvfs(path)
        value['free'] = i_stat.f_ffree
        # NOTE(review): "used" is total minus inodes available to unprivileged
        # users (f_favail, not f_ffree) — confirm the asymmetry is intended.
        value['used'] = i_stat.f_files - i_stat.f_favail
        value['total'] = i_stat.f_files
        value['percent'] = percentage(value['used'], value['total'])
    except Exception as e:
        log.exception('Failed to fetch disk inodes info')
        state = {'message': str(e), 'type': STATE_ERR}

    human_value = value.copy()
    human_value['text'] = "{}/{}, {}% used".format(
        value['used'], value['total'], value['percent'])

    if state['type'] == STATE_OK and value['percent'] > 90:
        msg = 'Critical: your disk free inodes are very low.'
        state = {'message': msg, 'type': STATE_ERR}
    elif state['type'] == STATE_OK and value['percent'] > 70:
        msg = 'Warning: your disk free inodes are running low.'
        state = {'message': msg, 'type': STATE_WARN}

    return SysInfoRes(value=value, state=state, human_value=human_value)
398
396
399
397
@register_sysinfo
def storage_archives():
    """Measure disk usage of the archive-cache directory and count its entries."""
    import rhodecode
    from rhodecode.lib.utils import safe_str
    from rhodecode.lib.helpers import format_byte_size_binary

    msg = 'Archive cache storage is controlled by ' \
          'archive_cache.store_dir=/path/to/cache option in the .ini file'
    path = safe_str(rhodecode.CONFIG.get('archive_cache.store_dir', msg))

    value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
    state = STATE_OK_DEFAULT
    try:
        items_count = 0
        used = 0
        for root, dirs, files in os.walk(path):
            if root == path:
                # Top-level directories are the cached entries.
                items_count = len(dirs)
            for file_name in files:
                try:
                    used += os.path.getsize(os.path.join(root, file_name))
                except OSError:
                    # File vanished mid-walk; skip it.
                    pass
        value.update({
            'percent': 100,
            'used': used,
            'total': used,
            'items': items_count,
        })
    except Exception as e:
        log.exception('failed to fetch archive cache storage')
        state = {'message': str(e), 'type': STATE_ERR}

    human_value = value.copy()
    human_value['used'] = format_byte_size_binary(value['used'])
    human_value['total'] = format_byte_size_binary(value['total'])
    human_value['text'] = "{} ({} items)".format(
        human_value['used'], value['items'])

    return SysInfoRes(value=value, state=state, human_value=human_value)
442
440
443
441
@register_sysinfo
def storage_gist():
    """Measure disk usage of the gist store and count its entries."""
    from rhodecode.model.gist import GIST_STORE_LOC
    from rhodecode.model.settings import VcsSettingsModel
    from rhodecode.lib.utils import safe_str
    from rhodecode.lib.helpers import format_byte_size_binary

    path = safe_str(os.path.join(
        VcsSettingsModel().get_repos_location(), GIST_STORE_LOC))

    # gist storage
    value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
    state = STATE_OK_DEFAULT

    try:
        items_count = 0
        used = 0
        for root, dirs, files in os.walk(path):
            if root == path:
                # Top-level directories correspond to stored gists.
                items_count = len(dirs)
            for file_name in files:
                try:
                    used += os.path.getsize(os.path.join(root, file_name))
                except OSError:
                    # File removed while walking; ignore.
                    pass
        value.update({
            'percent': 100,
            'used': used,
            'total': used,
            'items': items_count,
        })
    except Exception as e:
        log.exception('failed to fetch gist storage items')
        state = {'message': str(e), 'type': STATE_ERR}

    human_value = value.copy()
    human_value['used'] = format_byte_size_binary(value['used'])
    human_value['total'] = format_byte_size_binary(value['total'])
    human_value['text'] = "{} ({} items)".format(
        human_value['used'], value['items'])

    return SysInfoRes(value=value, state=state, human_value=human_value)
486
484
487
485
@register_sysinfo
def storage_temp():
    """Check free space on the partition holding the system temp directory."""
    import tempfile
    from rhodecode.lib.helpers import format_byte_size_binary

    path = tempfile.gettempdir()
    value = dict(percent=0, used=0, total=0, items=0, path=path, text='')
    state = STATE_OK_DEFAULT

    # psutil may be unavailable; report the zeroed defaults in that case.
    if not psutil:
        return SysInfoRes(value=value, state=state)

    try:
        value.update(dict(psutil.disk_usage(path)._asdict()))
    except Exception as e:
        log.exception('Failed to fetch temp dir info')
        state = {'message': str(e), 'type': STATE_ERR}

    human_value = value.copy()
    human_value['used'] = format_byte_size_binary(value['used'])
    human_value['total'] = format_byte_size_binary(value['total'])
    human_value['text'] = "{}/{}, {}% used".format(
        format_byte_size_binary(value['used']),
        format_byte_size_binary(value['total']),
        value['percent'])

    return SysInfoRes(value=value, state=state, human_value=human_value)
515
513
516
514
@register_sysinfo
def search_info():
    """Report the configured full-text search backend, class, and location."""
    import rhodecode
    from rhodecode.lib.index import searcher_from_config

    backend = rhodecode.CONFIG.get('search.module', '')
    location = rhodecode.CONFIG.get('search.location', '')

    # The searcher class name is best-effort; None when construction fails.
    try:
        searcher = searcher_from_config(rhodecode.CONFIG).__class__.__name__
    except Exception:
        searcher = None

    state = STATE_OK_DEFAULT
    value = dict(
        backend=backend, searcher=searcher, location=location, text='')

    human_value = value.copy()
    human_value['text'] = "backend:`{}`".format(human_value['backend'])

    return SysInfoRes(value=value, state=state, human_value=human_value)
539
537
540
538
@register_sysinfo
def git_info():
    """Ask the VCSServer which git version it exposes."""
    from rhodecode.lib.vcs.backends import git

    state = STATE_OK_DEFAULT
    value = human_value = ''
    try:
        value = git.discover_git_version(raise_on_exc=True)
        human_value = f'version reported from VCSServer: {value}'
    except Exception as e:
        state = {'message': str(e), 'type': STATE_ERR}

    return SysInfoRes(value=value, state=state, human_value=human_value)
553
551
554
552
@register_sysinfo
def hg_info():
    """Ask the VCSServer which Mercurial version it exposes."""
    from rhodecode.lib.vcs.backends import hg

    state = STATE_OK_DEFAULT
    value = human_value = ''
    try:
        value = hg.discover_hg_version(raise_on_exc=True)
        human_value = f'version reported from VCSServer: {value}'
    except Exception as e:
        state = {'message': str(e), 'type': STATE_ERR}
    return SysInfoRes(value=value, state=state, human_value=human_value)
566
564
567
565
@register_sysinfo
def svn_info():
    """Ask the VCSServer which Subversion version it exposes."""
    from rhodecode.lib.vcs.backends import svn

    state = STATE_OK_DEFAULT
    value = human_value = ''
    try:
        value = svn.discover_svn_version(raise_on_exc=True)
        human_value = f'version reported from VCSServer: {value}'
    except Exception as e:
        state = {'message': str(e), 'type': STATE_ERR}
    return SysInfoRes(value=value, state=state, human_value=human_value)
579
577
580
578
@register_sysinfo
def vcs_backends():
    """List the enabled VCS backends in their configured priority order.

    Returns a :class:`SysInfoRes` whose value is the raw ``vcs.backends``
    config entry (``None`` when the key is absent).
    """
    import rhodecode
    value = rhodecode.CONFIG.get('vcs.backends')
    # Guard against a missing/unset key: ','.join(None) would raise TypeError.
    human_value = 'Enabled backends in order: {}'.format(','.join(value or []))
    return SysInfoRes(value=value, human_value=human_value)
587
585
588
586
@register_sysinfo
def vcs_server():
    """Check VCSServer connectivity and summarize its version and workers."""
    import rhodecode
    from rhodecode.lib.vcs.backends import get_vcsserver_service_data

    server_url = rhodecode.CONFIG.get('vcs.server')
    enabled = rhodecode.CONFIG.get('vcs.server.enable')
    protocol = rhodecode.CONFIG.get('vcs.server.protocol') or 'http'
    state = STATE_OK_DEFAULT
    version = None
    workers = 0

    try:
        data = get_vcsserver_service_data()
        if data and 'version' in data:
            version = data['version']
        if data and 'config' in data:
            workers = data['config'].get('workers', 'NOT AVAILABLE')
        connection = 'connected'
    except Exception as e:
        # Any failure to reach the service marks the whole check as errored.
        connection = 'failed'
        state = {'message': str(e), 'type': STATE_ERR}

    value = dict(
        url=server_url,
        enabled=enabled,
        protocol=protocol,
        connection=connection,
        version=version,
        text='',
    )

    human_value = value.copy()
    human_value['text'] = \
        '{url}@ver:{ver} via {mode} mode[workers:{workers}], connection:{conn}'.format(
            url=server_url, ver=version, workers=workers, mode=protocol,
            conn=connection)

    return SysInfoRes(value=value, state=state, human_value=human_value)
631
629
632
630
@register_sysinfo
def vcs_server_config():
    """Fetch the VCSServer's application config, flagging connection failures."""
    from rhodecode.lib.vcs.backends import get_vcsserver_service_data

    state = STATE_OK_DEFAULT
    value = {}
    try:
        value = get_vcsserver_service_data()['app_config']
    except Exception as e:
        state = {'message': str(e), 'type': STATE_ERR}

    human_value = value.copy()
    human_value['text'] = 'VCS Server config'

    return SysInfoRes(value=value, state=state, human_value=human_value)
649
647
650
648
@register_sysinfo
def rhodecode_app_info():
    """Report the running RhodeCode version, edition, and library location."""
    import rhodecode

    edition = rhodecode.CONFIG.get('rhodecode.edition')
    value = dict(
        rhodecode_version=rhodecode.__version__,
        rhodecode_lib_path=os.path.abspath(rhodecode.__file__),
        text=''
    )
    human_value = value.copy()
    human_value['text'] = 'RhodeCode {edition}, version {ver}'.format(
        edition=edition, ver=value['rhodecode_version']
    )
    return SysInfoRes(value=value, human_value=human_value)
666
664
667
665
@register_sysinfo
def rhodecode_config():
    """Expose the loaded .ini configuration with secret values removed."""
    import rhodecode

    path = rhodecode.CONFIG.get('__file__')
    rhodecode_ini_safe = rhodecode.CONFIG.copy()
    cert_path = get_cert_path(path)

    # Re-parse the ini so the [server:main] section can be shown verbatim.
    try:
        config = configparser.ConfigParser()
        config.read(path)
        parsed_ini = config
        if parsed_ini.has_section('server:main'):
            parsed_ini = dict(parsed_ini.items('server:main'))
    except Exception:
        log.exception('Failed to read .ini file for display')
        parsed_ini = {}

    rhodecode_ini_safe['server:main'] = parsed_ini

    # Keys that must never be displayed: secrets, tokens, DB credentials.
    # A tuple entry means (section, key): the whole section value is obfuscated
    # rather than dropped.
    blacklist = [
        f'rhodecode_{LicenseModel.LICENSE_DB_KEY}',
        'routes.map',
        'sqlalchemy.db1.url',
        'channelstream.secret',
        'beaker.session.secret',
        'rhodecode.encrypted_values.secret',
        'rhodecode_auth_github_consumer_key',
        'rhodecode_auth_github_consumer_secret',
        'rhodecode_auth_google_consumer_key',
        'rhodecode_auth_google_consumer_secret',
        'rhodecode_auth_bitbucket_consumer_secret',
        'rhodecode_auth_bitbucket_consumer_key',
        'rhodecode_auth_twitter_consumer_secret',
        'rhodecode_auth_twitter_consumer_key',

        'rhodecode_auth_twitter_secret',
        'rhodecode_auth_github_secret',
        'rhodecode_auth_google_secret',
        'rhodecode_auth_bitbucket_secret',

        'appenlight.api_key',
        ('app_conf', 'sqlalchemy.db1.url')
    ]
    for entry in blacklist:
        if isinstance(entry, tuple):
            section, _key = entry
            if section in rhodecode_ini_safe:
                rhodecode_ini_safe[section] = '**OBFUSCATED**'
        else:
            rhodecode_ini_safe.pop(entry, None)

    # TODO: maybe put some CONFIG checks here ?
    return SysInfoRes(value={'config': rhodecode_ini_safe,
                             'path': path, 'cert_path': cert_path})
722
720
723
721
@register_sysinfo
def database_info():
    """Describe the DB backend, server version, and schema-migration state."""
    import rhodecode
    from sqlalchemy.engine import url as engine_url
    from rhodecode.model import meta
    from rhodecode.model.meta import Session
    from rhodecode.model.db import DbMigrateVersion

    state = STATE_OK_DEFAULT

    db_migrate = DbMigrateVersion.query().filter(
        DbMigrateVersion.repository_id == 'rhodecode_db_migrations').one()

    db_url_obj = engine_url.make_url(rhodecode.CONFIG['sqlalchemy.db1.url'])

    try:
        engine = meta.get_engine()
        # NOTE(review): relies on SQLAlchemy's private dialect API —
        # re-verify on SQLAlchemy upgrades.
        db_server_info = engine.dialect._get_server_version_info(
            Session.connection(bind=engine))
        db_version = '.'.join(map(str, db_server_info))
    except Exception:
        log.exception('failed to fetch db version')
        db_version = 'UNKNOWN'

    db_info = dict(
        migrate_version=db_migrate.version,
        type=db_url_obj.get_backend_name(),
        version=db_version,
        url=repr(db_url_obj)
    )
    current_version = db_migrate.version
    expected_version = rhodecode.__dbversion__
    if state['type'] == STATE_OK and current_version != expected_version:
        msg = 'Critical: database schema mismatch, ' \
              'expected version {}, got {}. ' \
              'Please run migrations on your database.'.format(
                  expected_version, current_version)
        state = {'message': msg, 'type': STATE_ERR}

    human_value = db_info.copy()
    human_value['url'] = "{} @ migration version: {}".format(
        db_info['url'], db_info['migrate_version'])
    human_value['version'] = "{} {}".format(db_info['type'], db_info['version'])
    return SysInfoRes(value=db_info, state=state, human_value=human_value)
768
766
769
767
770 @register_sysinfo
768 @register_sysinfo
771 def server_info(environ):
769 def server_info(environ):
772 import rhodecode
770 import rhodecode
773 from rhodecode.lib.base import get_server_ip_addr, get_server_port
771 from rhodecode.lib.base import get_server_ip_addr, get_server_port
774
772
775 value = {
773 value = {
776 'server_ip': '{}:{}'.format(
774 'server_ip': '{}:{}'.format(
777 get_server_ip_addr(environ, log_errors=False),
775 get_server_ip_addr(environ, log_errors=False),
778 get_server_port(environ)
776 get_server_port(environ)
779 ),
777 ),
780 'server_id': rhodecode.CONFIG.get('instance_id'),
778 'server_id': rhodecode.CONFIG.get('instance_id'),
781 }
779 }
782 return SysInfoRes(value=value)
780 return SysInfoRes(value=value)
783
781
784
782
785 @register_sysinfo
783 @register_sysinfo
786 def usage_info():
784 def usage_info():
787 from rhodecode.model.db import User, Repository
785 from rhodecode.model.db import User, Repository, true
788 value = {
786 value = {
789 'users': User.query().count(),
787 'users': User.query().count(),
790 'users_active': User.query().filter(User.active == True).count(),
788 'users_active': User.query().filter(User.active == true()).count(),
791 'repositories': Repository.query().count(),
789 'repositories': Repository.query().count(),
792 'repository_types': {
790 'repository_types': {
793 'hg': Repository.query().filter(
791 'hg': Repository.query().filter(
794 Repository.repo_type == 'hg').count(),
792 Repository.repo_type == 'hg').count(),
795 'git': Repository.query().filter(
793 'git': Repository.query().filter(
796 Repository.repo_type == 'git').count(),
794 Repository.repo_type == 'git').count(),
797 'svn': Repository.query().filter(
795 'svn': Repository.query().filter(
798 Repository.repo_type == 'svn').count(),
796 Repository.repo_type == 'svn').count(),
799 },
797 },
800 }
798 }
801 return SysInfoRes(value=value)
799 return SysInfoRes(value=value)
802
800
803
801
804 def get_system_info(environ):
802 def get_system_info(environ):
805 environ = environ or {}
803 environ = environ or {}
806 return {
804 return {
807 'rhodecode_app': SysInfo(rhodecode_app_info)(),
805 'rhodecode_app': SysInfo(rhodecode_app_info)(),
808 'rhodecode_config': SysInfo(rhodecode_config)(),
806 'rhodecode_config': SysInfo(rhodecode_config)(),
809 'rhodecode_usage': SysInfo(usage_info)(),
807 'rhodecode_usage': SysInfo(usage_info)(),
810 'python': SysInfo(python_info)(),
808 'python': SysInfo(python_info)(),
811 'py_modules': SysInfo(py_modules)(),
809 'py_modules': SysInfo(py_modules)(),
812
810
813 'platform': SysInfo(platform_type)(),
811 'platform': SysInfo(platform_type)(),
814 'locale': SysInfo(locale_info)(),
812 'locale': SysInfo(locale_info)(),
815 'server': SysInfo(server_info, environ=environ)(),
813 'server': SysInfo(server_info, environ=environ)(),
816 'database': SysInfo(database_info)(),
814 'database': SysInfo(database_info)(),
817 'ulimit': SysInfo(ulimit_info)(),
815 'ulimit': SysInfo(ulimit_info)(),
818 'storage': SysInfo(storage)(),
816 'storage': SysInfo(storage)(),
819 'storage_inodes': SysInfo(storage_inodes)(),
817 'storage_inodes': SysInfo(storage_inodes)(),
820 'storage_archive': SysInfo(storage_archives)(),
818 'storage_archive': SysInfo(storage_archives)(),
821 'storage_gist': SysInfo(storage_gist)(),
819 'storage_gist': SysInfo(storage_gist)(),
822 'storage_temp': SysInfo(storage_temp)(),
820 'storage_temp': SysInfo(storage_temp)(),
823
821
824 'search': SysInfo(search_info)(),
822 'search': SysInfo(search_info)(),
825
823
826 'uptime': SysInfo(uptime)(),
824 'uptime': SysInfo(uptime)(),
827 'load': SysInfo(machine_load)(),
825 'load': SysInfo(machine_load)(),
828 'cpu': SysInfo(cpu)(),
826 'cpu': SysInfo(cpu)(),
829 'memory': SysInfo(memory)(),
827 'memory': SysInfo(memory)(),
830
828
831 'vcs_backends': SysInfo(vcs_backends)(),
829 'vcs_backends': SysInfo(vcs_backends)(),
832 'vcs_server': SysInfo(vcs_server)(),
830 'vcs_server': SysInfo(vcs_server)(),
833
831
834 'vcs_server_config': SysInfo(vcs_server_config)(),
832 'vcs_server_config': SysInfo(vcs_server_config)(),
835
833
836 'git': SysInfo(git_info)(),
834 'git': SysInfo(git_info)(),
837 'hg': SysInfo(hg_info)(),
835 'hg': SysInfo(hg_info)(),
838 'svn': SysInfo(svn_info)(),
836 'svn': SysInfo(svn_info)(),
839 }
837 }
840
838
841
839
842 def load_system_info(key):
840 def load_system_info(key):
843 """
841 """
844 get_sys_info('vcs_server')
842 get_sys_info('vcs_server')
845 get_sys_info('database')
843 get_sys_info('database')
846 """
844 """
847 return SysInfo(registered_helpers[key])()
845 return SysInfo(registered_helpers[key])()
@@ -1,987 +1,987 b''
1 # Copyright (C) 2011-2023 RhodeCode GmbH
1 # Copyright (C) 2011-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19
19
20 """
20 """
21 Some simple helper functions
21 Some simple helper functions
22 """
22 """
23
23
24 import collections
24 import collections
25 import datetime
25 import datetime
26 import dateutil.relativedelta
26 import dateutil.relativedelta
27 import logging
27 import logging
28 import re
28 import re
29 import sys
29 import sys
30 import time
30 import time
31 import urllib.request
31 import urllib.request
32 import urllib.parse
32 import urllib.parse
33 import urllib.error
33 import urllib.error
34 import urlobject
34 import urlobject
35 import uuid
35 import uuid
36 import getpass
36 import getpass
37 import socket
37 import socket
38 import errno
38 import errno
39 import random
39 import random
40 import functools
40 import functools
41 from contextlib import closing
41 from contextlib import closing
42
42
43 import pygments.lexers
43 import pygments.lexers
44 import sqlalchemy
44 import sqlalchemy
45 import sqlalchemy.event
45 import sqlalchemy.event
46 import sqlalchemy.engine.url
46 import sqlalchemy.engine.url
47 import sqlalchemy.exc
47 import sqlalchemy.exc
48 import sqlalchemy.sql
48 import sqlalchemy.sql
49 import webob
49 import webob
50 from pyramid.settings import asbool
50 from pyramid.settings import asbool
51
51
52 import rhodecode
52 import rhodecode
53 from rhodecode.translation import _, _pluralize
53 from rhodecode.translation import _, _pluralize
54 from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
54 from rhodecode.lib.str_utils import safe_str, safe_int, safe_bytes
55 from rhodecode.lib.hash_utils import md5, md5_safe, sha1, sha1_safe
55 from rhodecode.lib.hash_utils import md5, md5_safe, sha1, sha1_safe
56 from rhodecode.lib.type_utils import aslist, str2bool, StrictAttributeDict, AttributeDict
56 from rhodecode.lib.type_utils import aslist, str2bool, StrictAttributeDict, AttributeDict
57
57
58
58
59 def __get_lem(extra_mapping=None):
59 def __get_lem(extra_mapping=None):
60 """
60 """
61 Get language extension map based on what's inside pygments lexers
61 Get language extension map based on what's inside pygments lexers
62 """
62 """
63 d = collections.defaultdict(lambda: [])
63 d = collections.defaultdict(lambda: [])
64
64
65 def __clean(s):
65 def __clean(s):
66 s = s.lstrip('*')
66 s = s.lstrip('*')
67 s = s.lstrip('.')
67 s = s.lstrip('.')
68
68
69 if s.find('[') != -1:
69 if s.find('[') != -1:
70 exts = []
70 exts = []
71 start, stop = s.find('['), s.find(']')
71 start, stop = s.find('['), s.find(']')
72
72
73 for suffix in s[start + 1:stop]:
73 for suffix in s[start + 1:stop]:
74 exts.append(s[:s.find('[')] + suffix)
74 exts.append(s[:s.find('[')] + suffix)
75 return [e.lower() for e in exts]
75 return [e.lower() for e in exts]
76 else:
76 else:
77 return [s.lower()]
77 return [s.lower()]
78
78
79 for lx, t in sorted(pygments.lexers.LEXERS.items()):
79 for lx, t in sorted(pygments.lexers.LEXERS.items()):
80 m = list(map(__clean, t[-2]))
80 m = list(map(__clean, t[-2]))
81 if m:
81 if m:
82 m = functools.reduce(lambda x, y: x + y, m)
82 m = functools.reduce(lambda x, y: x + y, m)
83 for ext in m:
83 for ext in m:
84 desc = lx.replace('Lexer', '')
84 desc = lx.replace('Lexer', '')
85 d[ext].append(desc)
85 d[ext].append(desc)
86
86
87 data = dict(d)
87 data = dict(d)
88
88
89 extra_mapping = extra_mapping or {}
89 extra_mapping = extra_mapping or {}
90 if extra_mapping:
90 if extra_mapping:
91 for k, v in list(extra_mapping.items()):
91 for k, v in list(extra_mapping.items()):
92 if k not in data:
92 if k not in data:
93 # register new mapping2lexer
93 # register new mapping2lexer
94 data[k] = [v]
94 data[k] = [v]
95
95
96 return data
96 return data
97
97
98
98
99 def convert_line_endings(line: str, mode) -> str:
99 def convert_line_endings(line: str, mode) -> str:
100 """
100 """
101 Converts a given line "line end" accordingly to given mode
101 Converts a given line "line end" accordingly to given mode
102
102
103 Available modes are::
103 Available modes are::
104 0 - Unix
104 0 - Unix
105 1 - Mac
105 1 - Mac
106 2 - DOS
106 2 - DOS
107
107
108 :param line: given line to convert
108 :param line: given line to convert
109 :param mode: mode to convert to
109 :param mode: mode to convert to
110 :return: converted line according to mode
110 :return: converted line according to mode
111 """
111 """
112 if mode == 0:
112 if mode == 0:
113 line = line.replace('\r\n', '\n')
113 line = line.replace('\r\n', '\n')
114 line = line.replace('\r', '\n')
114 line = line.replace('\r', '\n')
115 elif mode == 1:
115 elif mode == 1:
116 line = line.replace('\r\n', '\r')
116 line = line.replace('\r\n', '\r')
117 line = line.replace('\n', '\r')
117 line = line.replace('\n', '\r')
118 elif mode == 2:
118 elif mode == 2:
119 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
119 line = re.sub('\r(?!\n)|(?<!\r)\n', '\r\n', line)
120 return line
120 return line
121
121
122
122
123 def detect_mode(line: str, default) -> int:
123 def detect_mode(line: str, default) -> int:
124 """
124 """
125 Detects line break for given line, if line break couldn't be found
125 Detects line break for given line, if line break couldn't be found
126 given default value is returned
126 given default value is returned
127
127
128 :param line: str line
128 :param line: str line
129 :param default: default
129 :param default: default
130 :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
130 :return: value of line end on of 0 - Unix, 1 - Mac, 2 - DOS
131 """
131 """
132 if line.endswith('\r\n'):
132 if line.endswith('\r\n'):
133 return 2
133 return 2
134 elif line.endswith('\n'):
134 elif line.endswith('\n'):
135 return 0
135 return 0
136 elif line.endswith('\r'):
136 elif line.endswith('\r'):
137 return 1
137 return 1
138 else:
138 else:
139 return default
139 return default
140
140
141
141
142 def remove_suffix(s, suffix):
142 def remove_suffix(s, suffix):
143 if s.endswith(suffix):
143 if s.endswith(suffix):
144 s = s[:-1 * len(suffix)]
144 s = s[:-1 * len(suffix)]
145 return s
145 return s
146
146
147
147
148 def remove_prefix(s, prefix):
148 def remove_prefix(s, prefix):
149 if s.startswith(prefix):
149 if s.startswith(prefix):
150 s = s[len(prefix):]
150 s = s[len(prefix):]
151 return s
151 return s
152
152
153
153
154 def find_calling_context(ignore_modules=None, depth=4, output_writer=None, indent=True):
154 def find_calling_context(ignore_modules=None, depth=4, output_writer=None, indent=True):
155 """
155 """
156 Look through the calling stack and return the frame which called
156 Look through the calling stack and return the frame which called
157 this function and is part of core module ( ie. rhodecode.* )
157 this function and is part of core module ( ie. rhodecode.* )
158
158
159 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
159 :param ignore_modules: list of modules to ignore eg. ['rhodecode.lib']
160 :param depth:
160 :param depth:
161 :param output_writer:
161 :param output_writer:
162 :param indent:
162 :param indent:
163
163
164 usage::
164 usage::
165
165
166 from rhodecode.lib.utils2 import find_calling_context
166 from rhodecode.lib.utils2 import find_calling_context
167
167
168 calling_context = find_calling_context(ignore_modules=[
168 calling_context = find_calling_context(ignore_modules=[
169 'rhodecode.lib.caching_query',
169 'rhodecode.lib.caching_query',
170 'rhodecode.model.settings',
170 'rhodecode.model.settings',
171 ])
171 ])
172
172
173 """
173 """
174 import inspect
174 import inspect
175 if not output_writer:
175 if not output_writer:
176 try:
176 try:
177 from rich import print as pprint
177 from rich import print as pprint
178 except ImportError:
178 except ImportError:
179 pprint = print
179 pprint = print
180 output_writer = pprint
180 output_writer = pprint
181
181
182 frame = inspect.currentframe()
182 frame = inspect.currentframe()
183 cc = []
183 cc = []
184 try:
184 try:
185 for i in range(depth): # current frame + 3 callers
185 for i in range(depth): # current frame + 3 callers
186 frame = frame.f_back
186 frame = frame.f_back
187 if not frame:
187 if not frame:
188 break
188 break
189
189
190 info = inspect.getframeinfo(frame)
190 info = inspect.getframeinfo(frame)
191 name = frame.f_globals.get('__name__')
191 name = frame.f_globals.get('__name__')
192 if name not in ignore_modules:
192 if name not in ignore_modules:
193 cc.insert(0, f'CALL_CONTEXT:{i}: file {info.filename}:{info.lineno} -> {info.function}')
193 cc.insert(0, f'CALL_CONTEXT:{i}: file {info.filename}:{info.lineno} -> {info.function}')
194 finally:
194 finally:
195 # Avoids a reference cycle
195 # Avoids a reference cycle
196 del frame
196 del frame
197
197
198 output_writer('* INFO: This code was called from: *')
198 output_writer('* INFO: This code was called from: *')
199 for cnt, frm_info in enumerate(cc):
199 for cnt, frm_info in enumerate(cc):
200 if not indent:
200 if not indent:
201 cnt = 1
201 cnt = 1
202 output_writer(' ' * cnt + frm_info)
202 output_writer(' ' * cnt + frm_info)
203
203
204
204
205 def ping_connection(connection, branch):
205 def ping_connection(connection, branch):
206 if branch:
206 if branch:
207 # "branch" refers to a sub-connection of a connection,
207 # "branch" refers to a sub-connection of a connection,
208 # we don't want to bother pinging on these.
208 # we don't want to bother pinging on these.
209 return
209 return
210
210
211 # turn off "close with result". This flag is only used with
211 # turn off "close with result". This flag is only used with
212 # "connectionless" execution, otherwise will be False in any case
212 # "connectionless" execution, otherwise will be False in any case
213 save_should_close_with_result = connection.should_close_with_result
213 save_should_close_with_result = connection.should_close_with_result
214 connection.should_close_with_result = False
214 connection.should_close_with_result = False
215
215
216 try:
216 try:
217 # run a SELECT 1. use a core select() so that
217 # run a SELECT 1. use a core select() so that
218 # the SELECT of a scalar value without a table is
218 # the SELECT of a scalar value without a table is
219 # appropriately formatted for the backend
219 # appropriately formatted for the backend
220 connection.scalar(sqlalchemy.sql.select([1]))
220 connection.scalar(sqlalchemy.sql.select([1]))
221 except sqlalchemy.exc.DBAPIError as err:
221 except sqlalchemy.exc.DBAPIError as err:
222 # catch SQLAlchemy's DBAPIError, which is a wrapper
222 # catch SQLAlchemy's DBAPIError, which is a wrapper
223 # for the DBAPI's exception. It includes a .connection_invalidated
223 # for the DBAPI's exception. It includes a .connection_invalidated
224 # attribute which specifies if this connection is a "disconnect"
224 # attribute which specifies if this connection is a "disconnect"
225 # condition, which is based on inspection of the original exception
225 # condition, which is based on inspection of the original exception
226 # by the dialect in use.
226 # by the dialect in use.
227 if err.connection_invalidated:
227 if err.connection_invalidated:
228 # run the same SELECT again - the connection will re-validate
228 # run the same SELECT again - the connection will re-validate
229 # itself and establish a new connection. The disconnect detection
229 # itself and establish a new connection. The disconnect detection
230 # here also causes the whole connection pool to be invalidated
230 # here also causes the whole connection pool to be invalidated
231 # so that all stale connections are discarded.
231 # so that all stale connections are discarded.
232 connection.scalar(sqlalchemy.sql.select([1]))
232 connection.scalar(sqlalchemy.sql.select([1]))
233 else:
233 else:
234 raise
234 raise
235 finally:
235 finally:
236 # restore "close with result"
236 # restore "close with result"
237 connection.should_close_with_result = save_should_close_with_result
237 connection.should_close_with_result = save_should_close_with_result
238
238
239
239
240 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
240 def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
241 """Custom engine_from_config functions."""
241 """Custom engine_from_config functions."""
242 log = logging.getLogger('sqlalchemy.engine')
242 log = logging.getLogger('sqlalchemy.engine')
243 use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
243 use_ping_connection = asbool(configuration.pop('sqlalchemy.db1.ping_connection', None))
244 debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))
244 debug = asbool(configuration.pop('sqlalchemy.db1.debug_query', None))
245
245
246 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
246 engine = sqlalchemy.engine_from_config(configuration, prefix, **kwargs)
247
247
248 def color_sql(sql):
248 def color_sql(sql):
249 color_seq = '\033[1;33m' # This is yellow: code 33
249 color_seq = '\033[1;33m' # This is yellow: code 33
250 normal = '\x1b[0m'
250 normal = '\x1b[0m'
251 return ''.join([color_seq, sql, normal])
251 return ''.join([color_seq, sql, normal])
252
252
253 if use_ping_connection:
253 if use_ping_connection:
254 log.debug('Adding ping_connection on the engine config.')
254 log.debug('Adding ping_connection on the engine config.')
255 sqlalchemy.event.listen(engine, "engine_connect", ping_connection)
255 sqlalchemy.event.listen(engine, "engine_connect", ping_connection)
256
256
257 if debug:
257 if debug:
258 # attach events only for debug configuration
258 # attach events only for debug configuration
259 def before_cursor_execute(conn, cursor, statement,
259 def before_cursor_execute(conn, cursor, statement,
260 parameters, context, executemany):
260 parameters, context, executemany):
261 setattr(conn, 'query_start_time', time.time())
261 setattr(conn, 'query_start_time', time.time())
262 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
262 log.info(color_sql(">>>>> STARTING QUERY >>>>>"))
263 find_calling_context(ignore_modules=[
263 find_calling_context(ignore_modules=[
264 'rhodecode.lib.caching_query',
264 'rhodecode.lib.caching_query',
265 'rhodecode.model.settings',
265 'rhodecode.model.settings',
266 ], output_writer=log.info)
266 ], output_writer=log.info)
267
267
268 def after_cursor_execute(conn, cursor, statement,
268 def after_cursor_execute(conn, cursor, statement,
269 parameters, context, executemany):
269 parameters, context, executemany):
270 delattr(conn, 'query_start_time')
270 delattr(conn, 'query_start_time')
271
271
272 sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
272 sqlalchemy.event.listen(engine, "before_cursor_execute", before_cursor_execute)
273 sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)
273 sqlalchemy.event.listen(engine, "after_cursor_execute", after_cursor_execute)
274
274
275 return engine
275 return engine
276
276
277
277
278 def get_encryption_key(config) -> bytes:
278 def get_encryption_key(config) -> bytes:
279 secret = config.get('rhodecode.encrypted_values.secret')
279 secret = config.get('rhodecode.encrypted_values.secret')
280 default = config['beaker.session.secret']
280 default = config['beaker.session.secret']
281 enc_key = secret or default
281 enc_key = secret or default
282
282
283 return safe_bytes(enc_key)
283 return safe_bytes(enc_key)
284
284
285
285
286 def age(prevdate, now=None, show_short_version=False, show_suffix=True, short_format=False):
286 def age(prevdate, now=None, show_short_version=False, show_suffix=True, short_format=False):
287 """
287 """
288 Turns a datetime into an age string.
288 Turns a datetime into an age string.
289 If show_short_version is True, this generates a shorter string with
289 If show_short_version is True, this generates a shorter string with
290 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
290 an approximate age; ex. '1 day ago', rather than '1 day and 23 hours ago'.
291
291
292 * IMPORTANT*
292 * IMPORTANT*
293 Code of this function is written in special way so it's easier to
293 Code of this function is written in special way so it's easier to
294 backport it to javascript. If you mean to update it, please also update
294 backport it to javascript. If you mean to update it, please also update
295 `jquery.timeago-extension.js` file
295 `jquery.timeago-extension.js` file
296
296
297 :param prevdate: datetime object
297 :param prevdate: datetime object
298 :param now: get current time, if not define we use
298 :param now: get current time, if not define we use
299 `datetime.datetime.now()`
299 `datetime.datetime.now()`
300 :param show_short_version: if it should approximate the date and
300 :param show_short_version: if it should approximate the date and
301 return a shorter string
301 return a shorter string
302 :param show_suffix:
302 :param show_suffix:
303 :param short_format: show short format, eg 2D instead of 2 days
303 :param short_format: show short format, eg 2D instead of 2 days
304 :rtype: unicode
304 :rtype: unicode
305 :returns: unicode words describing age
305 :returns: unicode words describing age
306 """
306 """
307
307
308 def _get_relative_delta(now, prevdate):
308 def _get_relative_delta(now, prevdate):
309 base = dateutil.relativedelta.relativedelta(now, prevdate)
309 base = dateutil.relativedelta.relativedelta(now, prevdate)
310 return {
310 return {
311 'year': base.years,
311 'year': base.years,
312 'month': base.months,
312 'month': base.months,
313 'day': base.days,
313 'day': base.days,
314 'hour': base.hours,
314 'hour': base.hours,
315 'minute': base.minutes,
315 'minute': base.minutes,
316 'second': base.seconds,
316 'second': base.seconds,
317 }
317 }
318
318
319 def _is_leap_year(year):
319 def _is_leap_year(year):
320 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
320 return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
321
321
322 def get_month(prevdate):
322 def get_month(prevdate):
323 return prevdate.month
323 return prevdate.month
324
324
325 def get_year(prevdate):
325 def get_year(prevdate):
326 return prevdate.year
326 return prevdate.year
327
327
328 now = now or datetime.datetime.now()
328 now = now or datetime.datetime.now()
329 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
329 order = ['year', 'month', 'day', 'hour', 'minute', 'second']
330 deltas = {}
330 deltas = {}
331 future = False
331 future = False
332
332
333 if prevdate > now:
333 if prevdate > now:
334 now_old = now
334 now_old = now
335 now = prevdate
335 now = prevdate
336 prevdate = now_old
336 prevdate = now_old
337 future = True
337 future = True
338 if future:
338 if future:
339 prevdate = prevdate.replace(microsecond=0)
339 prevdate = prevdate.replace(microsecond=0)
340 # Get date parts deltas
340 # Get date parts deltas
341 for part in order:
341 for part in order:
342 rel_delta = _get_relative_delta(now, prevdate)
342 rel_delta = _get_relative_delta(now, prevdate)
343 deltas[part] = rel_delta[part]
343 deltas[part] = rel_delta[part]
344
344
345 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
345 # Fix negative offsets (there is 1 second between 10:59:59 and 11:00:00,
346 # not 1 hour, -59 minutes and -59 seconds)
346 # not 1 hour, -59 minutes and -59 seconds)
347 offsets = [[5, 60], [4, 60], [3, 24]]
347 offsets = [[5, 60], [4, 60], [3, 24]]
348 for element in offsets: # seconds, minutes, hours
348 for element in offsets: # seconds, minutes, hours
349 num = element[0]
349 num = element[0]
350 length = element[1]
350 length = element[1]
351
351
352 part = order[num]
352 part = order[num]
353 carry_part = order[num - 1]
353 carry_part = order[num - 1]
354
354
355 if deltas[part] < 0:
355 if deltas[part] < 0:
356 deltas[part] += length
356 deltas[part] += length
357 deltas[carry_part] -= 1
357 deltas[carry_part] -= 1
358
358
359 # Same thing for days except that the increment depends on the (variable)
359 # Same thing for days except that the increment depends on the (variable)
360 # number of days in the month
360 # number of days in the month
361 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
361 month_lengths = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
362 if deltas['day'] < 0:
362 if deltas['day'] < 0:
363 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
363 if get_month(prevdate) == 2 and _is_leap_year(get_year(prevdate)):
364 deltas['day'] += 29
364 deltas['day'] += 29
365 else:
365 else:
366 deltas['day'] += month_lengths[get_month(prevdate) - 1]
366 deltas['day'] += month_lengths[get_month(prevdate) - 1]
367
367
368 deltas['month'] -= 1
368 deltas['month'] -= 1
369
369
370 if deltas['month'] < 0:
370 if deltas['month'] < 0:
371 deltas['month'] += 12
371 deltas['month'] += 12
372 deltas['year'] -= 1
372 deltas['year'] -= 1
373
373
374 # Format the result
374 # Format the result
375 if short_format:
375 if short_format:
376 fmt_funcs = {
376 fmt_funcs = {
377 'year': lambda d: '%dy' % d,
377 'year': lambda d: '%dy' % d,
378 'month': lambda d: '%dm' % d,
378 'month': lambda d: '%dm' % d,
379 'day': lambda d: '%dd' % d,
379 'day': lambda d: '%dd' % d,
380 'hour': lambda d: '%dh' % d,
380 'hour': lambda d: '%dh' % d,
381 'minute': lambda d: '%dmin' % d,
381 'minute': lambda d: '%dmin' % d,
382 'second': lambda d: '%dsec' % d,
382 'second': lambda d: '%dsec' % d,
383 }
383 }
384 else:
384 else:
385 fmt_funcs = {
385 fmt_funcs = {
386 'year': lambda d: _pluralize('${num} year', '${num} years', d, mapping={'num': d}).interpolate(),
386 'year': lambda d: _pluralize('${num} year', '${num} years', d, mapping={'num': d}).interpolate(),
387 'month': lambda d: _pluralize('${num} month', '${num} months', d, mapping={'num': d}).interpolate(),
387 'month': lambda d: _pluralize('${num} month', '${num} months', d, mapping={'num': d}).interpolate(),
388 'day': lambda d: _pluralize('${num} day', '${num} days', d, mapping={'num': d}).interpolate(),
388 'day': lambda d: _pluralize('${num} day', '${num} days', d, mapping={'num': d}).interpolate(),
389 'hour': lambda d: _pluralize('${num} hour', '${num} hours', d, mapping={'num': d}).interpolate(),
389 'hour': lambda d: _pluralize('${num} hour', '${num} hours', d, mapping={'num': d}).interpolate(),
390 'minute': lambda d: _pluralize('${num} minute', '${num} minutes', d, mapping={'num': d}).interpolate(),
390 'minute': lambda d: _pluralize('${num} minute', '${num} minutes', d, mapping={'num': d}).interpolate(),
391 'second': lambda d: _pluralize('${num} second', '${num} seconds', d, mapping={'num': d}).interpolate(),
391 'second': lambda d: _pluralize('${num} second', '${num} seconds', d, mapping={'num': d}).interpolate(),
392 }
392 }
393
393
394 i = 0
394 i = 0
395 for part in order:
395 for part in order:
396 value = deltas[part]
396 value = deltas[part]
397 if value != 0:
397 if value != 0:
398
398
399 if i < 5:
399 if i < 5:
400 sub_part = order[i + 1]
400 sub_part = order[i + 1]
401 sub_value = deltas[sub_part]
401 sub_value = deltas[sub_part]
402 else:
402 else:
403 sub_value = 0
403 sub_value = 0
404
404
405 if sub_value == 0 or show_short_version:
405 if sub_value == 0 or show_short_version:
406 _val = fmt_funcs[part](value)
406 _val = fmt_funcs[part](value)
407 if future:
407 if future:
408 if show_suffix:
408 if show_suffix:
409 return _('in ${ago}', mapping={'ago': _val})
409 return _('in ${ago}', mapping={'ago': _val})
410 else:
410 else:
411 return _(_val)
411 return _(_val)
412
412
413 else:
413 else:
414 if show_suffix:
414 if show_suffix:
415 return _('${ago} ago', mapping={'ago': _val})
415 return _('${ago} ago', mapping={'ago': _val})
416 else:
416 else:
417 return _(_val)
417 return _(_val)
418
418
419 val = fmt_funcs[part](value)
419 val = fmt_funcs[part](value)
420 val_detail = fmt_funcs[sub_part](sub_value)
420 val_detail = fmt_funcs[sub_part](sub_value)
421 mapping = {'val': val, 'detail': val_detail}
421 mapping = {'val': val, 'detail': val_detail}
422
422
423 if short_format:
423 if short_format:
424 datetime_tmpl = _('${val}, ${detail}', mapping=mapping)
424 datetime_tmpl = _('${val}, ${detail}', mapping=mapping)
425 if show_suffix:
425 if show_suffix:
426 datetime_tmpl = _('${val}, ${detail} ago', mapping=mapping)
426 datetime_tmpl = _('${val}, ${detail} ago', mapping=mapping)
427 if future:
427 if future:
428 datetime_tmpl = _('in ${val}, ${detail}', mapping=mapping)
428 datetime_tmpl = _('in ${val}, ${detail}', mapping=mapping)
429 else:
429 else:
430 datetime_tmpl = _('${val} and ${detail}', mapping=mapping)
430 datetime_tmpl = _('${val} and ${detail}', mapping=mapping)
431 if show_suffix:
431 if show_suffix:
432 datetime_tmpl = _('${val} and ${detail} ago', mapping=mapping)
432 datetime_tmpl = _('${val} and ${detail} ago', mapping=mapping)
433 if future:
433 if future:
434 datetime_tmpl = _('in ${val} and ${detail}', mapping=mapping)
434 datetime_tmpl = _('in ${val} and ${detail}', mapping=mapping)
435
435
436 return datetime_tmpl
436 return datetime_tmpl
437 i += 1
437 i += 1
438 return _('just now')
438 return _('just now')
439
439
440
440
def age_from_seconds(seconds):
    """Return a short human-readable age string for the given seconds delta."""
    delta = safe_int(seconds) or 0
    reference_date = time_to_datetime(time.time() + delta)
    return age(reference_date, show_suffix=False, show_short_version=True)
445
445
446
446
def cleaned_uri(uri):
    """
    Percent-encode characters that are unsafe in a URI, such as a lone
    '[' or ']' (forbidden by RFC3986), while keeping '@$:/' literal.

    :param uri:
    :return: uri with unsafe chars quoted
    """
    kept_literal = '@$:/'
    return urllib.parse.quote(uri, safe=kept_literal)
455
455
456
456
def credentials_filter(uri):
    """
    Strip username and password credentials from the given uri.

    :param uri:
    :return: URLObject without credentials, or the string
        'InvalidDecryptionKey' when the value failed decryption
    """
    import urlobject
    if isinstance(uri, rhodecode.lib.encrypt.InvalidDecryptedValue):
        # a value that failed decryption cannot be parsed as a url
        return 'InvalidDecryptionKey'

    parsed = urlobject.URLObject(cleaned_uri(uri))
    parsed = parsed.without_password().without_username()
    return parsed
471
471
472
472
def get_host_info(request):
    """
    Generate host info, to obtain full url e.g https://server.com
    use this
    `{scheme}://{netloc}`
    """
    if not request:
        return {}

    home_url = request.route_url('home')
    url_obj = urlobject.URLObject(home_url)
    # keep proxy-prefix path (if any) as part of the netloc
    prefix_path = safe_str(urllib.parse.unquote(url_obj.path.rstrip('/')))

    return {
        'scheme': url_obj.scheme,
        'netloc': url_obj.netloc + prefix_path,
        'hostname': url_obj.hostname,
    }
491
491
492
492
def get_clone_url(request, uri_tmpl, repo_name, repo_id, repo_type, **override):
    """
    Render a clone url template (placeholders like ``{scheme}``, ``{repo}``)
    into a final clone url for the given repository.

    :param request: pyramid request, used to resolve the server 'home' url
    :param uri_tmpl: template string with ``{placeholder}`` markers
    :param repo_name: repository name substituted for ``{repo}``
    :param repo_id: repository id substituted for ``{repoid}``
    :param repo_type: vcs type, e.g. 'git', 'hg', 'svn'
    :param override: extra/overriding template values
    :return: rendered clone url as str
    """
    home_url = request.route_url('home')
    url_obj = urlobject.URLObject(home_url)
    # path if we use proxy-prefix
    prefix_path = safe_str(urllib.parse.unquote(url_obj.path.rstrip('/')))

    tmpl_values = {
        'scheme': url_obj.scheme,
        'user': '',
        'sys_user': getpass.getuser(),
        'netloc': url_obj.netloc + prefix_path,
        'hostname': url_obj.hostname,
        'prefix': prefix_path,
        'repo': repo_name,
        'repoid': str(repo_id),
        'repo_type': repo_type,
    }
    tmpl_values.update(override)
    tmpl_values['user'] = urllib.parse.quote(safe_str(tmpl_values['user']))

    for key, value in list(tmpl_values.items()):
        uri_tmpl = uri_tmpl.replace('{%s}' % key, value)

    # special case for SVN clone url
    if repo_type == 'svn':
        uri_tmpl = uri_tmpl.replace('ssh://', 'svn+ssh://')

    # remove leading @ sign if it's present. Case of empty user
    final_obj = urlobject.URLObject(uri_tmpl)
    final_url = final_obj.with_netloc(final_obj.netloc.lstrip('@'))

    return safe_str(final_url)
526
526
527
527
def get_commit_safe(repo, commit_id=None, commit_idx=None, pre_load=None,
                    maybe_unreachable=False, reference_obj=None):
    """
    Safe version of get_commit: if the commit doesn't exist for a
    repository it returns an EmptyCommit (dummy) instead of raising.

    :param repo: repository instance
    :param commit_id: commit id as str
    :param commit_idx: numeric commit index
    :param pre_load: optional list of commit attributes to load
    :param maybe_unreachable: translate unreachable commits on git repos
    :param reference_obj: explicitly search via a reference obj in git. E.g "branch:123" would mean branch "123"
    """
    # TODO(skreft): remove these circular imports
    from rhodecode.lib.vcs.backends.base import BaseRepository, EmptyCommit
    from rhodecode.lib.vcs.exceptions import RepositoryError
    if not isinstance(repo, BaseRepository):
        # NOTE: the previous message used logging-style args
        # ('... %s', type(repo)) which made Exception carry a tuple instead
        # of a formatted string; format explicitly instead.
        raise Exception(
            f'You must pass a Repository object as first argument, got {type(repo)}')

    try:
        commit = repo.get_commit(
            commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load,
            maybe_unreachable=maybe_unreachable, reference_obj=reference_obj)
    except (RepositoryError, LookupError):
        # unknown/unreachable commit -> fall back to a dummy commit object
        commit = EmptyCommit()
    return commit
555
555
556
556
def datetime_to_time(dt):
    """Convert a datetime to a unix timestamp; return None for falsy input."""
    if not dt:
        return None
    return time.mktime(dt.timetuple())
560
560
561
561
def time_to_datetime(tm):
    """
    Convert a unix timestamp (number or numeric string) to a local datetime.

    Returns None for falsy input or a string that cannot be parsed as float.
    """
    if not tm:
        return None
    if isinstance(tm, str):
        try:
            tm = float(tm)
        except ValueError:
            return None
    return datetime.datetime.fromtimestamp(tm)
570
570
571
571
def time_to_utcdatetime(tm):
    """
    Convert a unix timestamp (number or numeric string) to a naive UTC datetime.

    Returns None for falsy input or a string that cannot be parsed as float.
    """
    if not tm:
        return None
    if isinstance(tm, str):
        try:
            tm = float(tm)
        except ValueError:
            return None
    # NOTE(review): utcfromtimestamp is deprecated in py3.12; kept for
    # byte-identical behavior with the original implementation
    return datetime.datetime.utcfromtimestamp(tm)
580
580
581
581
# Matches @username mentions: an '@' at line start (or preceded by a
# non-username character) followed by the captured username.
MENTIONS_REGEX = re.compile(
    # ^@ or @ without any special chars in front
    r'(?:^@|[^a-zA-Z0-9\-\_\.]@)'
    # main body starts with letter, then can be . - _
    r'([a-zA-Z0-9]{1}[a-zA-Z0-9\-\_\.]+)',
    re.VERBOSE | re.MULTILINE)
588
588
589
589
def extract_mentioned_users(s):
    """
    Return a sorted (case-insensitively) list of unique usernames that are
    @mentioned in the given string.

    :param s: string to scan for mentions
    """
    unique_names = set(MENTIONS_REGEX.findall(s))
    return sorted(unique_names, key=str.lower)
601
601
602
602
def fix_PATH(os_=None):
    """
    Prepend the directory of the running python interpreter to PATH to fix
    issues of subprocess calls and different python versions.

    :param os_: optional os-module stand-in (used for testing)
    """
    if os_ is None:
        import os
    else:
        os = os_

    python_dir = os.path.split(sys.executable)[0]
    current_path = os.environ['PATH']
    if not current_path.startswith(python_dir):
        os.environ['PATH'] = f'{python_dir}:{current_path}'
617
617
618
618
def obfuscate_url_pw(engine):
    """
    Return a repr of the engine url with the password obfuscated
    (sqlalchemy's url repr masks credentials).
    """
    url = engine or ''
    try:
        url = sqlalchemy.engine.url.make_url(engine)
    except Exception:
        # keep the raw value when it cannot be parsed as an engine url
        pass
    return repr(url)
626
626
627
627
def get_server_url(environ):
    """Return full server url (host url + script name) for a WSGI environ."""
    request = webob.Request(environ)
    return request.host_url + request.script_name
631
631
632
632
def unique_id(hexlen=32):
    """Return a random id of length *hexlen* drawn from an unambiguous alphabet."""
    # alphabet without easily-confused chars (0/O, 1/I/l)
    safe_alphabet = "23456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghjklmnpqrstuvwxyz"
    return suuid(truncate_to=hexlen, alphabet=safe_alphabet)
636
636
637
637
def suuid(url=None, truncate_to=22, alphabet=None):
    """
    Generate and return a short URL safe UUID.

    If the url parameter is provided, set the namespace to the provided
    URL and generate a UUID.

    :param url to get the uuid for
    :truncate_to: truncate the basic 22 UUID to shorter version

    The IDs won't be universally unique any longer, but the probability of
    a collision will still be very low.
    """
    # Define our alphabet.
    _ALPHABET = alphabet or "23456789ABCDEFGHJKLMNPQRSTUVWXYZ"

    # If no URL is given, generate a random UUID.
    if url is None:
        remainder = uuid.uuid4().int
    else:
        remainder = uuid.uuid3(uuid.NAMESPACE_URL, url).int

    # NOTE: previously this used int(remainder / base), i.e. float true
    # division; a uuid int is 128 bits while floats carry only 53 bits of
    # mantissa, so the base conversion digits were silently corrupted.
    # divmod keeps the arithmetic in exact integers.
    base = len(_ALPHABET)
    output = []
    while remainder > 0:
        remainder, digit = divmod(remainder, base)
        output.append(_ALPHABET[digit])
    return "".join(output)[:truncate_to]
667
667
668
668
def get_current_rhodecode_user(request=None):
    """
    Get the rhodecode user bound to the given (or current threadlocal) request.

    Returns None when no request or no user is available.
    """
    import pyramid.threadlocal
    pyramid_request = request or pyramid.threadlocal.get_current_request()

    if pyramid_request:
        # web case
        if hasattr(pyramid_request, 'user'):
            return pyramid_request.user
        # api case
        if hasattr(pyramid_request, 'rpc_user'):
            return pyramid_request.rpc_user

    return None
685
685
686
686
def action_logger_generic(action, namespace=''):
    """
    A generic logger for actions useful to the system overview; tries to find
    an acting user for the context of the call, otherwise reports unknown user.

    :param action: logging message eg 'comment 5 deleted'
    :param type: string

    :param namespace: namespace of the logging message eg. 'repo.comments'
    :param type: string

    """
    logger_name = 'rhodecode.actions'
    if namespace:
        logger_name = f'{logger_name}.{namespace}'

    log = logging.getLogger(logger_name)

    # get a user if we can
    user = get_current_rhodecode_user()
    if user:
        logfunc = log.info
    else:
        # no user could be resolved -> log louder with a placeholder
        user = '<unknown user>'
        logfunc = log.warning

    logfunc(f'Logging action by {user}: {action}')
717
717
718
718
def escape_split(text, sep=',', maxsplit=-1):
    r"""
    Split *text* on *sep* while honoring escaped separators,
    e.g. arg='foo\, bar'.

    It should be noted that the way bash et. al. do command line parsing,
    those single quotes are required.
    """
    escaped = '\\' + sep

    if escaped not in text:
        return text.split(sep, maxsplit)

    head, _found, tail = text.partition(escaped)
    pieces = head.split(sep, maxsplit)  # plain split is safe before the escape
    pending = pieces.pop()

    # recurse: the tail may contain further escaped separators
    rest = escape_split(tail, sep, maxsplit)

    # rest[0] is the continuation of the escaped value we cut in half
    pending = pending + sep + rest[0]

    return pieces + [pending] + rest[1:]
744
744
745
745
class OptionalAttr(object):
    """
    Special Optional Option that defines other attribute. Example::

        def test(apiuser, userid=Optional(OAttr('apiuser')):
            user = Optional.extract(userid)
            # calls

    """

    def __init__(self, attr_name):
        # name of the attribute this placeholder resolves to
        self.attr_name = attr_name

    def __repr__(self):
        return f'<OptionalAttr:{self.attr_name}>'

    def __call__(self):
        # calling the placeholder is a no-op that returns itself
        return self
764
764
765
765
# alias, shorter name used in api method signatures
OAttr = OptionalAttr
768
768
769
769
class Optional(object):
    """
    Defines an optional parameter::

        param = param.getval() if isinstance(param, Optional) else param
        param = param() if isinstance(param, Optional) else param

    is equivalent of::

        param = Optional.extract(param)

    """

    def __init__(self, type_):
        self.type_ = type_

    def __repr__(self):
        return f'<Optional:{self.type_!r}>'

    def __call__(self):
        return self.getval()

    def getval(self):
        """
        Return the wrapped value; an OAttr placeholder resolves to its
        attribute name.
        """
        # use params name
        if isinstance(self.type_, OAttr):
            return self.type_.attr_name
        return self.type_

    @classmethod
    def extract(cls, val):
        """
        Return *val* unchanged unless it is an Optional instance, in which
        case return the wrapped value.

        :param val:
        """
        return val.getval() if isinstance(val, cls) else val
813
813
814
814
def glob2re(pat):
    """Translate a shell glob pattern into a regular expression string."""
    import fnmatch
    translated = fnmatch.translate(pat)
    return translated
818
818
819
819
def parse_byte_string(size_str):
    """
    Parse a human-readable size string like '10MB' or '512kb' into bytes.

    :param size_str: string in ``<num>(MB|KB)`` format, case-insensitive
    :raises ValueError: when the string does not match the expected format
    :return: size in bytes as int
    """
    matched = re.match(r'(\d+)(MB|KB)', size_str, re.IGNORECASE)
    if not matched:
        raise ValueError(f'Given size:{size_str} is invalid, please make sure '
                         f'to use format of <num>(MB|KB)')

    amount, unit = matched.groups()
    multipliers = {'kb': 1024, 'mb': 1024 * 1024}
    return int(amount) * multipliers[unit.lower()]
829
829
830
830
class CachedProperty(object):
    """
    Lazy attribute descriptor with explicit cache invalidation support.

    >>> class Foo(object):
    ...
    ...     @CachedProperty
    ...     def heavy_func(self):
    ...         return 'super-calculation'
    ...
    ... foo = Foo()
    ... foo.heavy_func() # first computation
    ... foo.heavy_func() # fetch from cache
    ... foo._invalidate_prop_cache('heavy_func')

    # at this point calling foo.heavy_func() will be re-computed
    """

    def __init__(self, func, func_name=None):
        name = func_name if func_name is not None else func.__name__
        self.data = (func, name)
        functools.update_wrapper(self, func)

    def __get__(self, inst, class_):
        # class-level access returns the descriptor itself
        if inst is None:
            return self

        wrapped, attr_name = self.data
        result = wrapped(inst)
        # store on the instance: as a non-data descriptor this is shadowed
        # by the instance attribute until it is invalidated
        inst.__dict__[attr_name] = result
        if '_invalidate_prop_cache' not in inst.__dict__:
            inst.__dict__['_invalidate_prop_cache'] = functools.partial(
                self._invalidate_prop_cache, inst)
        return result

    def _invalidate_prop_cache(self, inst, name):
        # drop the cached value so the next access recomputes it
        inst.__dict__.pop(name, None)
870
870
871
871
def retry(func=None, exception=Exception, n_tries=5, delay=5, backoff=1, logger=True):
    """
    Retry decorator with exponential backoff.

    Parameters
    ----------
    func : typing.Callable, optional
        Callable on which the decorator is applied, by default None
    exception : Exception or tuple of Exceptions, optional
        Exception(s) that invoke retry, by default Exception
    n_tries : int, optional
        Number of tries before giving up, by default 5
    delay : int, optional
        Initial delay between retries in seconds, by default 5
    backoff : int, optional
        Backoff multiplier e.g. value of 2 will double the delay, by default 1
    logger : bool, optional
        Option to log or print, by default False

    Returns
    -------
    typing.Callable
        Decorated callable that calls itself when exception(s) occur.

    Examples
    --------
    >>> import random
    >>> @retry(exception=Exception, n_tries=3)
    ... def test_random(text):
    ...     x = random.random()
    ...     if x < 0.5:
    ...         raise Exception("Fail")
    ...     else:
    ...         print("Success: ", text)
    >>> test_random("It works!")
    """

    if func is None:
        # called with arguments: return a decorator bound to these settings
        return functools.partial(
            retry,
            exception=exception,
            n_tries=n_tries,
            delay=delay,
            backoff=backoff,
            logger=logger,
        )

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        log = logging.getLogger('rhodecode.retry')
        remaining, wait = n_tries, delay

        while remaining > 1:
            try:
                return func(*args, **kwargs)
            except exception as exc:
                msg = "Exception on calling func {func}: {e}, " \
                      "Retrying in {n_delay} seconds..."\
                      .format(func=func, e=repr(exc), n_delay=wait)
                if logger:
                    log.warning(msg)
                else:
                    print(msg)
                time.sleep(wait)
                remaining -= 1
                wait *= backoff

        # last attempt runs unguarded so a final failure propagates
        return func(*args, **kwargs)

    return wrapper
943
943
944
944
def user_agent_normalizer(user_agent_raw, safe=True):
    """
    Normalize a raw SCM client user-agent header into ``<client>/<version>``.

    :param user_agent_raw: raw user-agent header value (may be None)
    :param safe: when True, swallow parsing errors and return the
        partially-normalized value; when False, re-raise them
    """
    log = logging.getLogger('rhodecode.user_agent_normalizer')
    ua = (user_agent_raw or '').strip().lower().replace('"', '')

    try:
        if 'mercurial/proto-1.0' in ua:
            # e.g. 'mercurial/proto-1.0 (Mercurial 4.2)' -> 'mercurial/4.2'
            ua = ua.replace('mercurial/proto-1.0', '')
            ua = ua.replace('(', '').replace(')', '').strip()
            ua = ua.replace('mercurial ', 'mercurial/')
        elif ua.startswith('git'):
            # keep only the first token and drop windows build suffixes
            tokens = ua.split(' ')
            if tokens:
                ua = re.sub(r'\.windows\.\d', '', tokens[0]).strip()

        return ua
    except Exception:
        log.exception('Failed to parse scm user-agent')
        if not safe:
            raise

    return ua
968
968
969
969
def get_available_port(min_port=40000, max_port=55555, use_range=False):
    """
    Return a TCP port number that was free at the time of the check.

    :param min_port: lower bound when probing a port range (also caps the
        number of probe attempts)
    :param max_port: upper bound when probing a port range
    :param use_range: when True probe random ports in [min_port, max_port];
        otherwise bind port 0 and let the OS pick a free ephemeral port
    :return: free port number, or None if no free port was found
    :raises OSError: on bind errors other than address-in-use/conn-refused
    """
    hostname = ''
    for _check_port in range(min_port, max_port):
        pick_port = 0
        if use_range:
            pick_port = random.randint(min_port, max_port)

        with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
            try:
                s.bind((hostname, pick_port))
                s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                return s.getsockname()[1]
            except OSError as e:
                # NOTE: on python3 socket.error IS OSError, so having a bare
                # `except OSError: continue` before `except socket.error`
                # made the errno-filtering clause unreachable dead code.
                # Merge both: retry on expected bind failures, re-raise
                # anything unexpected.
                if e.args and e.args[0] in (errno.EADDRINUSE, errno.ECONNREFUSED):
                    continue
                raise
@@ -1,1984 +1,1984 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Base module for all VCS systems
20 Base module for all VCS systems
21 """
21 """
22 import os
22 import os
23 import re
23 import re
24 import time
24 import time
25 import shutil
25 import shutil
26 import datetime
26 import datetime
27 import fnmatch
27 import fnmatch
28 import itertools
28 import itertools
29 import logging
29 import logging
30 import dataclasses
30 import dataclasses
31 import warnings
31 import warnings
32
32
33 from zope.cachedescriptors.property import Lazy as LazyProperty
33 from zope.cachedescriptors.property import Lazy as LazyProperty
34
34
35
35
36 import rhodecode
36 import rhodecode
37 from rhodecode.translation import lazy_ugettext
37 from rhodecode.translation import lazy_ugettext
38 from rhodecode.lib.utils2 import safe_str, CachedProperty
38 from rhodecode.lib.utils2 import safe_str, CachedProperty
39 from rhodecode.lib.vcs.utils import author_name, author_email
39 from rhodecode.lib.vcs.utils import author_name, author_email
40 from rhodecode.lib.vcs.conf import settings
40 from rhodecode.lib.vcs.conf import settings
41 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
42 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
43 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
44 NodeDoesNotExistError, NodeNotChangedError, VCSError,
45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
45 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
46 RepositoryError)
46 RepositoryError)
47
47
48
48
49 log = logging.getLogger(__name__)
49 log = logging.getLogger(__name__)
50
50
51
51
52 FILEMODE_DEFAULT = 0o100644
52 FILEMODE_DEFAULT = 0o100644
53 FILEMODE_EXECUTABLE = 0o100755
53 FILEMODE_EXECUTABLE = 0o100755
54 EMPTY_COMMIT_ID = '0' * 40
54 EMPTY_COMMIT_ID = '0' * 40
55
55
56
56
@dataclasses.dataclass
class Reference:
    """A lightweight (type, name, commit_id) triple describing a VCS ref."""

    type: str
    name: str
    commit_id: str

    def __iter__(self):
        # iteration order mirrors the positional field order, so a
        # Reference unpacks as (type, name, commit_id)
        return iter((self.type, self.name, self.commit_id))

    @property
    def branch(self):
        # only branch-type refs expose a branch name; otherwise None
        return self.name if self.type == 'branch' else None

    @property
    def bookmark(self):
        # only bookmark-type refs ('book') expose a bookmark name
        return self.name if self.type == 'book' else None

    @property
    def to_str(self):
        # serialized `type:name:commit_id` form
        return reference_to_unicode(self)

    def asdict(self):
        """Return a plain-dict representation of this reference."""
        return {
            'type': self.type,
            'name': self.name,
            'commit_id': self.commit_id,
        }
88
88
89
89
def unicode_to_reference(raw: str):
    """
    Parse a ``type:name:commit_id`` string into a :class:`Reference`.

    Returns ``None`` when *raw* evaluates to False (empty string or None).
    """
    if not raw:
        return None
    return Reference(*raw.split(':'))
100
100
101
101
def reference_to_unicode(ref: Reference):
    """
    Serialize a :class:`Reference` back into its ``type:name:commit_id`` form.

    Returns ``None`` when *ref* evaluates to False.
    """
    return ':'.join(ref) if ref else None
111
111
112
112
class MergeFailureReason(object):
    """
    Enumeration with all the reasons why the server side merge could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The merge was not successful, there are conflicts.
    MERGE_FAILED = 2

    # The merge succeeded but we could not push it to the target repository.
    PUSH_FAILED = 3

    # The specified target is not a head in the target repository.
    TARGET_IS_NOT_HEAD = 4

    # The source repository contains more branches than the target. Pushing
    # the merge will create additional branches in the target.
    HG_SOURCE_HAS_MORE_BRANCHES = 5

    # The target reference has multiple heads. That does not allow to correctly
    # identify the target location. This could only happen for mercurial
    # branches.
    HG_TARGET_HAS_MULTIPLE_HEADS = 6

    # The target repository is locked
    TARGET_IS_LOCKED = 7

    # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
    # An involved commit could not be found.
    _DEPRECATED_MISSING_COMMIT = 8

    # The target repo reference is missing.
    MISSING_TARGET_REF = 9

    # The source repo reference is missing.
    MISSING_SOURCE_REF = 10

    # The merge was not successful, there are conflicts related to sub
    # repositories.
    SUBREPO_MERGE_FAILED = 11
164
164
165
165
class UpdateFailureReason(object):
    """
    Enumeration with all the reasons why the pull request update could fail.

    DO NOT change the number of the reasons, as they may be stored in the
    database.

    Changing the name of a reason is acceptable and encouraged to deprecate old
    reasons.
    """

    # NOTE: values are persisted in the database (see docstring) - treat this
    # enumeration as append-only.

    # Everything went well.
    NONE = 0

    # An unexpected exception was raised. Check the logs for more details.
    UNKNOWN = 1

    # The pull request is up to date.
    NO_CHANGE = 2

    # The pull request has a reference type that is not supported for update.
    WRONG_REF_TYPE = 3

    # Update failed because the target reference is missing.
    MISSING_TARGET_REF = 4

    # Update failed because the source reference is missing.
    MISSING_SOURCE_REF = 5
194
194
195
195
class MergeResponse(object):
    """Outcome of a server-side merge attempt: status, ref, and metadata."""

    # uses .format(**metadata) for variables
    MERGE_STATUS_MESSAGES = {
        MergeFailureReason.NONE: lazy_ugettext(
            'This pull request can be automatically merged.'),
        MergeFailureReason.UNKNOWN: lazy_ugettext(
            'This pull request cannot be merged because of an unhandled exception. '
            '{exception}'),
        MergeFailureReason.MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of merge conflicts. {unresolved_files}'),
        MergeFailureReason.PUSH_FAILED: lazy_ugettext(
            'This pull request could not be merged because push to '
            'target:`{target}@{merge_commit}` failed.'),
        MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
            'This pull request cannot be merged because the target '
            '`{target_ref.name}` is not a head.'),
        MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
            'This pull request cannot be merged because the source contains '
            'more branches than the target.'),
        MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
            'This pull request cannot be merged because the target `{target_ref.name}` '
            'has multiple heads: `{heads}`.'),
        MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
            'This pull request cannot be merged because the target repository is '
            'locked by {locked_by}.'),

        MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
            'This pull request cannot be merged because the target '
            'reference `{target_ref.name}` is missing.'),
        MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
            'This pull request cannot be merged because the source '
            'reference `{source_ref.name}` is missing.'),
        MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
            'This pull request cannot be merged because of conflicts related '
            'to sub repositories.'),

        # Deprecations
        MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
            'This pull request cannot be merged because the target or the '
            'source reference is missing.'),

    }

    def __init__(self, possible, executed, merge_ref: Reference, failure_reason, metadata=None):
        self.possible = possible
        self.executed = executed
        self.merge_ref = merge_ref
        self.failure_reason = failure_reason
        self.metadata = metadata or {}

    def __repr__(self):
        return f'<MergeResponse:{self.label} {self.failure_reason}>'

    def __eq__(self, other):
        # NOTE: metadata and merge_ref are deliberately excluded from equality
        return (
            isinstance(other, self.__class__)
            and self.possible == other.possible
            and self.executed == other.executed
            and self.failure_reason == other.failure_reason
        )

    @property
    def label(self):
        # reverse map: numeric failure reason -> public attribute name
        label_dict = {
            value: key
            for key, value in MergeFailureReason.__dict__.items()
            if not key.startswith('_')
        }
        return label_dict.get(self.failure_reason)

    @property
    def merge_status_message(self):
        """
        Return a human friendly error message for the given merge status code.
        """
        msg = safe_str(self.MERGE_STATUS_MESSAGES[self.failure_reason])
        try:
            return msg.format(**self.metadata)
        except Exception:
            # a metadata key referenced by the template is missing; fall back
            # to the raw (unformatted) message rather than crashing
            log.exception('Failed to format %s message', self)
            return msg

    def asdict(self):
        """Return a plain-dict representation including the status message."""
        keys = ['possible', 'executed', 'merge_ref', 'failure_reason',
                'merge_status_message']
        return {k: getattr(self, k) for k in keys}
282
282
283
283
class TargetRefMissing(ValueError):
    # raised when the target reference of an operation cannot be resolved
    pass
286
286
287
287
class SourceRefMissing(ValueError):
    # raised when the source reference of an operation cannot be resolved
    pass
290
290
291
291
292 class BaseRepository(object):
292 class BaseRepository(object):
293 """
293 """
294 Base Repository for final backends
294 Base Repository for final backends
295
295
296 .. attribute:: DEFAULT_BRANCH_NAME
296 .. attribute:: DEFAULT_BRANCH_NAME
297
297
298 name of default branch (i.e. "trunk" for svn, "master" for git etc.
298 name of default branch (i.e. "trunk" for svn, "master" for git etc.
299
299
300 .. attribute:: commit_ids
300 .. attribute:: commit_ids
301
301
302 list of all available commit ids, in ascending order
302 list of all available commit ids, in ascending order
303
303
304 .. attribute:: path
304 .. attribute:: path
305
305
306 absolute path to the repository
306 absolute path to the repository
307
307
308 .. attribute:: bookmarks
308 .. attribute:: bookmarks
309
309
310 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
310 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
311 there are no bookmarks or the backend implementation does not support
311 there are no bookmarks or the backend implementation does not support
312 bookmarks.
312 bookmarks.
313
313
314 .. attribute:: tags
314 .. attribute:: tags
315
315
316 Mapping from name to :term:`Commit ID` of the tag.
316 Mapping from name to :term:`Commit ID` of the tag.
317
317
318 """
318 """
319
319
320 DEFAULT_BRANCH_NAME = None
320 DEFAULT_BRANCH_NAME = None
321 DEFAULT_CONTACT = "Unknown"
321 DEFAULT_CONTACT = "Unknown"
322 DEFAULT_DESCRIPTION = "unknown"
322 DEFAULT_DESCRIPTION = "unknown"
323 EMPTY_COMMIT_ID = '0' * 40
323 EMPTY_COMMIT_ID = '0' * 40
324 COMMIT_ID_PAT = re.compile(r'[0-9a-fA-F]{40}')
324 COMMIT_ID_PAT = re.compile(r'[0-9a-fA-F]{40}')
325
325
326 path = None
326 path = None
327
327
328 _is_empty = None
328 _is_empty = None
329 _commit_ids = {}
329 _commit_ids = {}
330
330
    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be found at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
           would be cloned; requires ``create`` parameter to be set to True -
           raises RepositoryError if src_url is set and create evaluates to
           False
        """
        raise NotImplementedError
346
346
347 def __repr__(self):
347 def __repr__(self):
348 return f'<{self.__class__.__name__} at {self.path}>'
348 return f'<{self.__class__.__name__} at {self.path}>'
349
349
    def __len__(self):
        # number of commits in the repository
        return self.count()
352
352
353 def __eq__(self, other):
353 def __eq__(self, other):
354 same_instance = isinstance(other, self.__class__)
354 same_instance = isinstance(other, self.__class__)
355 return same_instance and other.path == self.path
355 return same_instance and other.path == self.path
356
356
    def __ne__(self, other):
        # defined explicitly for symmetry with __eq__
        return not self.__eq__(other)
359
359
360 def get_create_shadow_cache_pr_path(self, db_repo):
360 def get_create_shadow_cache_pr_path(self, db_repo):
361 path = db_repo.cached_diffs_dir
361 path = db_repo.cached_diffs_dir
362 if not os.path.exists(path):
362 if not os.path.exists(path):
363 os.makedirs(path, 0o755)
363 os.makedirs(path, 0o755)
364 return path
364 return path
365
365
366 @classmethod
366 @classmethod
367 def get_default_config(cls, default=None):
367 def get_default_config(cls, default=None):
368 config = Config()
368 config = Config()
369 if default and isinstance(default, list):
369 if default and isinstance(default, list):
370 for section, key, val in default:
370 for section, key, val in default:
371 config.set(section, key, val)
371 config.set(section, key, val)
372 return config
372 return config
373
373
    @LazyProperty
    def _remote(self):
        # handle to the vcsserver-side remote repository object;
        # concrete backends must provide this
        raise NotImplementedError
377
377
    def _heads(self, branch=None):
        # backends may override; the base implementation exposes no heads
        return []
380
380
    @LazyProperty
    def EMPTY_COMMIT(self):
        # sentinel commit representing "no commit" (all-zero id)
        return EmptyCommit(self.EMPTY_COMMIT_ID)
384
384
385 @LazyProperty
385 @LazyProperty
386 def alias(self):
386 def alias(self):
387 for k, v in settings.BACKENDS.items():
387 for k, v in settings.BACKENDS.items():
388 if v.split('.')[-1] == str(self.__class__.__name__):
388 if v.split('.')[-1] == str(self.__class__.__name__):
389 return k
389 return k
390
390
    @LazyProperty
    def name(self):
        # repository name == last component of its filesystem path
        return safe_str(os.path.basename(self.path))
394
394
    @LazyProperty
    def description(self):
        # human-readable repository description; backends must implement
        raise NotImplementedError
398
398
399 def refs(self):
399 def refs(self):
400 """
400 """
401 returns a `dict` with branches, bookmarks, tags, and closed_branches
401 returns a `dict` with branches, bookmarks, tags, and closed_branches
402 for this repository
402 for this repository
403 """
403 """
404 return dict(
404 return dict(
405 branches=self.branches,
405 branches=self.branches,
406 branches_closed=self.branches_closed,
406 branches_closed=self.branches_closed,
407 tags=self.tags,
407 tags=self.tags,
408 bookmarks=self.bookmarks
408 bookmarks=self.bookmarks
409 )
409 )
410
410
    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError
417
417
    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        """
        raise NotImplementedError
424
424
    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        """
        raise NotImplementedError
431
431
    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tags names to commit ids.
        """
        raise NotImplementedError
438
438
    @LazyProperty
    def size(self):
        """
        Returns combined size in bytes for all repository files
        """
        # delegate to the tip commit's size calculation
        tip = self.get_commit()
        return tip.size
446
446
447 def size_at_commit(self, commit_id):
447 def size_at_commit(self, commit_id):
448 commit = self.get_commit(commit_id)
448 commit = self.get_commit(commit_id)
449 return commit.size
449 return commit.size
450
450
451 def _check_for_empty(self):
451 def _check_for_empty(self):
452 no_commits = len(self._commit_ids) == 0
452 no_commits = len(self._commit_ids) == 0
453 if no_commits:
453 if no_commits:
454 # check on remote to be sure
454 # check on remote to be sure
455 return self._remote.is_empty()
455 return self._remote.is_empty()
456 else:
456 else:
457 return False
457 return False
458
458
459 def is_empty(self):
459 def is_empty(self):
460 if rhodecode.is_test:
460 if rhodecode.is_test:
461 return self._check_for_empty()
461 return self._check_for_empty()
462
462
463 if self._is_empty is None:
463 if self._is_empty is None:
464 # cache empty for production, but not tests
464 # cache empty for production, but not tests
465 self._is_empty = self._check_for_empty()
465 self._is_empty = self._check_for_empty()
466
466
467 return self._is_empty
467 return self._is_empty
468
468
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.
        """
        raise NotImplementedError
476
476
    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError
483
483
484 # ==========================================================================
484 # ==========================================================================
485 # COMMITS
485 # COMMITS
486 # ==========================================================================
486 # ==========================================================================
487
487
    @CachedProperty
    def commit_ids(self):
        # list of all commit ids in ascending order; backends must implement
        raise NotImplementedError
491
491
    def append_commit_id(self, commit_id):
        # register a newly created commit id in the cached commit list
        if commit_id not in self.commit_ids:
            self._rebuild_cache(self.commit_ids + [commit_id])

            # clear cache
            self._invalidate_prop_cache('commit_ids')
            self._is_empty = False
499
499
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
                   translate_tag=None, maybe_unreachable=False, reference_obj=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param pre_load: Optional. List of commit attributes to load.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError
511
511
512 def __iter__(self):
512 def __iter__(self):
513 for commit_id in self.commit_ids:
513 for commit_id in self.commit_ids:
514 yield self.get_commit(commit_id=commit_id)
514 yield self.get_commit(commit_id=commit_id)
515
515
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date:
        :param end_date:
        :param branch_name:
        :param show_hidden:
        :param pre_load:
        :param translate_tags:
        """
        raise NotImplementedError
534
534
535 def __getitem__(self, key):
535 def __getitem__(self, key):
536 """
536 """
537 Allows index based access to the commit objects of this repository.
537 Allows index based access to the commit objects of this repository.
538 """
538 """
539 pre_load = ["author", "branch", "date", "message", "parents"]
539 pre_load = ["author", "branch", "date", "message", "parents"]
540 if isinstance(key, slice):
540 if isinstance(key, slice):
541 return self._get_range(key, pre_load)
541 return self._get_range(key, pre_load)
542 return self.get_commit(commit_idx=key, pre_load=pre_load)
542 return self.get_commit(commit_idx=key, pre_load=pre_load)
543
543
544 def _get_range(self, slice_obj, pre_load):
544 def _get_range(self, slice_obj, pre_load):
545 for commit_id in self.commit_ids.__getitem__(slice_obj):
545 for commit_id in self.commit_ids.__getitem__(slice_obj):
546 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
546 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
547
547
    def count(self):
        # total number of commits in the repository
        return len(self.commit_ids)
550
550
551 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
551 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
552 """
552 """
553 Creates and returns a tag for the given ``commit_id``.
553 Creates and returns a tag for the given ``commit_id``.
554
554
555 :param name: name for new tag
555 :param name: name for new tag
556 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
556 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
557 :param commit_id: commit id for which new tag would be created
557 :param commit_id: commit id for which new tag would be created
558 :param message: message of the tag's commit
558 :param message: message of the tag's commit
559 :param date: date of tag's commit
559 :param date: date of tag's commit
560
560
561 :raises TagAlreadyExistError: if tag with same name already exists
561 :raises TagAlreadyExistError: if tag with same name already exists
562 """
562 """
563 raise NotImplementedError
563 raise NotImplementedError
564
564
565 def remove_tag(self, name, user, message=None, date=None):
565 def remove_tag(self, name, user, message=None, date=None):
566 """
566 """
567 Removes tag with the given ``name``.
567 Removes tag with the given ``name``.
568
568
569 :param name: name of the tag to be removed
569 :param name: name of the tag to be removed
570 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
570 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
571 :param message: message of the tag's removal commit
571 :param message: message of the tag's removal commit
572 :param date: date of tag's removal commit
572 :param date: date of tag's removal commit
573
573
574 :raises TagDoesNotExistError: if tag with given name does not exists
574 :raises TagDoesNotExistError: if tag with given name does not exists
575 """
575 """
576 raise NotImplementedError
576 raise NotImplementedError
577
577
578 def get_diff(
578 def get_diff(
579 self, commit1, commit2, path=None, ignore_whitespace=False,
579 self, commit1, commit2, path=None, ignore_whitespace=False,
580 context=3, path1=None):
580 context=3, path1=None):
581 """
581 """
582 Returns (git like) *diff*, as plain text. Shows changes introduced by
582 Returns (git like) *diff*, as plain text. Shows changes introduced by
583 `commit2` since `commit1`.
583 `commit2` since `commit1`.
584
584
585 :param commit1: Entry point from which diff is shown. Can be
585 :param commit1: Entry point from which diff is shown. Can be
586 ``self.EMPTY_COMMIT`` - in this case, patch showing all
586 ``self.EMPTY_COMMIT`` - in this case, patch showing all
587 the changes since empty state of the repository until `commit2`
587 the changes since empty state of the repository until `commit2`
588 :param commit2: Until which commit changes should be shown.
588 :param commit2: Until which commit changes should be shown.
589 :param path: Can be set to a path of a file to create a diff of that
589 :param path: Can be set to a path of a file to create a diff of that
590 file. If `path1` is also set, this value is only associated to
590 file. If `path1` is also set, this value is only associated to
591 `commit2`.
591 `commit2`.
592 :param ignore_whitespace: If set to ``True``, would not show whitespace
592 :param ignore_whitespace: If set to ``True``, would not show whitespace
593 changes. Defaults to ``False``.
593 changes. Defaults to ``False``.
594 :param context: How many lines before/after changed lines should be
594 :param context: How many lines before/after changed lines should be
595 shown. Defaults to ``3``.
595 shown. Defaults to ``3``.
596 :param path1: Can be set to a path to associate with `commit1`. This
596 :param path1: Can be set to a path to associate with `commit1`. This
597 parameter works only for backends which support diff generation for
597 parameter works only for backends which support diff generation for
598 different paths. Other backends will raise a `ValueError` if `path1`
598 different paths. Other backends will raise a `ValueError` if `path1`
599 is set and has a different value than `path`.
599 is set and has a different value than `path`.
600 :param file_path: filter this diff by given path pattern
600 :param file_path: filter this diff by given path pattern
601 """
601 """
602 raise NotImplementedError
602 raise NotImplementedError
603
603
604 def strip(self, commit_id, branch=None):
604 def strip(self, commit_id, branch=None):
605 """
605 """
606 Strip given commit_id from the repository
606 Strip given commit_id from the repository
607 """
607 """
608 raise NotImplementedError
608 raise NotImplementedError
609
609
610 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
610 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
611 """
611 """
612 Return a latest common ancestor commit if one exists for this repo
612 Return a latest common ancestor commit if one exists for this repo
613 `commit_id1` vs `commit_id2` from `repo2`.
613 `commit_id1` vs `commit_id2` from `repo2`.
614
614
615 :param commit_id1: Commit it from this repository to use as a
615 :param commit_id1: Commit it from this repository to use as a
616 target for the comparison.
616 target for the comparison.
617 :param commit_id2: Source commit id to use for comparison.
617 :param commit_id2: Source commit id to use for comparison.
618 :param repo2: Source repository to use for comparison.
618 :param repo2: Source repository to use for comparison.
619 """
619 """
620 raise NotImplementedError
620 raise NotImplementedError
621
621
622 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
622 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
623 """
623 """
624 Compare this repository's revision `commit_id1` with `commit_id2`.
624 Compare this repository's revision `commit_id1` with `commit_id2`.
625
625
626 Returns a tuple(commits, ancestor) that would be merged from
626 Returns a tuple(commits, ancestor) that would be merged from
627 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
627 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
628 will be returned as ancestor.
628 will be returned as ancestor.
629
629
630 :param commit_id1: Commit it from this repository to use as a
630 :param commit_id1: Commit it from this repository to use as a
631 target for the comparison.
631 target for the comparison.
632 :param commit_id2: Source commit id to use for comparison.
632 :param commit_id2: Source commit id to use for comparison.
633 :param repo2: Source repository to use for comparison.
633 :param repo2: Source repository to use for comparison.
634 :param merge: If set to ``True`` will do a merge compare which also
634 :param merge: If set to ``True`` will do a merge compare which also
635 returns the common ancestor.
635 returns the common ancestor.
636 :param pre_load: Optional. List of commit attributes to load.
636 :param pre_load: Optional. List of commit attributes to load.
637 """
637 """
638 raise NotImplementedError
638 raise NotImplementedError
639
639
640 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
640 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
641 user_name='', user_email='', message='', dry_run=False,
641 user_name='', user_email='', message='', dry_run=False,
642 use_rebase=False, close_branch=False):
642 use_rebase=False, close_branch=False):
643 """
643 """
644 Merge the revisions specified in `source_ref` from `source_repo`
644 Merge the revisions specified in `source_ref` from `source_repo`
645 onto the `target_ref` of this repository.
645 onto the `target_ref` of this repository.
646
646
647 `source_ref` and `target_ref` are named tupls with the following
647 `source_ref` and `target_ref` are named tupls with the following
648 fields `type`, `name` and `commit_id`.
648 fields `type`, `name` and `commit_id`.
649
649
650 Returns a MergeResponse named tuple with the following fields
650 Returns a MergeResponse named tuple with the following fields
651 'possible', 'executed', 'source_commit', 'target_commit',
651 'possible', 'executed', 'source_commit', 'target_commit',
652 'merge_commit'.
652 'merge_commit'.
653
653
654 :param repo_id: `repo_id` target repo id.
654 :param repo_id: `repo_id` target repo id.
655 :param workspace_id: `workspace_id` unique identifier.
655 :param workspace_id: `workspace_id` unique identifier.
656 :param target_ref: `target_ref` points to the commit on top of which
656 :param target_ref: `target_ref` points to the commit on top of which
657 the `source_ref` should be merged.
657 the `source_ref` should be merged.
658 :param source_repo: The repository that contains the commits to be
658 :param source_repo: The repository that contains the commits to be
659 merged.
659 merged.
660 :param source_ref: `source_ref` points to the topmost commit from
660 :param source_ref: `source_ref` points to the topmost commit from
661 the `source_repo` which should be merged.
661 the `source_repo` which should be merged.
662 :param user_name: Merge commit `user_name`.
662 :param user_name: Merge commit `user_name`.
663 :param user_email: Merge commit `user_email`.
663 :param user_email: Merge commit `user_email`.
664 :param message: Merge commit `message`.
664 :param message: Merge commit `message`.
665 :param dry_run: If `True` the merge will not take place.
665 :param dry_run: If `True` the merge will not take place.
666 :param use_rebase: If `True` commits from the source will be rebased
666 :param use_rebase: If `True` commits from the source will be rebased
667 on top of the target instead of being merged.
667 on top of the target instead of being merged.
668 :param close_branch: If `True` branch will be close before merging it
668 :param close_branch: If `True` branch will be close before merging it
669 """
669 """
670 if dry_run:
670 if dry_run:
671 message = message or settings.MERGE_DRY_RUN_MESSAGE
671 message = message or settings.MERGE_DRY_RUN_MESSAGE
672 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
672 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
673 user_name = user_name or settings.MERGE_DRY_RUN_USER
673 user_name = user_name or settings.MERGE_DRY_RUN_USER
674 else:
674 else:
675 if not user_name:
675 if not user_name:
676 raise ValueError('user_name cannot be empty')
676 raise ValueError('user_name cannot be empty')
677 if not user_email:
677 if not user_email:
678 raise ValueError('user_email cannot be empty')
678 raise ValueError('user_email cannot be empty')
679 if not message:
679 if not message:
680 raise ValueError('message cannot be empty')
680 raise ValueError('message cannot be empty')
681
681
682 try:
682 try:
683 return self._merge_repo(
683 return self._merge_repo(
684 repo_id, workspace_id, target_ref, source_repo,
684 repo_id, workspace_id, target_ref, source_repo,
685 source_ref, message, user_name, user_email, dry_run=dry_run,
685 source_ref, message, user_name, user_email, dry_run=dry_run,
686 use_rebase=use_rebase, close_branch=close_branch)
686 use_rebase=use_rebase, close_branch=close_branch)
687 except RepositoryError as exc:
687 except RepositoryError as exc:
688 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
688 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
689 return MergeResponse(
689 return MergeResponse(
690 False, False, None, MergeFailureReason.UNKNOWN,
690 False, False, None, MergeFailureReason.UNKNOWN,
691 metadata={'exception': str(exc)})
691 metadata={'exception': str(exc)})
692
692
693 def _merge_repo(self, repo_id, workspace_id, target_ref,
693 def _merge_repo(self, repo_id, workspace_id, target_ref,
694 source_repo, source_ref, merge_message,
694 source_repo, source_ref, merge_message,
695 merger_name, merger_email, dry_run=False,
695 merger_name, merger_email, dry_run=False,
696 use_rebase=False, close_branch=False):
696 use_rebase=False, close_branch=False):
697 """Internal implementation of merge."""
697 """Internal implementation of merge."""
698 raise NotImplementedError
698 raise NotImplementedError
699
699
700 def _maybe_prepare_merge_workspace(
700 def _maybe_prepare_merge_workspace(
701 self, repo_id, workspace_id, target_ref, source_ref):
701 self, repo_id, workspace_id, target_ref, source_ref):
702 """
702 """
703 Create the merge workspace.
703 Create the merge workspace.
704
704
705 :param workspace_id: `workspace_id` unique identifier.
705 :param workspace_id: `workspace_id` unique identifier.
706 """
706 """
707 raise NotImplementedError
707 raise NotImplementedError
708
708
709 @classmethod
709 @classmethod
710 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
710 def _get_legacy_shadow_repository_path(cls, repo_path, workspace_id):
711 """
711 """
712 Legacy version that was used before. We still need it for
712 Legacy version that was used before. We still need it for
713 backward compat
713 backward compat
714 """
714 """
715 return os.path.join(
715 return os.path.join(
716 os.path.dirname(repo_path),
716 os.path.dirname(repo_path),
717 f'.__shadow_{os.path.basename(repo_path)}_{workspace_id}')
717 f'.__shadow_{os.path.basename(repo_path)}_{workspace_id}')
718
718
719 @classmethod
719 @classmethod
720 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
720 def _get_shadow_repository_path(cls, repo_path, repo_id, workspace_id):
721 # The name of the shadow repository must start with '.', so it is
721 # The name of the shadow repository must start with '.', so it is
722 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
722 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
723 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
723 legacy_repository_path = cls._get_legacy_shadow_repository_path(repo_path, workspace_id)
724 if os.path.exists(legacy_repository_path):
724 if os.path.exists(legacy_repository_path):
725 return legacy_repository_path
725 return legacy_repository_path
726 else:
726 else:
727 return os.path.join(
727 return os.path.join(
728 os.path.dirname(repo_path),
728 os.path.dirname(repo_path),
729 f'.__shadow_repo_{repo_id}_{workspace_id}')
729 f'.__shadow_repo_{repo_id}_{workspace_id}')
730
730
731 def cleanup_merge_workspace(self, repo_id, workspace_id):
731 def cleanup_merge_workspace(self, repo_id, workspace_id):
732 """
732 """
733 Remove merge workspace.
733 Remove merge workspace.
734
734
735 This function MUST not fail in case there is no workspace associated to
735 This function MUST not fail in case there is no workspace associated to
736 the given `workspace_id`.
736 the given `workspace_id`.
737
737
738 :param workspace_id: `workspace_id` unique identifier.
738 :param workspace_id: `workspace_id` unique identifier.
739 """
739 """
740 shadow_repository_path = self._get_shadow_repository_path(
740 shadow_repository_path = self._get_shadow_repository_path(
741 self.path, repo_id, workspace_id)
741 self.path, repo_id, workspace_id)
742 shadow_repository_path_del = '{}.{}.delete'.format(
742 shadow_repository_path_del = '{}.{}.delete'.format(
743 shadow_repository_path, time.time())
743 shadow_repository_path, time.time())
744
744
745 # move the shadow repo, so it never conflicts with the one used.
745 # move the shadow repo, so it never conflicts with the one used.
746 # we use this method because shutil.rmtree had some edge case problems
746 # we use this method because shutil.rmtree had some edge case problems
747 # removing symlinked repositories
747 # removing symlinked repositories
748 if not os.path.isdir(shadow_repository_path):
748 if not os.path.isdir(shadow_repository_path):
749 return
749 return
750
750
751 shutil.move(shadow_repository_path, shadow_repository_path_del)
751 shutil.move(shadow_repository_path, shadow_repository_path_del)
752 try:
752 try:
753 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
753 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
754 except Exception:
754 except Exception:
755 log.exception('Failed to gracefully remove shadow repo under %s',
755 log.exception('Failed to gracefully remove shadow repo under %s',
756 shadow_repository_path_del)
756 shadow_repository_path_del)
757 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
757 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
758
758
759 # ========== #
759 # ========== #
760 # COMMIT API #
760 # COMMIT API #
761 # ========== #
761 # ========== #
762
762
763 @LazyProperty
763 @LazyProperty
764 def in_memory_commit(self):
764 def in_memory_commit(self):
765 """
765 """
766 Returns :class:`InMemoryCommit` object for this repository.
766 Returns :class:`InMemoryCommit` object for this repository.
767 """
767 """
768 raise NotImplementedError
768 raise NotImplementedError
769
769
770 # ======================== #
770 # ======================== #
771 # UTILITIES FOR SUBCLASSES #
771 # UTILITIES FOR SUBCLASSES #
772 # ======================== #
772 # ======================== #
773
773
774 def _validate_diff_commits(self, commit1, commit2):
774 def _validate_diff_commits(self, commit1, commit2):
775 """
775 """
776 Validates that the given commits are related to this repository.
776 Validates that the given commits are related to this repository.
777
777
778 Intended as a utility for sub classes to have a consistent validation
778 Intended as a utility for sub classes to have a consistent validation
779 of input parameters in methods like :meth:`get_diff`.
779 of input parameters in methods like :meth:`get_diff`.
780 """
780 """
781 self._validate_commit(commit1)
781 self._validate_commit(commit1)
782 self._validate_commit(commit2)
782 self._validate_commit(commit2)
783 if (isinstance(commit1, EmptyCommit) and
783 if (isinstance(commit1, EmptyCommit) and
784 isinstance(commit2, EmptyCommit)):
784 isinstance(commit2, EmptyCommit)):
785 raise ValueError("Cannot compare two empty commits")
785 raise ValueError("Cannot compare two empty commits")
786
786
787 def _validate_commit(self, commit):
787 def _validate_commit(self, commit):
788 if not isinstance(commit, BaseCommit):
788 if not isinstance(commit, BaseCommit):
789 raise TypeError(
789 raise TypeError(
790 "%s is not of type BaseCommit" % repr(commit))
790 "%s is not of type BaseCommit" % repr(commit))
791 if commit.repository != self and not isinstance(commit, EmptyCommit):
791 if commit.repository != self and not isinstance(commit, EmptyCommit):
792 raise ValueError(
792 raise ValueError(
793 "Commit %s must be a valid commit from this repository %s, "
793 "Commit %s must be a valid commit from this repository %s, "
794 "related to this repository instead %s." %
794 "related to this repository instead %s." %
795 (commit, self, commit.repository))
795 (commit, self, commit.repository))
796
796
797 def _validate_commit_id(self, commit_id):
797 def _validate_commit_id(self, commit_id):
798 if not isinstance(commit_id, str):
798 if not isinstance(commit_id, str):
799 raise TypeError(f"commit_id must be a string value got {type(commit_id)} instead")
799 raise TypeError(f"commit_id must be a string value got {type(commit_id)} instead")
800
800
801 def _validate_commit_idx(self, commit_idx):
801 def _validate_commit_idx(self, commit_idx):
802 if not isinstance(commit_idx, int):
802 if not isinstance(commit_idx, int):
803 raise TypeError(f"commit_idx must be a numeric value, got {type(commit_idx)}")
803 raise TypeError(f"commit_idx must be a numeric value, got {type(commit_idx)}")
804
804
805 def _validate_branch_name(self, branch_name):
805 def _validate_branch_name(self, branch_name):
806 if branch_name and branch_name not in self.branches_all:
806 if branch_name and branch_name not in self.branches_all:
807 msg = (f"Branch {branch_name} not found in {self}")
807 msg = (f"Branch {branch_name} not found in {self}")
808 raise BranchDoesNotExistError(msg)
808 raise BranchDoesNotExistError(msg)
809
809
810 #
810 #
811 # Supporting deprecated API parts
811 # Supporting deprecated API parts
812 # TODO: johbo: consider to move this into a mixin
812 # TODO: johbo: consider to move this into a mixin
813 #
813 #
814
814
815 @property
815 @property
816 def EMPTY_CHANGESET(self):
816 def EMPTY_CHANGESET(self):
817 warnings.warn(
817 warnings.warn(
818 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
818 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
819 return self.EMPTY_COMMIT_ID
819 return self.EMPTY_COMMIT_ID
820
820
821 @property
821 @property
822 def revisions(self):
822 def revisions(self):
823 warnings.warn("Use commits attribute instead", DeprecationWarning)
823 warnings.warn("Use commits attribute instead", DeprecationWarning)
824 return self.commit_ids
824 return self.commit_ids
825
825
826 @revisions.setter
826 @revisions.setter
827 def revisions(self, value):
827 def revisions(self, value):
828 warnings.warn("Use commits attribute instead", DeprecationWarning)
828 warnings.warn("Use commits attribute instead", DeprecationWarning)
829 self.commit_ids = value
829 self.commit_ids = value
830
830
831 def get_changeset(self, revision=None, pre_load=None):
831 def get_changeset(self, revision=None, pre_load=None):
832 warnings.warn("Use get_commit instead", DeprecationWarning)
832 warnings.warn("Use get_commit instead", DeprecationWarning)
833 commit_id = None
833 commit_id = None
834 commit_idx = None
834 commit_idx = None
835 if isinstance(revision, str):
835 if isinstance(revision, str):
836 commit_id = revision
836 commit_id = revision
837 else:
837 else:
838 commit_idx = revision
838 commit_idx = revision
839 return self.get_commit(
839 return self.get_commit(
840 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
840 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
841
841
842 def get_changesets(
842 def get_changesets(
843 self, start=None, end=None, start_date=None, end_date=None,
843 self, start=None, end=None, start_date=None, end_date=None,
844 branch_name=None, pre_load=None):
844 branch_name=None, pre_load=None):
845 warnings.warn("Use get_commits instead", DeprecationWarning)
845 warnings.warn("Use get_commits instead", DeprecationWarning)
846 start_id = self._revision_to_commit(start)
846 start_id = self._revision_to_commit(start)
847 end_id = self._revision_to_commit(end)
847 end_id = self._revision_to_commit(end)
848 return self.get_commits(
848 return self.get_commits(
849 start_id=start_id, end_id=end_id, start_date=start_date,
849 start_id=start_id, end_id=end_id, start_date=start_date,
850 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
850 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
851
851
852 def _revision_to_commit(self, revision):
852 def _revision_to_commit(self, revision):
853 """
853 """
854 Translates a revision to a commit_id
854 Translates a revision to a commit_id
855
855
856 Helps to support the old changeset based API which allows to use
856 Helps to support the old changeset based API which allows to use
857 commit ids and commit indices interchangeable.
857 commit ids and commit indices interchangeable.
858 """
858 """
859 if revision is None:
859 if revision is None:
860 return revision
860 return revision
861
861
862 if isinstance(revision, str):
862 if isinstance(revision, str):
863 commit_id = revision
863 commit_id = revision
864 else:
864 else:
865 commit_id = self.commit_ids[revision]
865 commit_id = self.commit_ids[revision]
866 return commit_id
866 return commit_id
867
867
868 @property
868 @property
869 def in_memory_changeset(self):
869 def in_memory_changeset(self):
870 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
870 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
871 return self.in_memory_commit
871 return self.in_memory_commit
872
872
873 def get_path_permissions(self, username):
873 def get_path_permissions(self, username):
874 """
874 """
875 Returns a path permission checker or None if not supported
875 Returns a path permission checker or None if not supported
876
876
877 :param username: session user name
877 :param username: session user name
878 :return: an instance of BasePathPermissionChecker or None
878 :return: an instance of BasePathPermissionChecker or None
879 """
879 """
880 return None
880 return None
881
881
882 def install_hooks(self, force=False):
882 def install_hooks(self, force=False):
883 return self._remote.install_hooks(force)
883 return self._remote.install_hooks(force)
884
884
885 def get_hooks_info(self):
885 def get_hooks_info(self):
886 return self._remote.get_hooks_info()
886 return self._remote.get_hooks_info()
887
887
888 def vcsserver_invalidate_cache(self, delete=False):
888 def vcsserver_invalidate_cache(self, delete=False):
889 return self._remote.vcsserver_invalidate_cache(delete)
889 return self._remote.vcsserver_invalidate_cache(delete)
890
890
891
891
892 class BaseCommit(object):
892 class BaseCommit(object):
893 """
893 """
894 Each backend should implement it's commit representation.
894 Each backend should implement it's commit representation.
895
895
896 **Attributes**
896 **Attributes**
897
897
898 ``repository``
898 ``repository``
899 repository object within which commit exists
899 repository object within which commit exists
900
900
901 ``id``
901 ``id``
902 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
902 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
903 just ``tip``.
903 just ``tip``.
904
904
905 ``raw_id``
905 ``raw_id``
906 raw commit representation (i.e. full 40 length sha for git
906 raw commit representation (i.e. full 40 length sha for git
907 backend)
907 backend)
908
908
909 ``short_id``
909 ``short_id``
910 shortened (if apply) version of ``raw_id``; it would be simple
910 shortened (if apply) version of ``raw_id``; it would be simple
911 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
911 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
912 as ``raw_id`` for subversion
912 as ``raw_id`` for subversion
913
913
914 ``idx``
914 ``idx``
915 commit index
915 commit index
916
916
917 ``files``
917 ``files``
918 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
918 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
919
919
920 ``dirs``
920 ``dirs``
921 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
921 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
922
922
923 ``nodes``
923 ``nodes``
924 combined list of ``Node`` objects
924 combined list of ``Node`` objects
925
925
926 ``author``
926 ``author``
927 author of the commit, as unicode
927 author of the commit, as unicode
928
928
929 ``message``
929 ``message``
930 message of the commit, as unicode
930 message of the commit, as unicode
931
931
932 ``parents``
932 ``parents``
933 list of parent commits
933 list of parent commits
934
934
935 """
935 """
936 repository = None
936 repository = None
937 branch = None
937 branch = None
938
938
939 """
939 """
940 Depending on the backend this should be set to the branch name of the
940 Depending on the backend this should be set to the branch name of the
941 commit. Backends not supporting branches on commits should leave this
941 commit. Backends not supporting branches on commits should leave this
942 value as ``None``.
942 value as ``None``.
943 """
943 """
944
944
945 _ARCHIVE_PREFIX_TEMPLATE = '{repo_name}-{short_id}'
945 _ARCHIVE_PREFIX_TEMPLATE = '{repo_name}-{short_id}'
946 """
946 """
947 This template is used to generate a default prefix for repository archives
947 This template is used to generate a default prefix for repository archives
948 if no prefix has been specified.
948 if no prefix has been specified.
949 """
949 """
950
950
951 def __repr__(self):
951 def __repr__(self):
952 return self.__str__()
952 return self.__str__()
953
953
954 def __str__(self):
954 def __str__(self):
955 return f'<{self.__class__.__name__} at {self.idx}:{self.short_id}>'
955 return f'<{self.__class__.__name__} at {self.idx}:{self.short_id}>'
956
956
957 def __eq__(self, other):
957 def __eq__(self, other):
958 same_instance = isinstance(other, self.__class__)
958 same_instance = isinstance(other, self.__class__)
959 return same_instance and self.raw_id == other.raw_id
959 return same_instance and self.raw_id == other.raw_id
960
960
961 def __json__(self):
961 def __json__(self):
962 parents = []
962 parents = []
963 try:
963 try:
964 for parent in self.parents:
964 for parent in self.parents:
965 parents.append({'raw_id': parent.raw_id})
965 parents.append({'raw_id': parent.raw_id})
966 except NotImplementedError:
966 except NotImplementedError:
967 # empty commit doesn't have parents implemented
967 # empty commit doesn't have parents implemented
968 pass
968 pass
969
969
970 return {
970 return {
971 'short_id': self.short_id,
971 'short_id': self.short_id,
972 'raw_id': self.raw_id,
972 'raw_id': self.raw_id,
973 'revision': self.idx,
973 'revision': self.idx,
974 'message': self.message,
974 'message': self.message,
975 'date': self.date,
975 'date': self.date,
976 'author': self.author,
976 'author': self.author,
977 'parents': parents,
977 'parents': parents,
978 'branch': self.branch
978 'branch': self.branch
979 }
979 }
980
980
981 def __getstate__(self):
981 def __getstate__(self):
982 d = self.__dict__.copy()
982 d = self.__dict__.copy()
983 d.pop('_remote', None)
983 d.pop('_remote', None)
984 d.pop('repository', None)
984 d.pop('repository', None)
985 return d
985 return d
986
986
987 def get_remote(self):
987 def get_remote(self):
988 return self._remote
988 return self._remote
989
989
990 def serialize(self):
990 def serialize(self):
991 return self.__json__()
991 return self.__json__()
992
992
993 def _get_refs(self):
993 def _get_refs(self):
994 return {
994 return {
995 'branches': [self.branch] if self.branch else [],
995 'branches': [self.branch] if self.branch else [],
996 'bookmarks': getattr(self, 'bookmarks', []),
996 'bookmarks': getattr(self, 'bookmarks', []),
997 'tags': self.tags
997 'tags': self.tags
998 }
998 }
999
999
1000 @LazyProperty
1000 @LazyProperty
1001 def last(self):
1001 def last(self):
1002 """
1002 """
1003 ``True`` if this is last commit in repository, ``False``
1003 ``True`` if this is last commit in repository, ``False``
1004 otherwise; trying to access this attribute while there is no
1004 otherwise; trying to access this attribute while there is no
1005 commits would raise `EmptyRepositoryError`
1005 commits would raise `EmptyRepositoryError`
1006 """
1006 """
1007 if self.repository is None:
1007 if self.repository is None:
1008 raise CommitError("Cannot check if it's most recent commit")
1008 raise CommitError("Cannot check if it's most recent commit")
1009 return self.raw_id == self.repository.commit_ids[-1]
1009 return self.raw_id == self.repository.commit_ids[-1]
1010
1010
1011 @LazyProperty
1011 @LazyProperty
1012 def parents(self):
1012 def parents(self):
1013 """
1013 """
1014 Returns list of parent commits.
1014 Returns list of parent commits.
1015 """
1015 """
1016 raise NotImplementedError
1016 raise NotImplementedError
1017
1017
1018 @LazyProperty
1018 @LazyProperty
1019 def first_parent(self):
1019 def first_parent(self):
1020 """
1020 """
1021 Returns list of parent commits.
1021 Returns list of parent commits.
1022 """
1022 """
1023 return self.parents[0] if self.parents else EmptyCommit()
1023 return self.parents[0] if self.parents else EmptyCommit()
1024
1024
1025 @property
1025 @property
1026 def merge(self):
1026 def merge(self):
1027 """
1027 """
1028 Returns boolean if commit is a merge.
1028 Returns boolean if commit is a merge.
1029 """
1029 """
1030 return len(self.parents) > 1
1030 return len(self.parents) > 1
1031
1031
    @LazyProperty
    def children(self):
        """
        Returns list of child commits.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1038
1038
    @LazyProperty
    def id(self):
        """
        Returns string identifying this commit.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1045
1045
    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1052
1052
    @LazyProperty
    def short_id(self):
        """
        Returns shortened version of ``raw_id`` attribute, as string,
        identifying this commit, useful for presentation to users.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1060
1060
    @LazyProperty
    def idx(self):
        """
        Returns integer identifying this commit.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1067
1067
    @LazyProperty
    def committer(self):
        """
        Returns committer for this commit

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1074
1074
    @LazyProperty
    def committer_name(self):
        """
        Returns committer name for this commit
        """
        # delegate parsing of the "Name <email>" string to the shared helper
        return author_name(self.committer)
1082
1082
    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address for this commit
        """
        # delegate parsing of the "Name <email>" string to the shared helper
        return author_email(self.committer)
1090
1090
    @LazyProperty
    def author(self):
        """
        Returns author for this commit

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1098
1098
    @LazyProperty
    def author_name(self):
        """
        Returns author name for this commit
        """
        # delegate parsing of the "Name <email>" string to the shared helper
        return author_name(self.author)
1106
1106
    @LazyProperty
    def author_email(self):
        """
        Returns author email address for this commit
        """
        # delegate parsing of the "Name <email>" string to the shared helper
        return author_email(self.author)
1114
1114
    def get_file_mode(self, path: bytes):
        """
        Returns stat mode of the file at `path`.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1120
1120
    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1126
1126
    def is_node_binary(self, path):
        """
        Returns ``True`` if given path is a binary file

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1132
1132
    def node_md5_hash(self, path):
        """
        Returns md5 hash of a node data

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1138
1138
    def get_file_content(self, path) -> bytes:
        """
        Returns content of the file at the given `path`.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1144
1144
    def get_file_content_streamed(self, path):
        """
        Returns a streaming response from vcsserver with file content.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1150
1150
    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1156
1156
1157 def get_path_commit(self, path, pre_load=None):
1157 def get_path_commit(self, path, pre_load=None):
1158 """
1158 """
1159 Returns last commit of the file at the given `path`.
1159 Returns last commit of the file at the given `path`.
1160
1160
1161 :param pre_load: Optional. List of commit attributes to load.
1161 :param pre_load: Optional. List of commit attributes to load.
1162 """
1162 """
1163 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1163 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1164 if not commits:
1164 if not commits:
1165 raise RepositoryError(
1165 raise RepositoryError(
1166 'Failed to fetch history for path {}. '
1166 'Failed to fetch history for path {}. '
1167 'Please check if such path exists in your repository'.format(
1167 'Please check if such path exists in your repository'.format(
1168 path))
1168 path))
1169 return commits[0]
1169 return commits[0]
1170
1170
    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.

        :param limit: Optional. Allows to limit the size of the returned
            history. This is intended as a hint to the underlying backend, so
            that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1182
1182
    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line

        :param pre_load: Optional. List of commit attributes to load.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1191
1191
    def get_nodes(self, path, pre_load=None):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.

        :raises ``CommitError``: if node at the given ``path`` is not
            instance of ``DirNode``

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1201
1201
    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
            ``path``

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1210
1210
    def get_largefile_node(self, path):
        """
        Returns the path to largefile from Mercurial/Git-lfs storage.
        or None if it's not a largefile node
        """
        # default: no largefile support; backends override as needed
        return None
1217
1217
1218 def archive_repo(self, archive_name_key, kind='tgz', subrepos=None,
1218 def archive_repo(self, archive_name_key, kind='tgz', subrepos=None,
1219 archive_dir_name=None, write_metadata=False, mtime=None,
1219 archive_dir_name=None, write_metadata=False, mtime=None,
1220 archive_at_path='/', cache_config=None):
1220 archive_at_path='/', cache_config=None):
1221 """
1221 """
1222 Creates an archive containing the contents of the repository.
1222 Creates an archive containing the contents of the repository.
1223
1223
1224 :param archive_name_key: unique key under this archive should be generated
1224 :param archive_name_key: unique key under this archive should be generated
1225 :param kind: one of the following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1225 :param kind: one of the following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1226 :param archive_dir_name: name of root directory in archive.
1226 :param archive_dir_name: name of root directory in archive.
1227 Default is repository name and commit's short_id joined with dash:
1227 Default is repository name and commit's short_id joined with dash:
1228 ``"{repo_name}-{short_id}"``.
1228 ``"{repo_name}-{short_id}"``.
1229 :param write_metadata: write a metadata file into archive.
1229 :param write_metadata: write a metadata file into archive.
1230 :param mtime: custom modification time for archive creation, defaults
1230 :param mtime: custom modification time for archive creation, defaults
1231 to time.time() if not given.
1231 to time.time() if not given.
1232 :param archive_at_path: pack files at this path (default '/')
1232 :param archive_at_path: pack files at this path (default '/')
1233 :param cache_config: config spec to send to vcsserver to configure the backend to store files
1233 :param cache_config: config spec to send to vcsserver to configure the backend to store files
1234
1234
1235 :raise VCSError: If prefix has a problem.
1235 :raise VCSError: If prefix has a problem.
1236 """
1236 """
1237 cache_config = cache_config or {}
1237 cache_config = cache_config or {}
1238 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1238 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1239 if kind not in allowed_kinds:
1239 if kind not in allowed_kinds:
1240 raise ImproperArchiveTypeError(
1240 raise ImproperArchiveTypeError(
1241 f'Archive kind ({kind}) not supported use one of {allowed_kinds}')
1241 f'Archive kind ({kind}) not supported use one of {allowed_kinds}')
1242
1242
1243 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1243 archive_dir_name = self._validate_archive_prefix(archive_dir_name)
1244 mtime = mtime is not None or time.mktime(self.date.timetuple())
1244 mtime = mtime is not None or time.mktime(self.date.timetuple())
1245 commit_id = self.raw_id
1245 commit_id = self.raw_id
1246
1246
1247 return self.repository._remote.archive_repo(
1247 return self.repository._remote.archive_repo(
1248 archive_name_key, kind, mtime, archive_at_path,
1248 archive_name_key, kind, mtime, archive_at_path,
1249 archive_dir_name, commit_id, cache_config)
1249 archive_dir_name, commit_id, cache_config)
1250
1250
1251 def _validate_archive_prefix(self, archive_dir_name):
1251 def _validate_archive_prefix(self, archive_dir_name):
1252 if archive_dir_name is None:
1252 if archive_dir_name is None:
1253 archive_dir_name = self._ARCHIVE_PREFIX_TEMPLATE.format(
1253 archive_dir_name = self._ARCHIVE_PREFIX_TEMPLATE.format(
1254 repo_name=safe_str(self.repository.name),
1254 repo_name=safe_str(self.repository.name),
1255 short_id=self.short_id)
1255 short_id=self.short_id)
1256 elif not isinstance(archive_dir_name, str):
1256 elif not isinstance(archive_dir_name, str):
1257 raise ValueError(f"archive_dir_name is not str object but: {type(archive_dir_name)}")
1257 raise ValueError(f"archive_dir_name is not str object but: {type(archive_dir_name)}")
1258 elif archive_dir_name.startswith('/'):
1258 elif archive_dir_name.startswith('/'):
1259 raise VCSError("Prefix cannot start with leading slash")
1259 raise VCSError("Prefix cannot start with leading slash")
1260 elif archive_dir_name.strip() == '':
1260 elif archive_dir_name.strip() == '':
1261 raise VCSError("Prefix cannot be empty")
1261 raise VCSError("Prefix cannot be empty")
1262 elif not archive_dir_name.isascii():
1262 elif not archive_dir_name.isascii():
1263 raise VCSError("Prefix cannot contain non ascii characters")
1263 raise VCSError("Prefix cannot contain non ascii characters")
1264 return archive_dir_name
1264 return archive_dir_name
1265
1265
    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit.
        """
        # empty path resolves to the repository root node
        return self.get_node('')
1272
1272
1273 def next(self, branch=None):
1273 def next(self, branch=None):
1274 """
1274 """
1275 Returns next commit from current, if branch is gives it will return
1275 Returns next commit from current, if branch is gives it will return
1276 next commit belonging to this branch
1276 next commit belonging to this branch
1277
1277
1278 :param branch: show commits within the given named branch
1278 :param branch: show commits within the given named branch
1279 """
1279 """
1280 indexes = range(self.idx + 1, self.repository.count())
1280 indexes = range(self.idx + 1, self.repository.count())
1281 return self._find_next(indexes, branch)
1281 return self._find_next(indexes, branch)
1282
1282
1283 def prev(self, branch=None):
1283 def prev(self, branch=None):
1284 """
1284 """
1285 Returns previous commit from current, if branch is gives it will
1285 Returns previous commit from current, if branch is gives it will
1286 return previous commit belonging to this branch
1286 return previous commit belonging to this branch
1287
1287
1288 :param branch: show commit within the given named branch
1288 :param branch: show commit within the given named branch
1289 """
1289 """
1290 indexes = range(self.idx - 1, -1, -1)
1290 indexes = range(self.idx - 1, -1, -1)
1291 return self._find_next(indexes, branch)
1291 return self._find_next(indexes, branch)
1292
1292
1293 def _find_next(self, indexes, branch=None):
1293 def _find_next(self, indexes, branch=None):
1294 if branch and self.branch != branch:
1294 if branch and self.branch != branch:
1295 raise VCSError('Branch option used on commit not belonging '
1295 raise VCSError('Branch option used on commit not belonging '
1296 'to that branch')
1296 'to that branch')
1297
1297
1298 for next_idx in indexes:
1298 for next_idx in indexes:
1299 commit = self.repository.get_commit(commit_idx=next_idx)
1299 commit = self.repository.get_commit(commit_idx=next_idx)
1300 if branch and branch != commit.branch:
1300 if branch and branch != commit.branch:
1301 continue
1301 continue
1302 return commit
1302 return commit
1303 raise CommitDoesNotExistError
1303 raise CommitDoesNotExistError
1304
1304
1305 def diff(self, ignore_whitespace=True, context=3):
1305 def diff(self, ignore_whitespace=True, context=3):
1306 """
1306 """
1307 Returns a `Diff` object representing the change made by this commit.
1307 Returns a `Diff` object representing the change made by this commit.
1308 """
1308 """
1309 parent = self.first_parent
1309 parent = self.first_parent
1310 diff = self.repository.get_diff(
1310 diff = self.repository.get_diff(
1311 parent, self,
1311 parent, self,
1312 ignore_whitespace=ignore_whitespace,
1312 ignore_whitespace=ignore_whitespace,
1313 context=context)
1313 context=context)
1314 return diff
1314 return diff
1315
1315
    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1322
1322
    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1329
1329
    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.

        Backend subclasses must implement this.
        """
        raise NotImplementedError
1336
1336
1337 @LazyProperty
1337 @LazyProperty
1338 def size(self):
1338 def size(self):
1339 """
1339 """
1340 Returns total number of bytes from contents of all filenodes.
1340 Returns total number of bytes from contents of all filenodes.
1341 """
1341 """
1342 return sum(node.size for node in self.get_filenodes_generator())
1342 return sum(node.size for node in self.get_filenodes_generator())
1343
1343
    def walk(self, topurl=''):
        """
        Similar to os.walk method. Instead of filesystem it walks through
        commit starting at given ``topurl``. Returns generator of tuples
        (top_node, dirnodes, filenodes).
        """
        # local import to avoid a circular dependency with the nodes module
        from rhodecode.lib.vcs.nodes import DirNode

        # topurl may already be a DirNode (this is how the recursive calls
        # below pass it), otherwise resolve the path string to a node
        if isinstance(topurl, DirNode):
            top_node = topurl
        else:
            top_node = self.get_node(topurl)

        has_default_pre_load = False
        if isinstance(top_node, DirNode):
            # used to inject as we walk same defaults as given top_node
            default_pre_load = top_node.default_pre_load
            has_default_pre_load = True

        if not top_node.is_dir():
            # nothing to walk for a file node
            return
        yield top_node, top_node.dirs, top_node.files
        for dir_node in top_node.dirs:
            if has_default_pre_load:
                # propagate the top node's pre_load defaults downwards
                dir_node.default_pre_load = default_pre_load
            yield from self.walk(dir_node)
1370
1370
1371 def get_filenodes_generator(self):
1371 def get_filenodes_generator(self):
1372 """
1372 """
1373 Returns generator that yields *all* file nodes.
1373 Returns generator that yields *all* file nodes.
1374 """
1374 """
1375 for topnode, dirs, files in self.walk():
1375 for topnode, dirs, files in self.walk():
1376 yield from files
1376 yield from files
1377
1377
    #
    # Utilities for subclasses to support consistent behavior
    #
1381
1381
    def no_node_at_path(self, path):
        # Build (not raise) the NodeDoesNotExistError for a failed path
        # lookup at this commit.
        return NodeDoesNotExistError(
            f"There is no file nor directory at the given path: "
            f"`{safe_str(path)}` at commit {self.short_id}")
1386
1386
    def _fix_path(self, path: str) -> str:
        """
        Paths are stored without trailing slash so we need to get rid off it if
        needed.
        """
        return safe_str(path).rstrip('/')
1393
1393
    #
    # Deprecated API based on changesets
    #
1397
1397
    @property
    def revision(self):
        # deprecated alias for ``idx``
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx
1402
1402
    @revision.setter
    def revision(self, value):
        # deprecated alias for ``idx``
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1407
1407
    def get_file_changeset(self, path):
        # deprecated alias for ``get_path_commit``
        warnings.warn("Use get_path_commit instead", DeprecationWarning)
        return self.get_path_commit(path)
1411
1411
1412
1412
class BaseChangesetClass(type):
    # Metaclass for the deprecated BaseChangeset alias: makes
    # ``isinstance(x, BaseChangeset)`` succeed for any BaseCommit instance.

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1417
1417
1418
1418
class BaseChangeset(BaseCommit, metaclass=BaseChangesetClass):
    # Deprecated alias of BaseCommit; emits a DeprecationWarning on
    # instantiation but otherwise behaves like BaseCommit.

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super().__new__(cls, *args, **kwargs)
1425
1425
1426
1426
1427 class BaseInMemoryCommit(object):
1427 class BaseInMemoryCommit(object):
1428 """
1428 """
1429 Represents differences between repository's state (most recent head) and
1429 Represents differences between repository's state (most recent head) and
1430 changes made *in place*.
1430 changes made *in place*.
1431
1431
1432 **Attributes**
1432 **Attributes**
1433
1433
1434 ``repository``
1434 ``repository``
1435 repository object for this in-memory-commit
1435 repository object for this in-memory-commit
1436
1436
1437 ``added``
1437 ``added``
1438 list of ``FileNode`` objects marked as *added*
1438 list of ``FileNode`` objects marked as *added*
1439
1439
1440 ``changed``
1440 ``changed``
1441 list of ``FileNode`` objects marked as *changed*
1441 list of ``FileNode`` objects marked as *changed*
1442
1442
1443 ``removed``
1443 ``removed``
1444 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1444 list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
1445 *removed*
1445 *removed*
1446
1446
1447 ``parents``
1447 ``parents``
1448 list of :class:`BaseCommit` instances representing parents of
1448 list of :class:`BaseCommit` instances representing parents of
1449 in-memory commit. Should always be 2-element sequence.
1449 in-memory commit. Should always be 2-element sequence.
1450
1450
1451 """
1451 """
1452
1452
    def __init__(self, repository):
        self.repository = repository
        # nodes staged for the next commit, grouped by operation
        self.added = []
        self.changed = []
        self.removed = []
        # parent commits of the in-memory commit; filled in by check_integrity
        self.parents = []
1459
1459
1460 def add(self, *filenodes):
1460 def add(self, *filenodes):
1461 """
1461 """
1462 Marks given ``FileNode`` objects as *to be committed*.
1462 Marks given ``FileNode`` objects as *to be committed*.
1463
1463
1464 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1464 :raises ``NodeAlreadyExistsError``: if node with same path exists at
1465 latest commit
1465 latest commit
1466 :raises ``NodeAlreadyAddedError``: if node with same path is already
1466 :raises ``NodeAlreadyAddedError``: if node with same path is already
1467 marked as *added*
1467 marked as *added*
1468 """
1468 """
1469 # Check if not already marked as *added* first
1469 # Check if not already marked as *added* first
1470 for node in filenodes:
1470 for node in filenodes:
1471 if node.path in (n.path for n in self.added):
1471 if node.path in (n.path for n in self.added):
1472 raise NodeAlreadyAddedError(
1472 raise NodeAlreadyAddedError(
1473 "Such FileNode %s is already marked for addition"
1473 "Such FileNode %s is already marked for addition"
1474 % node.path)
1474 % node.path)
1475 for node in filenodes:
1475 for node in filenodes:
1476 self.added.append(node)
1476 self.added.append(node)
1477
1477
    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
            marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
            marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
            commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        # reject nodes already queued for removal before mutating any state
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            # re-raise with an actionable message
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)
1506
1506
1507 def remove(self, *filenodes):
1507 def remove(self, *filenodes):
1508 """
1508 """
1509 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1509 Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
1510 *removed* in next commit.
1510 *removed* in next commit.
1511
1511
1512 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1512 :raises ``NodeAlreadyRemovedError``: if node has been already marked to
1513 be *removed*
1513 be *removed*
1514 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1514 :raises ``NodeAlreadyChangedError``: if node has been already marked to
1515 be *changed*
1515 be *changed*
1516 """
1516 """
1517 for node in filenodes:
1517 for node in filenodes:
1518 if node.path in (n.path for n in self.removed):
1518 if node.path in (n.path for n in self.removed):
1519 raise NodeAlreadyRemovedError(
1519 raise NodeAlreadyRemovedError(
1520 "Node is already marked to for removal at %s" % node.path)
1520 "Node is already marked to for removal at %s" % node.path)
1521 if node.path in (n.path for n in self.changed):
1521 if node.path in (n.path for n in self.changed):
1522 raise NodeAlreadyChangedError(
1522 raise NodeAlreadyChangedError(
1523 "Node is already marked to be changed at %s" % node.path)
1523 "Node is already marked to be changed at %s" % node.path)
1524 # We only mark node as *removed* - real removal is done by
1524 # We only mark node as *removed* - real removal is done by
1525 # commit method
1525 # commit method
1526 self.removed.append(node)
1526 self.removed.append(node)
1527
1527
1528 def reset(self):
1528 def reset(self):
1529 """
1529 """
1530 Resets this instance to initial state (cleans ``added``, ``changed``
1530 Resets this instance to initial state (cleans ``added``, ``changed``
1531 and ``removed`` lists).
1531 and ``removed`` lists).
1532 """
1532 """
1533 self.added = []
1533 self.added = []
1534 self.changed = []
1534 self.changed = []
1535 self.removed = []
1535 self.removed = []
1536 self.parents = []
1536 self.parents = []
1537
1537
1538 def get_ipaths(self):
1538 def get_ipaths(self):
1539 """
1539 """
1540 Returns generator of paths from nodes marked as added, changed or
1540 Returns generator of paths from nodes marked as added, changed or
1541 removed.
1541 removed.
1542 """
1542 """
1543 for node in itertools.chain(self.added, self.changed, self.removed):
1543 for node in itertools.chain(self.added, self.changed, self.removed):
1544 yield node.path
1544 yield node.path
1545
1545
1546 def get_paths(self):
1546 def get_paths(self):
1547 """
1547 """
1548 Returns list of paths from nodes marked as added, changed or removed.
1548 Returns list of paths from nodes marked as added, changed or removed.
1549 """
1549 """
1550 return list(self.get_ipaths())
1550 return list(self.get_ipaths())
1551
1551
def check_integrity(self, parents=None):
    """
    Checks in-memory commit's integrity. Also, sets parents if not
    already set.

    :param parents: optional sequence of parent commits; when this
        instance has no parents yet, they are resolved here (falling
        back to the repository tip, or ``[None, None]`` for an empty
        repository) and stored on ``self.parents``.
    :raises CommitError: if any error occurs (i.e.
        ``NodeDoesNotExistError``).
    """
    if not self.parents:
        parents = parents or []
        if len(parents) == 0:
            try:
                parents = [self.repository.get_commit(), None]
            except EmptyRepositoryError:
                parents = [None, None]
        elif len(parents) == 1:
            parents += [None]
        self.parents = parents

    # Local parents, only if not None
    parents = [p for p in self.parents if p]

    # Check nodes marked as added: they must not already exist in any parent
    for p in parents:
        for node in self.added:
            try:
                p.get_node(node.path)
            except NodeDoesNotExistError:
                pass
            else:
                raise NodeAlreadyExistsError(
                    f"Node `{node.path}` already exists at {p}")

    # Check nodes marked as changed: each must exist in at least one
    # parent, and its content must actually differ from the parent's
    missing = set(self.changed)
    not_changed = set(self.changed)
    if self.changed and not parents:
        raise NodeDoesNotExistError(str(self.changed[0].path))
    for p in parents:
        for node in self.changed:
            try:
                old = p.get_node(node.path)
                missing.remove(node)
                # if content actually changed, remove node from not_changed
                if old.content != node.content:
                    not_changed.remove(node)
            except NodeDoesNotExistError:
                pass
    if self.changed and missing:
        # report a node that is actually missing; previously this used the
        # loop variable `node`, i.e. whichever node happened to be visited
        # last, which could name a node that was in fact found
        raise NodeDoesNotExistError(
            "Node `%s` marked as modified but missing in parents: %s"
            % (missing.pop().path, parents))

    if self.changed and not_changed:
        raise NodeNotChangedError(
            "Node `%s` wasn't actually changed (parents: %s)"
            % (not_changed.pop().path, parents))

    # Check nodes marked as removed: each must exist in some parent
    if self.removed and not parents:
        raise NodeDoesNotExistError(
            "Cannot remove node at %s as there "
            "were no parents specified" % self.removed[0].path)
    really_removed = set()
    for p in parents:
        for node in self.removed:
            try:
                p.get_node(node.path)
                really_removed.add(node)
            except CommitError:
                pass
    not_removed = set(self.removed) - really_removed
    if not_removed:
        # TODO: johbo: This code branch does not seem to be covered
        raise NodeDoesNotExistError(
            "Cannot remove node at %s from "
            "following parents: %s" % (not_removed, parents))
1629
1629
def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
    """
    Performs in-memory commit (doesn't check workdir in any way) and
    returns newly created :class:`BaseCommit`. Updates repository's
    attribute `commits`.

    .. note::

        Backends overriding this method should call
        ``self.check_integrity(parents)`` before anything else.

    :param message: message of the commit
    :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
    :param parents: single parent or sequence of parents from which commit
        would be derived
    :param date: ``datetime.datetime`` instance. Defaults to
        ``datetime.datetime.now()``.
    :param branch: branch name, as string. If none given, default backend's
        branch would be used.

    :raises ``CommitError``: if any error occurs while committing
    """
    raise NotImplementedError
1653
1653
1654
1654
class BaseInMemoryChangesetClass(type):
    """
    Metaclass making ``isinstance(x, BaseInMemoryChangeset)`` succeed
    for any :class:`BaseInMemoryCommit` instance (legacy-name
    compatibility).
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1659
1659
1660
1660
class BaseInMemoryChangeset(BaseInMemoryCommit, metaclass=BaseInMemoryChangesetClass):
    """
    Deprecated alias kept for backwards compatibility; use
    :class:`BaseInMemoryCommit` instead.
    """

    def __new__(cls, *args, **kwargs):
        # warn on every instantiation of the legacy name
        warnings.warn(
            "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
        return super().__new__(cls, *args, **kwargs)
1667
1667
1668
1668
class EmptyCommit(BaseCommit):
    """
    A dummy, contentless commit. A specific commit hash may be supplied
    when creating an EmptyCommit.
    """

    def __init__(
            self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        # default to the epoch so an EmptyCommit is deterministic
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Raw string identifier of this commit (the configured empty id),
        useful for web representation.
        """
        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # resolve the backend's default branch name when an alias is known;
        # returns None otherwise
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        # first 12 characters of the raw id
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_path_commit(self, path, pre_load=None):
        # the empty commit is its own "last commit" for any path
        return self

    def get_file_content(self, path) -> bytes:
        return b''

    def get_file_content_streamed(self, path):
        yield self.get_file_content(path)

    def get_file_size(self, path):
        return 0
1722
1722
1723
1723
class EmptyChangesetClass(type):
    """
    Metaclass making ``isinstance(x, EmptyChangeset)`` succeed for any
    :class:`EmptyCommit` instance (legacy-name compatibility).
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, EmptyCommit)
1728
1728
1729
1729
class EmptyChangeset(EmptyCommit, metaclass=EmptyChangesetClass):
    """
    Deprecated alias of :class:`EmptyCommit`; kept only for backwards
    compatibility.
    """

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # NOTE(review): super(EmptyCommit, cls) skips EmptyCommit in the
        # MRO and delegates straight to its base — preserved as-is
        return super(EmptyCommit, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super().__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        """Deprecated accessor for :attr:`idx`."""
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1756
1756
1757
1757
class EmptyRepository(BaseRepository):
    """Null-object repository; all constructor arguments are ignored."""

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        pass

    def get_diff(self, *args, **kwargs):
        """Return an empty GIT diff regardless of the arguments."""
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff(b'')
1765
1765
1766
1766
class CollectionGenerator(object):
    """
    Lazy sequence of commits backed by a list of commit ids.

    Commits are instantiated on demand while iterating; slicing or
    indexing returns a new generator over the selected ids.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
        self.repo = repo
        self.commit_ids = commit_ids
        self.collection_size = collection_size
        self.pre_load = pre_load
        self.translate_tag = translate_tag

    def __len__(self):
        # an explicitly provided size wins over the backing id list
        if self.collection_size is None:
            return len(self.commit_ids)
        return self.collection_size

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __getitem__(self, key):
        """Return either a single element by index, or a sliced collection."""
        if isinstance(key, slice):
            # NOTE: the slice step is ignored, matching historic behavior
            selected_ids = self.commit_ids[key.start:key.stop]
        else:
            # single item
            selected_ids = self.commit_ids[key]
        return self.__class__(
            self.repo, selected_ids, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __repr__(self):
        return f'<CollectionGenerator[len:{len(self)}]>'
1810
1810
1811
1811
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        # section name -> {option: value}
        self._values = {}

    def copy(self):
        """Return an independent copy (each section dict is duplicated)."""
        clone = Config()
        clone._values = {
            section: values.copy()
            for section, values in self._values.items()}
        return clone

    def __repr__(self):
        return f'<Config({len(self._values)} sections) at {hex(id(self))}>'

    def items(self, section):
        """Return (option, value) pairs of *section*; empty if absent."""
        return self._values.get(section, {}).items()

    def get(self, section, option):
        """Return the value stored under *section*/*option*, or ``None``."""
        section_values = self._values.get(section, {})
        return section_values.get(option)

    def set(self, section, option, value):
        """Store *value* under *section*/*option*, creating the section."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop all options stored for *section*."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        return [
            (safe_str(section), safe_str(option), safe_str(value))
            for section in self._values
            for option, value in self._values[section].items()]
1857
1857
1858
1858
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    # fixed annotation: this holds a compiled bytes pattern, not bytes
    _header_re: re.Pattern[bytes] = re.compile(br"")

    def __init__(self, raw_diff: bytes):
        if not isinstance(raw_diff, bytes):
            raise Exception(f'raw_diff must be bytes - got {type(raw_diff)}')

        # memoryview avoids copying the (potentially large) raw diff
        self.raw = memoryview(raw_diff)

    def get_header_re(self):
        """Return the compiled per-backend chunk header regex."""
        return self._header_re

    def chunks(self):
        """
        split the diff in chunks of separate --git a/file b/file chunks
        to make diffs consistent we must prepend with \n, and make sure
        we can detect the last chunk, as it also has a special rule
        """

        diff_parts = (b'\n' + bytes(self.raw)).split(b'\ndiff --git')

        chunks = diff_parts[1:]
        total_chunks = len(chunks)

        def diff_iter(_chunks):
            for cur_chunk, chunk in enumerate(_chunks, start=1):
                yield DiffChunk(chunk, self, cur_chunk == total_chunks)
        return diff_iter(chunks)
1893 return diff_iter(chunks)
1894
1894
1895
1895
class DiffChunk(object):
    """
    One ``diff --git`` section of a :class:`Diff`, split into parsed
    header fields and the remaining diff body.
    """

    def __init__(self, chunk: bytes, diff_obj: Diff, is_last_chunk: bool):
        self.diff_obj = diff_obj

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not is_last_chunk:
            chunk += b'\n'
        match = self.diff_obj.get_header_re().match(chunk)
        self.header = match.groupdict()
        self.diff = chunk[match.end():]
        self.raw = chunk

    @property
    def header_as_str(self):
        """Header dict with keys (and bytes values) coerced to ``str``."""
        if not self.header:
            # empty/missing header -> None, matching historic behavior
            return None

        def _to_str(val):
            return safe_str(val) if isinstance(val, bytes) else val

        return {safe_str(key): _to_str(val)
                for key, val in self.header.items()}

    def __repr__(self):
        return f'DiffChunk({self.header_as_str})'
1921 return f'DiffChunk({self.header_as_str})'
1922
1922
1923
1923
class BasePathPermissionChecker(object):
    """
    Interface for path-level permission checks inside a repository.
    """

    @staticmethod
    def create_from_patterns(includes, excludes):
        """
        Pick the cheapest checker implementation for the given
        include/exclude glob patterns.
        """
        if includes and '*' in includes and not excludes:
            # everything allowed, nothing excluded
            return AllPathPermissionChecker()
        if excludes and '*' in excludes:
            # everything excluded
            return NonePathPermissionChecker()
        return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        raise NotImplementedError()

    def has_access(self, path):
        raise NotImplementedError()
1941
1941
1942
1942
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Checker that grants access to every path."""

    @property
    def has_full_access(self):
        return True

    def has_access(self, path):
        return True
1951
1951
1952
1952
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Checker that denies access to every path."""

    @property
    def has_full_access(self):
        return False

    def has_access(self, path):
        return False
1961
1961
1962
1962
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """
    Checker driven by fnmatch-style include/exclude patterns; excludes
    win over includes, and a path matching neither list is denied.
    """

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        # pre-compile the glob patterns once; None/empty means no patterns
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (includes or [])]
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (excludes or [])]

    @property
    def has_full_access(self):
        # NOTE(review): assumes ``includes`` is a container here; a None
        # value would raise TypeError (same as the original code)
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        # excludes take precedence over includes
        for pattern in self.excludes_re:
            if pattern.match(path):
                return False
        return any(pattern.match(path) for pattern in self.includes_re)
@@ -1,1053 +1,1054 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 GIT repository module
20 GIT repository module
21 """
21 """
22
22
23 import logging
23 import logging
24 import os
24 import os
25 import re
25 import re
26
26
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from collections import OrderedDict
29 from collections import OrderedDict
30 from rhodecode.lib.datelib import (
30 from rhodecode.lib.datelib import (
31 utcdate_fromtimestamp, makedate, date_astimestamp)
31 utcdate_fromtimestamp, makedate, date_astimestamp)
32 from rhodecode.lib.hash_utils import safe_str
32 from rhodecode.lib.hash_utils import safe_str
33 from rhodecode.lib.utils2 import CachedProperty
33 from rhodecode.lib.utils2 import CachedProperty
34 from rhodecode.lib.vcs import connection, path as vcspath
34 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs.backends.base import (
35 from rhodecode.lib.vcs.backends.base import (
36 BaseRepository, CollectionGenerator, Config, MergeResponse,
36 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 MergeFailureReason, Reference)
37 MergeFailureReason, Reference)
38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
42 CommitDoesNotExistError, EmptyRepositoryError,
42 CommitDoesNotExistError, EmptyRepositoryError,
43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
44
44
45
45
# Accepts either a short (12 hex chars) or a full (40 hex chars) git SHA.
SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')

log = logging.getLogger(__name__)
49
49
50
50
51 class GitRepository(BaseRepository):
51 class GitRepository(BaseRepository):
52 """
52 """
53 Git repository backend.
53 Git repository backend.
54 """
54 """
55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
57
57
58 contact = BaseRepository.DEFAULT_CONTACT
58 contact = BaseRepository.DEFAULT_CONTACT
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62
62
63 self.path = safe_str(os.path.abspath(repo_path))
63 self.path = safe_str(os.path.abspath(repo_path))
64 self.config = config if config else self.get_default_config()
64 self.config = config if config else self.get_default_config()
65 self.with_wire = with_wire or {"cache": False} # default should not use cache
65 self.with_wire = with_wire or {"cache": False} # default should not use cache
66
66
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
68
68
69 # caches
69 # caches
70 self._commit_ids = {}
70 self._commit_ids = {}
71
71
72 @LazyProperty
72 @LazyProperty
73 def _remote(self):
73 def _remote(self):
74 repo_id = self.path
74 repo_id = self.path
75 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
75 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76
76
77 @LazyProperty
77 @LazyProperty
78 def bare(self):
78 def bare(self):
79 return self._remote.bare()
79 return self._remote.bare()
80
80
81 @LazyProperty
81 @LazyProperty
82 def head(self):
82 def head(self):
83 return self._remote.head()
83 return self._remote.head()
84
84
85 @CachedProperty
85 @CachedProperty
86 def commit_ids(self):
86 def commit_ids(self):
87 """
87 """
88 Returns list of commit ids, in ascending order. Being lazy
88 Returns list of commit ids, in ascending order. Being lazy
89 attribute allows external tools to inject commit ids from cache.
89 attribute allows external tools to inject commit ids from cache.
90 """
90 """
91 commit_ids = self._get_all_commit_ids()
91 commit_ids = self._get_all_commit_ids()
92 self._rebuild_cache(commit_ids)
92 self._rebuild_cache(commit_ids)
93 return commit_ids
93 return commit_ids
94
94
95 def _rebuild_cache(self, commit_ids):
95 def _rebuild_cache(self, commit_ids):
96 self._commit_ids = {commit_id: index
96 self._commit_ids = {commit_id: index
97 for index, commit_id in enumerate(commit_ids)}
97 for index, commit_id in enumerate(commit_ids)}
98
98
99 def run_git_command(self, cmd, **opts):
99 def run_git_command(self, cmd, **opts):
100 """
100 """
101 Runs given ``cmd`` as git command and returns tuple
101 Runs given ``cmd`` as git command and returns tuple
102 (stdout, stderr).
102 (stdout, stderr).
103
103
104 :param cmd: git command to be executed
104 :param cmd: git command to be executed
105 :param opts: env options to pass into Subprocess command
105 :param opts: env options to pass into Subprocess command
106 """
106 """
107 if not isinstance(cmd, list):
107 if not isinstance(cmd, list):
108 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
108 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
109
109
110 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 out, err = self._remote.run_git_command(cmd, **opts)
111 out, err = self._remote.run_git_command(cmd, **opts)
112 if err and not skip_stderr_log:
112 if err and not skip_stderr_log:
113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 return out, err
114 return out, err
115
115
116 @staticmethod
116 @staticmethod
117 def check_url(url, config):
117 def check_url(url, config):
118 """
118 """
119 Function will check given url and try to verify if it's a valid
119 Function will check given url and try to verify if it's a valid
120 link. Sometimes it may happened that git will issue basic
120 link. Sometimes it may happened that git will issue basic
121 auth request that can cause whole API to hang when used from python
121 auth request that can cause whole API to hang when used from python
122 or other external calls.
122 or other external calls.
123
123
124 On failures it'll raise urllib2.HTTPError, exception is also thrown
124 On failures it'll raise urllib2.HTTPError, exception is also thrown
125 when the return code is non 200
125 when the return code is non 200
126 """
126 """
127 # check first if it's not an url
127 # check first if it's not an url
128 if os.path.isdir(url) or url.startswith('file:'):
128 if os.path.isdir(url) or url.startswith('file:'):
129 return True
129 return True
130
130
131 if '+' in url.split('://', 1)[0]:
131 if '+' in url.split('://', 1)[0]:
132 url = url.split('+', 1)[1]
132 url = url.split('+', 1)[1]
133
133
134 # Request the _remote to verify the url
134 # Request the _remote to verify the url
135 return connection.Git.check_url(url, config.serialize())
135 return connection.Git.check_url(url, config.serialize())
136
136
137 @staticmethod
137 @staticmethod
138 def is_valid_repository(path):
138 def is_valid_repository(path):
139 if os.path.isdir(os.path.join(path, '.git')):
139 if os.path.isdir(os.path.join(path, '.git')):
140 return True
140 return True
141 # check case of bare repository
141 # check case of bare repository
142 try:
142 try:
143 GitRepository(path)
143 GitRepository(path)
144 return True
144 return True
145 except VCSError:
145 except VCSError:
146 pass
146 pass
147 return False
147 return False
148
148
149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
150 bare=False):
150 bare=False):
151 if create and os.path.exists(self.path):
151 if create and os.path.exists(self.path):
152 raise RepositoryError(
152 raise RepositoryError(
153 "Cannot create repository at %s, location already exist"
153 "Cannot create repository at %s, location already exist"
154 % self.path)
154 % self.path)
155
155
156 if bare and do_workspace_checkout:
156 if bare and do_workspace_checkout:
157 raise RepositoryError("Cannot update a bare repository")
157 raise RepositoryError("Cannot update a bare repository")
158 try:
158 try:
159
159
160 if src_url:
160 if src_url:
161 # check URL before any actions
161 # check URL before any actions
162 GitRepository.check_url(src_url, self.config)
162 GitRepository.check_url(src_url, self.config)
163
163
164 if create:
164 if create:
165 os.makedirs(self.path, mode=0o755)
165 os.makedirs(self.path, mode=0o755)
166
166
167 if bare:
167 if bare:
168 self._remote.init_bare()
168 self._remote.init_bare()
169 else:
169 else:
170 self._remote.init()
170 self._remote.init()
171
171
172 if src_url and bare:
172 if src_url and bare:
173 # bare repository only allows a fetch and checkout is not allowed
173 # bare repository only allows a fetch and checkout is not allowed
174 self.fetch(src_url, commit_ids=None)
174 self.fetch(src_url, commit_ids=None)
175 elif src_url:
175 elif src_url:
176 self.pull(src_url, commit_ids=None,
176 self.pull(src_url, commit_ids=None,
177 update_after=do_workspace_checkout)
177 update_after=do_workspace_checkout)
178
178
179 else:
179 else:
180 if not self._remote.assert_correct_path():
180 if not self._remote.assert_correct_path():
181 raise RepositoryError(
181 raise RepositoryError(
182 'Path "%s" does not contain a Git repository' %
182 'Path "%s" does not contain a Git repository' %
183 (self.path,))
183 (self.path,))
184
184
185 # TODO: johbo: check if we have to translate the OSError here
185 # TODO: johbo: check if we have to translate the OSError here
186 except OSError as err:
186 except OSError as err:
187 raise RepositoryError(err)
187 raise RepositoryError(err)
188
188
189 def _get_all_commit_ids(self):
189 def _get_all_commit_ids(self):
190 return self._remote.get_all_commit_ids()
190 return self._remote.get_all_commit_ids()
191
191
192 def _get_commit_ids(self, filters=None):
192 def _get_commit_ids(self, filters=None):
193 # we must check if this repo is not empty, since later command
193 # we must check if this repo is not empty, since later command
194 # fails if it is. And it's cheaper to ask than throw the subprocess
194 # fails if it is. And it's cheaper to ask than throw the subprocess
195 # errors
195 # errors
196
196
197 head = self._remote.head(show_exc=False)
197 head = self._remote.head(show_exc=False)
198
198
199 if not head:
199 if not head:
200 return []
200 return []
201
201
202 rev_filter = ['--branches', '--tags']
202 rev_filter = ['--branches', '--tags']
203 extra_filter = []
203 extra_filter = []
204
204
205 if filters:
205 if filters:
206 if filters.get('since'):
206 if filters.get('since'):
207 extra_filter.append('--since=%s' % (filters['since']))
207 extra_filter.append('--since=%s' % (filters['since']))
208 if filters.get('until'):
208 if filters.get('until'):
209 extra_filter.append('--until=%s' % (filters['until']))
209 extra_filter.append('--until=%s' % (filters['until']))
210 if filters.get('branch_name'):
210 if filters.get('branch_name'):
211 rev_filter = []
211 rev_filter = []
212 extra_filter.append(filters['branch_name'])
212 extra_filter.append(filters['branch_name'])
213 rev_filter.extend(extra_filter)
213 rev_filter.extend(extra_filter)
214
214
215 # if filters.get('start') or filters.get('end'):
215 # if filters.get('start') or filters.get('end'):
216 # # skip is offset, max-count is limit
216 # # skip is offset, max-count is limit
217 # if filters.get('start'):
217 # if filters.get('start'):
218 # extra_filter += ' --skip=%s' % filters['start']
218 # extra_filter += ' --skip=%s' % filters['start']
219 # if filters.get('end'):
219 # if filters.get('end'):
220 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
220 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
221
221
222 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
222 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
223 try:
223 try:
224 output, __ = self.run_git_command(cmd)
224 output, __ = self.run_git_command(cmd)
225 except RepositoryError:
225 except RepositoryError:
226 # Can be raised for empty repositories
226 # Can be raised for empty repositories
227 return []
227 return []
228 return output.splitlines()
228 return output.splitlines()
229
229
230 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
230 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
231
231
232 def is_null(value):
232 def is_null(value):
233 return len(value) == commit_id_or_idx.count('0')
233 return len(value) == commit_id_or_idx.count('0')
234
234
235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
235 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
236 return self.commit_ids[-1]
236 return self.commit_ids[-1]
237
237
238 commit_missing_err = "Commit {} does not exist for `{}`".format(
238 commit_missing_err = "Commit {} does not exist for `{}`".format(
239 *map(safe_str, [commit_id_or_idx, self.name]))
239 *map(safe_str, [commit_id_or_idx, self.name]))
240
240
241 is_bstr = isinstance(commit_id_or_idx, str)
241 is_bstr = isinstance(commit_id_or_idx, str)
242 is_branch = reference_obj and reference_obj.branch
242 is_branch = reference_obj and reference_obj.branch
243
243
244 lookup_ok = False
244 lookup_ok = False
245 if is_bstr:
245 if is_bstr:
246 # Need to call remote to translate id for tagging scenarios,
246 # Need to call remote to translate id for tagging scenarios,
247 # or branch that are numeric
247 # or branch that are numeric
248 try:
248 try:
249 remote_data = self._remote.get_object(commit_id_or_idx,
249 remote_data = self._remote.get_object(commit_id_or_idx,
250 maybe_unreachable=maybe_unreachable)
250 maybe_unreachable=maybe_unreachable)
251 commit_id_or_idx = remote_data["commit_id"]
251 commit_id_or_idx = remote_data["commit_id"]
252 lookup_ok = True
252 lookup_ok = True
253 except (CommitDoesNotExistError,):
253 except (CommitDoesNotExistError,):
254 lookup_ok = False
254 lookup_ok = False
255
255
256 if lookup_ok is False:
256 if lookup_ok is False:
257 is_numeric_idx = \
257 is_numeric_idx = \
258 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
258 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
259 or isinstance(commit_id_or_idx, int)
259 or isinstance(commit_id_or_idx, int)
260 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
260 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
261 try:
261 try:
262 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
262 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
263 lookup_ok = True
263 lookup_ok = True
264 except Exception:
264 except Exception:
265 raise CommitDoesNotExistError(commit_missing_err)
265 raise CommitDoesNotExistError(commit_missing_err)
266
266
267 # we failed regular lookup, and by integer number lookup
267 # we failed regular lookup, and by integer number lookup
268 if lookup_ok is False:
268 if lookup_ok is False:
269 raise CommitDoesNotExistError(commit_missing_err)
269 raise CommitDoesNotExistError(commit_missing_err)
270
270
271 # Ensure we return full id
271 # Ensure we return full id
272 if not SHA_PATTERN.match(str(commit_id_or_idx)):
272 if not SHA_PATTERN.match(str(commit_id_or_idx)):
273 raise CommitDoesNotExistError(
273 raise CommitDoesNotExistError(
274 "Given commit id %s not recognized" % commit_id_or_idx)
274 "Given commit id %s not recognized" % commit_id_or_idx)
275 return commit_id_or_idx
275 return commit_id_or_idx
276
276
277 def get_hook_location(self):
277 def get_hook_location(self):
278 """
278 """
279 returns absolute path to location where hooks are stored
279 returns absolute path to location where hooks are stored
280 """
280 """
281 loc = os.path.join(self.path, 'hooks')
281 loc = os.path.join(self.path, 'hooks')
282 if not self.bare:
282 if not self.bare:
283 loc = os.path.join(self.path, '.git', 'hooks')
283 loc = os.path.join(self.path, '.git', 'hooks')
284 return loc
284 return loc
285
285
286 @LazyProperty
286 @LazyProperty
287 def last_change(self):
287 def last_change(self):
288 """
288 """
289 Returns last change made on this repository as
289 Returns last change made on this repository as
290 `datetime.datetime` object.
290 `datetime.datetime` object.
291 """
291 """
292 try:
292 try:
293 return self.get_commit().date
293 return self.get_commit().date
294 except RepositoryError:
294 except RepositoryError:
295 tzoffset = makedate()[1]
295 tzoffset = makedate()[1]
296 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
296 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
297
297
298 def _get_fs_mtime(self):
298 def _get_fs_mtime(self):
299 idx_loc = '' if self.bare else '.git'
299 idx_loc = '' if self.bare else '.git'
300 # fallback to filesystem
300 # fallback to filesystem
301 in_path = os.path.join(self.path, idx_loc, "index")
301 in_path = os.path.join(self.path, idx_loc, "index")
302 he_path = os.path.join(self.path, idx_loc, "HEAD")
302 he_path = os.path.join(self.path, idx_loc, "HEAD")
303 if os.path.exists(in_path):
303 if os.path.exists(in_path):
304 return os.stat(in_path).st_mtime
304 return os.stat(in_path).st_mtime
305 else:
305 else:
306 return os.stat(he_path).st_mtime
306 return os.stat(he_path).st_mtime
307
307
308 @LazyProperty
308 @LazyProperty
309 def description(self):
309 def description(self):
310 description = self._remote.get_description()
310 description = self._remote.get_description()
311 return safe_str(description or self.DEFAULT_DESCRIPTION)
311 return safe_str(description or self.DEFAULT_DESCRIPTION)
312
312
313 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
313 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
314 if self.is_empty():
314 if self.is_empty():
315 return OrderedDict()
315 return OrderedDict()
316
316
317 result = []
317 result = []
318 for ref, sha in self._refs.items():
318 for ref, sha in self._refs.items():
319 if ref.startswith(prefix):
319 if ref.startswith(prefix):
320 ref_name = ref
320 ref_name = ref
321 if strip_prefix:
321 if strip_prefix:
322 ref_name = ref[len(prefix):]
322 ref_name = ref[len(prefix):]
323 result.append((safe_str(ref_name), sha))
323 result.append((safe_str(ref_name), sha))
324
324
325 def get_name(entry):
325 def get_name(entry):
326 return entry[0]
326 return entry[0]
327
327
328 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
328 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
329
329
330 def _get_branches(self):
330 def _get_branches(self):
331 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
331 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
332
332
333 @CachedProperty
333 @CachedProperty
334 def branches(self):
334 def branches(self):
335 return self._get_branches()
335 return self._get_branches()
336
336
337 @CachedProperty
337 @CachedProperty
338 def branches_closed(self):
338 def branches_closed(self):
339 return {}
339 return {}
340
340
341 @CachedProperty
341 @CachedProperty
342 def bookmarks(self):
342 def bookmarks(self):
343 return {}
343 return {}
344
344
345 @CachedProperty
345 @CachedProperty
346 def branches_all(self):
346 def branches_all(self):
347 all_branches = {}
347 all_branches = {}
348 all_branches.update(self.branches)
348 all_branches.update(self.branches)
349 all_branches.update(self.branches_closed)
349 all_branches.update(self.branches_closed)
350 return all_branches
350 return all_branches
351
351
352 @CachedProperty
352 @CachedProperty
353 def tags(self):
353 def tags(self):
354 return self._get_tags()
354 return self._get_tags()
355
355
356 def _get_tags(self):
356 def _get_tags(self):
357 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
357 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
358
358
359 def tag(self, name, user, commit_id=None, message=None, date=None,
359 def tag(self, name, user, commit_id=None, message=None, date=None,
360 **kwargs):
360 **kwargs):
361 # TODO: fix this method to apply annotated tags correct with message
361 # TODO: fix this method to apply annotated tags correct with message
362 """
362 """
363 Creates and returns a tag for the given ``commit_id``.
363 Creates and returns a tag for the given ``commit_id``.
364
364
365 :param name: name for new tag
365 :param name: name for new tag
366 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
366 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
367 :param commit_id: commit id for which new tag would be created
367 :param commit_id: commit id for which new tag would be created
368 :param message: message of the tag's commit
368 :param message: message of the tag's commit
369 :param date: date of tag's commit
369 :param date: date of tag's commit
370
370
371 :raises TagAlreadyExistError: if tag with same name already exists
371 :raises TagAlreadyExistError: if tag with same name already exists
372 """
372 """
373 if name in self.tags:
373 if name in self.tags:
374 raise TagAlreadyExistError("Tag %s already exists" % name)
374 raise TagAlreadyExistError("Tag %s already exists" % name)
375 commit = self.get_commit(commit_id=commit_id)
375 commit = self.get_commit(commit_id=commit_id)
376 message = message or f"Added tag {name} for commit {commit.raw_id}"
376 message = message or f"Added tag {name} for commit {commit.raw_id}"
377
377
378 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
378 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
379
379
380 self._invalidate_prop_cache('tags')
380 self._invalidate_prop_cache('tags')
381 self._invalidate_prop_cache('_refs')
381 self._invalidate_prop_cache('_refs')
382
382
383 return commit
383 return commit
384
384
385 def remove_tag(self, name, user, message=None, date=None):
385 def remove_tag(self, name, user, message=None, date=None):
386 """
386 """
387 Removes tag with the given ``name``.
387 Removes tag with the given ``name``.
388
388
389 :param name: name of the tag to be removed
389 :param name: name of the tag to be removed
390 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
390 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
391 :param message: message of the tag's removal commit
391 :param message: message of the tag's removal commit
392 :param date: date of tag's removal commit
392 :param date: date of tag's removal commit
393
393
394 :raises TagDoesNotExistError: if tag with given name does not exists
394 :raises TagDoesNotExistError: if tag with given name does not exists
395 """
395 """
396 if name not in self.tags:
396 if name not in self.tags:
397 raise TagDoesNotExistError("Tag %s does not exist" % name)
397 raise TagDoesNotExistError("Tag %s does not exist" % name)
398
398
399 self._remote.tag_remove(name)
399 self._remote.tag_remove(name)
400 self._invalidate_prop_cache('tags')
400 self._invalidate_prop_cache('tags')
401 self._invalidate_prop_cache('_refs')
401 self._invalidate_prop_cache('_refs')
402
402
403 def _get_refs(self):
403 def _get_refs(self):
404 return self._remote.get_refs()
404 return self._remote.get_refs()
405
405
406 @CachedProperty
406 @CachedProperty
407 def _refs(self):
407 def _refs(self):
408 return self._get_refs()
408 return self._get_refs()
409
409
410 @property
410 @property
411 def _ref_tree(self):
411 def _ref_tree(self):
412 node = tree = {}
412 node = tree = {}
413 for ref, sha in self._refs.items():
413 for ref, sha in self._refs.items():
414 path = ref.split('/')
414 path = ref.split('/')
415 for bit in path[:-1]:
415 for bit in path[:-1]:
416 node = node.setdefault(bit, {})
416 node = node.setdefault(bit, {})
417 node[path[-1]] = sha
417 node[path[-1]] = sha
418 node = tree
418 node = tree
419 return tree
419 return tree
420
420
421 def get_remote_ref(self, ref_name):
421 def get_remote_ref(self, ref_name):
422 ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
422 ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
423 try:
423 try:
424 return self._refs[ref_key]
424 return self._refs[ref_key]
425 except Exception:
425 except Exception:
426 return
426 return
427
427
428 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
428 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
429 translate_tag=True, maybe_unreachable=False, reference_obj=None):
429 translate_tag=True, maybe_unreachable=False, reference_obj=None):
430 """
430 """
431 Returns `GitCommit` object representing commit from git repository
431 Returns `GitCommit` object representing commit from git repository
432 at the given `commit_id` or head (most recent commit) if None given.
432 at the given `commit_id` or head (most recent commit) if None given.
433 """
433 """
434
434
435 if self.is_empty():
435 if self.is_empty():
436 raise EmptyRepositoryError("There are no commits yet")
436 raise EmptyRepositoryError("There are no commits yet")
437
437
438 if commit_id is not None:
438 if commit_id is not None:
439 self._validate_commit_id(commit_id)
439 self._validate_commit_id(commit_id)
440 try:
440 try:
441 # we have cached idx, use it without contacting the remote
441 # we have cached idx, use it without contacting the remote
442 idx = self._commit_ids[commit_id]
442 idx = self._commit_ids[commit_id]
443 return GitCommit(self, commit_id, idx, pre_load=pre_load)
443 return GitCommit(self, commit_id, idx, pre_load=pre_load)
444 except KeyError:
444 except KeyError:
445 pass
445 pass
446
446
447 elif commit_idx is not None:
447 elif commit_idx is not None:
448 self._validate_commit_idx(commit_idx)
448 self._validate_commit_idx(commit_idx)
449 try:
449 try:
450 _commit_id = self.commit_ids[commit_idx]
450 _commit_id = self.commit_ids[commit_idx]
451 if commit_idx < 0:
451 if commit_idx < 0:
452 commit_idx = self.commit_ids.index(_commit_id)
452 commit_idx = self.commit_ids.index(_commit_id)
453 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
453 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
454 except IndexError:
454 except IndexError:
455 commit_id = commit_idx
455 commit_id = commit_idx
456 else:
456 else:
457 commit_id = "tip"
457 commit_id = "tip"
458
458
459 if translate_tag:
459 if translate_tag:
460 commit_id = self._lookup_commit(
460 commit_id = self._lookup_commit(
461 commit_id, maybe_unreachable=maybe_unreachable,
461 commit_id, maybe_unreachable=maybe_unreachable,
462 reference_obj=reference_obj)
462 reference_obj=reference_obj)
463
463
464 try:
464 try:
465 idx = self._commit_ids[commit_id]
465 idx = self._commit_ids[commit_id]
466 except KeyError:
466 except KeyError:
467 idx = -1
467 idx = -1
468
468
469 return GitCommit(self, commit_id, idx, pre_load=pre_load)
469 return GitCommit(self, commit_id, idx, pre_load=pre_load)
470
470
471 def get_commits(
471 def get_commits(
472 self, start_id=None, end_id=None, start_date=None, end_date=None,
472 self, start_id=None, end_id=None, start_date=None, end_date=None,
473 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
473 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
474 """
474 """
475 Returns generator of `GitCommit` objects from start to end (both
475 Returns generator of `GitCommit` objects from start to end (both
476 are inclusive), in ascending date order.
476 are inclusive), in ascending date order.
477
477
478 :param start_id: None, str(commit_id)
478 :param start_id: None, str(commit_id)
479 :param end_id: None, str(commit_id)
479 :param end_id: None, str(commit_id)
480 :param start_date: if specified, commits with commit date less than
480 :param start_date: if specified, commits with commit date less than
481 ``start_date`` would be filtered out from returned set
481 ``start_date`` would be filtered out from returned set
482 :param end_date: if specified, commits with commit date greater than
482 :param end_date: if specified, commits with commit date greater than
483 ``end_date`` would be filtered out from returned set
483 ``end_date`` would be filtered out from returned set
484 :param branch_name: if specified, commits not reachable from given
484 :param branch_name: if specified, commits not reachable from given
485 branch would be filtered out from returned set
485 branch would be filtered out from returned set
486 :param show_hidden: Show hidden commits such as obsolete or hidden from
486 :param show_hidden: Show hidden commits such as obsolete or hidden from
487 Mercurial evolve
487 Mercurial evolve
488 :raise BranchDoesNotExistError: If given `branch_name` does not
488 :raise BranchDoesNotExistError: If given `branch_name` does not
489 exist.
489 exist.
490 :raise CommitDoesNotExistError: If commits for given `start` or
490 :raise CommitDoesNotExistError: If commits for given `start` or
491 `end` could not be found.
491 `end` could not be found.
492
492
493 """
493 """
494 if self.is_empty():
494 if self.is_empty():
495 raise EmptyRepositoryError("There are no commits yet")
495 raise EmptyRepositoryError("There are no commits yet")
496
496
497 self._validate_branch_name(branch_name)
497 self._validate_branch_name(branch_name)
498
498
499 if start_id is not None:
499 if start_id is not None:
500 self._validate_commit_id(start_id)
500 self._validate_commit_id(start_id)
501 if end_id is not None:
501 if end_id is not None:
502 self._validate_commit_id(end_id)
502 self._validate_commit_id(end_id)
503
503
504 start_raw_id = self._lookup_commit(start_id)
504 start_raw_id = self._lookup_commit(start_id)
505 start_pos = self._commit_ids[start_raw_id] if start_id else None
505 start_pos = self._commit_ids[start_raw_id] if start_id else None
506 end_raw_id = self._lookup_commit(end_id)
506 end_raw_id = self._lookup_commit(end_id)
507 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
507 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
508
508
509 if None not in [start_id, end_id] and start_pos > end_pos:
509 if None not in [start_id, end_id] and start_pos > end_pos:
510 raise RepositoryError(
510 raise RepositoryError(
511 "Start commit '%s' cannot be after end commit '%s'" %
511 "Start commit '%s' cannot be after end commit '%s'" %
512 (start_id, end_id))
512 (start_id, end_id))
513
513
514 if end_pos is not None:
514 if end_pos is not None:
515 end_pos += 1
515 end_pos += 1
516
516
517 filter_ = []
517 filter_ = []
518 if branch_name:
518 if branch_name:
519 filter_.append({'branch_name': branch_name})
519 filter_.append({'branch_name': branch_name})
520 if start_date and not end_date:
520 if start_date and not end_date:
521 filter_.append({'since': start_date})
521 filter_.append({'since': start_date})
522 if end_date and not start_date:
522 if end_date and not start_date:
523 filter_.append({'until': end_date})
523 filter_.append({'until': end_date})
524 if start_date and end_date:
524 if start_date and end_date:
525 filter_.append({'since': start_date})
525 filter_.append({'since': start_date})
526 filter_.append({'until': end_date})
526 filter_.append({'until': end_date})
527
527
528 # if start_pos or end_pos:
528 # if start_pos or end_pos:
529 # filter_.append({'start': start_pos})
529 # filter_.append({'start': start_pos})
530 # filter_.append({'end': end_pos})
530 # filter_.append({'end': end_pos})
531
531
532 if filter_:
532 if filter_:
533 revfilters = {
533 revfilters = {
534 'branch_name': branch_name,
534 'branch_name': branch_name,
535 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
535 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
536 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
536 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
537 'start': start_pos,
537 'start': start_pos,
538 'end': end_pos,
538 'end': end_pos,
539 }
539 }
540 commit_ids = self._get_commit_ids(filters=revfilters)
540 commit_ids = self._get_commit_ids(filters=revfilters)
541
541
542 else:
542 else:
543 commit_ids = self.commit_ids
543 commit_ids = self.commit_ids
544
544
545 if start_pos or end_pos:
545 if start_pos or end_pos:
546 commit_ids = commit_ids[start_pos: end_pos]
546 commit_ids = commit_ids[start_pos: end_pos]
547
547
548 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
548 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
549 translate_tag=translate_tags)
549 translate_tag=translate_tags)
550
550
551 def get_diff(
551 def get_diff(
552 self, commit1, commit2, path='', ignore_whitespace=False,
552 self, commit1, commit2, path='', ignore_whitespace=False,
553 context=3, path1=None):
553 context=3, path1=None):
554 """
554 """
555 Returns (git like) *diff*, as plain text. Shows changes introduced by
555 Returns (git like) *diff*, as plain text. Shows changes introduced by
556 ``commit2`` since ``commit1``.
556 ``commit2`` since ``commit1``.
557
557
558 :param commit1: Entry point from which diff is shown. Can be
558 :param commit1: Entry point from which diff is shown. Can be
559 ``self.EMPTY_COMMIT`` - in this case, patch showing all
559 ``self.EMPTY_COMMIT`` - in this case, patch showing all
560 the changes since empty state of the repository until ``commit2``
560 the changes since empty state of the repository until ``commit2``
561 :param commit2: Until which commits changes should be shown.
561 :param commit2: Until which commits changes should be shown.
562 :param path:
562 :param path:
563 :param ignore_whitespace: If set to ``True``, would not show whitespace
563 :param ignore_whitespace: If set to ``True``, would not show whitespace
564 changes. Defaults to ``False``.
564 changes. Defaults to ``False``.
565 :param context: How many lines before/after changed lines should be
565 :param context: How many lines before/after changed lines should be
566 shown. Defaults to ``3``.
566 shown. Defaults to ``3``.
567 :param path1:
567 :param path1:
568 """
568 """
569 self._validate_diff_commits(commit1, commit2)
569 self._validate_diff_commits(commit1, commit2)
570 if path1 is not None and path1 != path:
570 if path1 is not None and path1 != path:
571 raise ValueError("Diff of two different paths not supported.")
571 raise ValueError("Diff of two different paths not supported.")
572
572
573 if path:
573 if path:
574 file_filter = path
574 file_filter = path
575 else:
575 else:
576 file_filter = None
576 file_filter = None
577
577
578 diff = self._remote.diff(
578 diff = self._remote.diff(
579 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
579 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
580 opt_ignorews=ignore_whitespace,
580 opt_ignorews=ignore_whitespace,
581 context=context)
581 context=context)
582
582
583 return GitDiff(diff)
583 return GitDiff(diff)
584
584
585 def strip(self, commit_id, branch_name):
585 def strip(self, commit_id, branch_name):
586 commit = self.get_commit(commit_id=commit_id)
586 commit = self.get_commit(commit_id=commit_id)
587 if commit.merge:
587 if commit.merge:
588 raise Exception('Cannot reset to merge commit')
588 raise Exception('Cannot reset to merge commit')
589
589
590 # parent is going to be the new head now
590 # parent is going to be the new head now
591 commit = commit.parents[0]
591 commit = commit.parents[0]
592 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
592 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
593
593
594 # clear cached properties
594 # clear cached properties
595 self._invalidate_prop_cache('commit_ids')
595 self._invalidate_prop_cache('commit_ids')
596 self._invalidate_prop_cache('_refs')
596 self._invalidate_prop_cache('_refs')
597 self._invalidate_prop_cache('branches')
597 self._invalidate_prop_cache('branches')
598
598
599 return len(self.commit_ids)
599 return len(self.commit_ids)
600
600
601 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
601 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
602 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
602 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
603 self, commit_id1, repo2, commit_id2)
603 self, commit_id1, repo2, commit_id2)
604
604
605 if commit_id1 == commit_id2:
605 if commit_id1 == commit_id2:
606 return commit_id1
606 return commit_id1
607
607
608 if self != repo2:
608 if self != repo2:
609 commits = self._remote.get_missing_revs(
609 commits = self._remote.get_missing_revs(
610 commit_id1, commit_id2, repo2.path)
610 commit_id1, commit_id2, repo2.path)
611 if commits:
611 if commits:
612 commit = repo2.get_commit(commits[-1])
612 commit = repo2.get_commit(commits[-1])
613 if commit.parents:
613 if commit.parents:
614 ancestor_id = commit.parents[0].raw_id
614 ancestor_id = commit.parents[0].raw_id
615 else:
615 else:
616 ancestor_id = None
616 ancestor_id = None
617 else:
617 else:
618 # no commits from other repo, ancestor_id is the commit_id2
618 # no commits from other repo, ancestor_id is the commit_id2
619 ancestor_id = commit_id2
619 ancestor_id = commit_id2
620 else:
620 else:
621 output, __ = self.run_git_command(
621 output, __ = self.run_git_command(
622 ['merge-base', commit_id1, commit_id2])
622 ['merge-base', commit_id1, commit_id2])
623 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
623 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
624
624
625 log.debug('Found common ancestor with sha: %s', ancestor_id)
625 log.debug('Found common ancestor with sha: %s', ancestor_id)
626
626
627 return ancestor_id
627 return ancestor_id
628
628
629 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
629 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
630 repo1 = self
630 repo1 = self
631 ancestor_id = None
631 ancestor_id = None
632
632
633 if commit_id1 == commit_id2:
633 if commit_id1 == commit_id2:
634 commits = []
634 commits = []
635 elif repo1 != repo2:
635 elif repo1 != repo2:
636 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
636 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
637 repo2.path)
637 repo2.path)
638 commits = [
638 commits = [
639 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
639 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
640 for commit_id in reversed(missing_ids)]
640 for commit_id in reversed(missing_ids)]
641 else:
641 else:
642 output, __ = repo1.run_git_command(
642 output, __ = repo1.run_git_command(
643 ['log', '--reverse', '--pretty=format: %H', '-s',
643 ['log', '--reverse', '--pretty=format: %H', '-s',
644 f'{commit_id1}..{commit_id2}'])
644 f'{commit_id1}..{commit_id2}'])
645 commits = [
645 commits = [
646 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
646 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
647 for commit_id in self.COMMIT_ID_PAT.findall(output)]
647 for commit_id in self.COMMIT_ID_PAT.findall(output)]
648
648
649 return commits
649 return commits
650
650
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns ``GitInMemoryCommit`` object for this repository.
        """
        # NOTE(review): decorated with LazyProperty — presumably evaluated
        # once per instance and cached; confirm against LazyProperty semantics
        return GitInMemoryCommit(self)
657
657
658 def pull(self, url, commit_ids=None, update_after=False):
658 def pull(self, url, commit_ids=None, update_after=False):
659 """
659 """
660 Pull changes from external location. Pull is different in GIT
660 Pull changes from external location. Pull is different in GIT
661 that fetch since it's doing a checkout
661 that fetch since it's doing a checkout
662
662
663 :param commit_ids: Optional. Can be set to a list of commit ids
663 :param commit_ids: Optional. Can be set to a list of commit ids
664 which shall be pulled from the other repository.
664 which shall be pulled from the other repository.
665 """
665 """
666 refs = None
666 refs = None
667 if commit_ids is not None:
667 if commit_ids is not None:
668 remote_refs = self._remote.get_remote_refs(url)
668 remote_refs = self._remote.get_remote_refs(url)
669 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
669 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
670 self._remote.pull(url, refs=refs, update_after=update_after)
670 self._remote.pull(url, refs=refs, update_after=update_after)
671 self._remote.invalidate_vcs_cache()
671 self._remote.invalidate_vcs_cache()
672
672
    def fetch(self, url, commit_ids=None):
        """
        Fetch all git objects from external location.

        :param url: URL of the remote to fetch from.
        :param commit_ids: optional refs/commit ids restricting the fetch;
            passed straight through to the remote ``sync_fetch`` call.
        """
        self._remote.sync_fetch(url, refs=commit_ids)
        # drop the remote-side VCS cache so new objects become visible
        self._remote.invalidate_vcs_cache()
679
679
    def push(self, url):
        """
        Push refs to the external location at ``url``.

        ``refs=None`` is passed to the remote layer, i.e. no explicit ref
        selection is made here.
        """
        refs = None
        self._remote.sync_push(url, refs=refs)
683
683
    def set_refs(self, ref_name, commit_id):
        """
        Point git ref ``ref_name`` at ``commit_id`` and drop the cached
        ``_refs`` property so subsequent lookups see the change.
        """
        self._remote.set_refs(ref_name, commit_id)
        self._invalidate_prop_cache('_refs')
687
687
    def remove_ref(self, ref_name):
        """
        Delete git ref ``ref_name`` and drop the cached ``_refs`` property so
        subsequent lookups see the removal.
        """
        self._remote.remove_ref(ref_name)
        self._invalidate_prop_cache('_refs')
691
691
692 def run_gc(self, prune=True):
692 def run_gc(self, prune=True):
693 cmd = ['gc', '--aggressive']
693 cmd = ['gc', '--aggressive']
694 if prune:
694 if prune:
695 cmd += ['--prune=now']
695 cmd += ['--prune=now']
696 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
696 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
697 return stderr
697 return stderr
698
698
    def _update_server_info(self):
        """
        runs gits update-server-info command in this repo instance
        """
        # delegated to the vcsserver remote; refreshes auxiliary info files
        self._remote.update_server_info()
704
704
705 def _current_branch(self):
705 def _current_branch(self):
706 """
706 """
707 Return the name of the current branch.
707 Return the name of the current branch.
708
708
709 It only works for non bare repositories (i.e. repositories with a
709 It only works for non bare repositories (i.e. repositories with a
710 working copy)
710 working copy)
711 """
711 """
712 if self.bare:
712 if self.bare:
713 raise RepositoryError('Bare git repos do not have active branches')
713 raise RepositoryError('Bare git repos do not have active branches')
714
714
715 if self.is_empty():
715 if self.is_empty():
716 return None
716 return None
717
717
718 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
718 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
719 return stdout.strip()
719 return stdout.strip()
720
720
721 def _checkout(self, branch_name, create=False, force=False):
721 def _checkout(self, branch_name, create=False, force=False):
722 """
722 """
723 Checkout a branch in the working directory.
723 Checkout a branch in the working directory.
724
724
725 It tries to create the branch if create is True, failing if the branch
725 It tries to create the branch if create is True, failing if the branch
726 already exists.
726 already exists.
727
727
728 It only works for non bare repositories (i.e. repositories with a
728 It only works for non bare repositories (i.e. repositories with a
729 working copy)
729 working copy)
730 """
730 """
731 if self.bare:
731 if self.bare:
732 raise RepositoryError('Cannot checkout branches in a bare git repo')
732 raise RepositoryError('Cannot checkout branches in a bare git repo')
733
733
734 cmd = ['checkout']
734 cmd = ['checkout']
735 if force:
735 if force:
736 cmd.append('-f')
736 cmd.append('-f')
737 if create:
737 if create:
738 cmd.append('-b')
738 cmd.append('-b')
739 cmd.append(branch_name)
739 cmd.append(branch_name)
740 self.run_git_command(cmd, fail_on_stderr=False)
740 self.run_git_command(cmd, fail_on_stderr=False)
741
741
    def _create_branch(self, branch_name, commit_id):
        """
        creates a branch in a GIT repo

        :param branch_name: name of the branch to create.
        :param commit_id: commit the new branch should point at.
        """
        # NOTE(review): no ref-cache invalidation happens here, unlike
        # set_refs/remove_ref — callers may need to refresh caches themselves
        self._remote.create_branch(branch_name, commit_id)
747
747
748 def _identify(self):
748 def _identify(self):
749 """
749 """
750 Return the current state of the working directory.
750 Return the current state of the working directory.
751 """
751 """
752 if self.bare:
752 if self.bare:
753 raise RepositoryError('Bare git repos do not have active branches')
753 raise RepositoryError('Bare git repos do not have active branches')
754
754
755 if self.is_empty():
755 if self.is_empty():
756 return None
756 return None
757
757
758 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
758 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
759 return stdout.strip()
759 return stdout.strip()
760
760
761 def _local_clone(self, clone_path, branch_name, source_branch=None):
761 def _local_clone(self, clone_path, branch_name, source_branch=None):
762 """
762 """
763 Create a local clone of the current repo.
763 Create a local clone of the current repo.
764 """
764 """
765 # N.B.(skreft): the --branch option is required as otherwise the shallow
765 # N.B.(skreft): the --branch option is required as otherwise the shallow
766 # clone will only fetch the active branch.
766 # clone will only fetch the active branch.
767 cmd = ['clone', '--branch', branch_name,
767 cmd = ['clone', '--branch', branch_name,
768 self.path, os.path.abspath(clone_path)]
768 self.path, os.path.abspath(clone_path)]
769
769
770 self.run_git_command(cmd, fail_on_stderr=False)
770 self.run_git_command(cmd, fail_on_stderr=False)
771
771
772 # if we get the different source branch, make sure we also fetch it for
772 # if we get the different source branch, make sure we also fetch it for
773 # merge conditions
773 # merge conditions
774 if source_branch and source_branch != branch_name:
774 if source_branch and source_branch != branch_name:
775 # check if the ref exists.
775 # check if the ref exists.
776 shadow_repo = GitRepository(os.path.abspath(clone_path))
776 shadow_repo = GitRepository(os.path.abspath(clone_path))
777 if shadow_repo.get_remote_ref(source_branch):
777 if shadow_repo.get_remote_ref(source_branch):
778 cmd = ['fetch', self.path, source_branch]
778 cmd = ['fetch', self.path, source_branch]
779 self.run_git_command(cmd, fail_on_stderr=False)
779 self.run_git_command(cmd, fail_on_stderr=False)
780
780
781 def _local_fetch(self, repository_path, branch_name, use_origin=False):
781 def _local_fetch(self, repository_path, branch_name, use_origin=False):
782 """
782 """
783 Fetch a branch from a local repository.
783 Fetch a branch from a local repository.
784 """
784 """
785 repository_path = os.path.abspath(repository_path)
785 repository_path = os.path.abspath(repository_path)
786 if repository_path == self.path:
786 if repository_path == self.path:
787 raise ValueError('Cannot fetch from the same repository')
787 raise ValueError('Cannot fetch from the same repository')
788
788
789 if use_origin:
789 if use_origin:
790 branch_name = '+{branch}:refs/heads/{branch}'.format(
790 branch_name = '+{branch}:refs/heads/{branch}'.format(
791 branch=branch_name)
791 branch=branch_name)
792
792
793 cmd = ['fetch', '--no-tags', '--update-head-ok',
793 cmd = ['fetch', '--no-tags', '--update-head-ok',
794 repository_path, branch_name]
794 repository_path, branch_name]
795 self.run_git_command(cmd, fail_on_stderr=False)
795 self.run_git_command(cmd, fail_on_stderr=False)
796
796
797 def _local_reset(self, branch_name):
797 def _local_reset(self, branch_name):
798 branch_name = f'{branch_name}'
798 branch_name = f'{branch_name}'
799 cmd = ['reset', '--hard', branch_name, '--']
799 cmd = ['reset', '--hard', branch_name, '--']
800 self.run_git_command(cmd, fail_on_stderr=False)
800 self.run_git_command(cmd, fail_on_stderr=False)
801
801
802 def _last_fetch_heads(self):
802 def _last_fetch_heads(self):
803 """
803 """
804 Return the last fetched heads that need merging.
804 Return the last fetched heads that need merging.
805
805
806 The algorithm is defined at
806 The algorithm is defined at
807 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
807 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
808 """
808 """
809 if not self.bare:
809 if not self.bare:
810 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
810 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
811 else:
811 else:
812 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
812 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
813
813
814 heads = []
814 heads = []
815 with open(fetch_heads_path) as f:
815 with open(fetch_heads_path) as f:
816 for line in f:
816 for line in f:
817 if ' not-for-merge ' in line:
817 if ' not-for-merge ' in line:
818 continue
818 continue
819 line = re.sub('\t.*', '', line, flags=re.DOTALL)
819 line = re.sub('\t.*', '', line, flags=re.DOTALL)
820 heads.append(line)
820 heads.append(line)
821
821
822 return heads
822 return heads
823
823
    def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
        """
        Return a fresh ``GitRepository`` for the shadow repo at the given
        path, with remote-wire caching controlled by ``cache``.

        NOTE(review): ``enable_hooks`` is accepted for interface parity with
        other backends but is not used here — confirm against HG backend.
        """
        return GitRepository(shadow_repository_path, with_wire={"cache": cache})
826
826
827 def _local_pull(self, repository_path, branch_name, ff_only=True):
827 def _local_pull(self, repository_path, branch_name, ff_only=True):
828 """
828 """
829 Pull a branch from a local repository.
829 Pull a branch from a local repository.
830 """
830 """
831 if self.bare:
831 if self.bare:
832 raise RepositoryError('Cannot pull into a bare git repository')
832 raise RepositoryError('Cannot pull into a bare git repository')
833 # N.B.(skreft): The --ff-only option is to make sure this is a
833 # N.B.(skreft): The --ff-only option is to make sure this is a
834 # fast-forward (i.e., we are only pulling new changes and there are no
834 # fast-forward (i.e., we are only pulling new changes and there are no
835 # conflicts with our current branch)
835 # conflicts with our current branch)
836 # Additionally, that option needs to go before --no-tags, otherwise git
836 # Additionally, that option needs to go before --no-tags, otherwise git
837 # pull complains about it being an unknown flag.
837 # pull complains about it being an unknown flag.
838 cmd = ['pull']
838 cmd = ['pull']
839 if ff_only:
839 if ff_only:
840 cmd.append('--ff-only')
840 cmd.append('--ff-only')
841 cmd.extend(['--no-tags', repository_path, branch_name])
841 cmd.extend(['--no-tags', repository_path, branch_name])
842 self.run_git_command(cmd, fail_on_stderr=False)
842 self.run_git_command(cmd, fail_on_stderr=False)
843
843
    def _local_merge(self, merge_message, user_name, user_email, heads):
        """
        Merge the given head into the checked out branch.

        It will force a merge commit.

        Currently it raises an error if the repo is empty, as it is not possible
        to create a merge commit in an empty repo.

        :param merge_message: The message to use for the merge commit.
        :param user_name: name recorded as the merge author/committer.
        :param user_email: email recorded as the merge author/committer.
        :param heads: the heads to merge.
        :raises RepositoryError: if the repo is bare or empty, or the merge
            fails for a reason other than unresolved files.
        :raises UnresolvedFilesInRepo: if the merge produced conflicted
            files; the merge is aborted before raising.
        """
        if self.bare:
            raise RepositoryError('Cannot merge into a bare git repository')

        if not heads:
            # nothing to merge: successful no-op
            return

        if self.is_empty():
            # TODO(skreft): do something more robust in this case.
            raise RepositoryError('Do not know how to merge into empty repositories yet')
        unresolved = None

        # N.B.(skreft): the --no-ff option is used to enforce the creation of a
        # commit message. We also specify the user who is doing the merge.
        cmd = ['-c', f'user.name="{user_name}"',
               '-c', f'user.email={user_email}',
               'merge', '--no-ff', '-m', safe_str(merge_message)]

        merge_cmd = cmd + heads

        try:
            self.run_git_command(merge_cmd, fail_on_stderr=False)
        except RepositoryError:
            # collect conflicted (unmerged) files before aborting the merge
            files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
                                         fail_on_stderr=False)[0].splitlines()
            # NOTE(marcink): we add U notation for consistent with HG backend output
            unresolved = [f'U {f}' for f in files]

            # Cleanup any merge leftovers
            self._remote.invalidate_vcs_cache()
            self.run_git_command(['merge', '--abort'], fail_on_stderr=False)

            if unresolved:
                raise UnresolvedFilesInRepo(unresolved)
            else:
                # merge failed for another reason; propagate original error
                raise
891
891
    def _local_push(
            self, source_branch, repository_path, target_branch,
            enable_hooks=False, rc_scm_data=None):
        """
        Push the source_branch to the given repository and target_branch.

        Currently, if the target_branch is not master and the target repo is
        empty, the push will work, but then GitRepository won't be able to find
        the pushed branch or the commits, as HEAD will be corrupted (i.e.,
        pointing to master, which does not exist).

        It does not run the hooks in the target repo.

        :param source_branch: branch in this repository to push from.
        :param repository_path: filesystem path of the target repository.
        :param target_branch: branch in the target repository to push to.
        :param enable_hooks: when ``False`` (default), ``RC_SKIP_HOOKS=1`` is
            set in the environment of the push command.
        :param rc_scm_data: optional data exposed via the ``RC_SCM_DATA``
            environment variable.
        """
        # TODO(skreft): deal with the case in which the target repo is empty,
        # and the target_branch is not master.
        target_repo = GitRepository(repository_path)
        if (not target_repo.bare and
                target_repo._current_branch() == target_branch):
            # Git prevents pushing to the checked out branch, so simulate it by
            # pulling into the target repository.
            target_repo._local_pull(self.path, source_branch)
        else:
            cmd = ['push', os.path.abspath(repository_path),
                   f'{source_branch}:{target_branch}']
            gitenv = {}
            if rc_scm_data:
                gitenv.update({'RC_SCM_DATA': rc_scm_data})

            if not enable_hooks:
                # marker read by the hook layer to skip hook execution
                gitenv['RC_SKIP_HOOKS'] = '1'
            self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
923
923
924 def _get_new_pr_branch(self, source_branch, target_branch):
924 def _get_new_pr_branch(self, source_branch, target_branch):
925 prefix = f'pr_{source_branch}-{target_branch}_'
925 prefix = f'pr_{source_branch}-{target_branch}_'
926 pr_branches = []
926 pr_branches = []
927 for branch in self.branches:
927 for branch in self.branches:
928 if branch.startswith(prefix):
928 if branch.startswith(prefix):
929 pr_branches.append(int(branch[len(prefix):]))
929 pr_branches.append(int(branch[len(prefix):]))
930
930
931 if not pr_branches:
931 if not pr_branches:
932 branch_id = 0
932 branch_id = 0
933 else:
933 else:
934 branch_id = max(pr_branches) + 1
934 branch_id = max(pr_branches) + 1
935
935
936 return '%s%d' % (prefix, branch_id)
936 return '%s%d' % (prefix, branch_id)
937
937
938 def _maybe_prepare_merge_workspace(
938 def _maybe_prepare_merge_workspace(
939 self, repo_id, workspace_id, target_ref, source_ref):
939 self, repo_id, workspace_id, target_ref, source_ref):
940 shadow_repository_path = self._get_shadow_repository_path(
940 shadow_repository_path = self._get_shadow_repository_path(
941 self.path, repo_id, workspace_id)
941 self.path, repo_id, workspace_id)
942 if not os.path.exists(shadow_repository_path):
942 if not os.path.exists(shadow_repository_path):
943 self._local_clone(
943 self._local_clone(
944 shadow_repository_path, target_ref.name, source_ref.name)
944 shadow_repository_path, target_ref.name, source_ref.name)
945 log.debug('Prepared %s shadow repository in %s',
945 log.debug('Prepared %s shadow repository in %s',
946 self.alias, shadow_repository_path)
946 self.alias, shadow_repository_path)
947
947
948 return shadow_repository_path
948 return shadow_repository_path
949
949
950 def _merge_repo(self, repo_id, workspace_id, target_ref,
950 def _merge_repo(self, repo_id, workspace_id, target_ref,
951 source_repo, source_ref, merge_message,
951 source_repo, source_ref, merge_message,
952 merger_name, merger_email, dry_run=False,
952 merger_name, merger_email, dry_run=False,
953 use_rebase=False, close_branch=False):
953 use_rebase=False, close_branch=False):
954
954
955 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
955 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
956 'rebase' if use_rebase else 'merge', dry_run)
956 'rebase' if use_rebase else 'merge', dry_run)
957
957 if target_ref.commit_id != self.branches[target_ref.name]:
958 if target_ref.commit_id != self.branches[target_ref.name]:
958 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
959 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
959 target_ref.commit_id, self.branches[target_ref.name])
960 target_ref.commit_id, self.branches[target_ref.name])
960 return MergeResponse(
961 return MergeResponse(
961 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
962 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
962 metadata={'target_ref': target_ref})
963 metadata={'target_ref': target_ref})
963
964
964 shadow_repository_path = self._maybe_prepare_merge_workspace(
965 shadow_repository_path = self._maybe_prepare_merge_workspace(
965 repo_id, workspace_id, target_ref, source_ref)
966 repo_id, workspace_id, target_ref, source_ref)
966 shadow_repo = self.get_shadow_instance(shadow_repository_path)
967 shadow_repo = self.get_shadow_instance(shadow_repository_path)
967
968
968 # checkout source, if it's different. Otherwise we could not
969 # checkout source, if it's different. Otherwise we could not
969 # fetch proper commits for merge testing
970 # fetch proper commits for merge testing
970 if source_ref.name != target_ref.name:
971 if source_ref.name != target_ref.name:
971 if shadow_repo.get_remote_ref(source_ref.name):
972 if shadow_repo.get_remote_ref(source_ref.name):
972 shadow_repo._checkout(source_ref.name, force=True)
973 shadow_repo._checkout(source_ref.name, force=True)
973
974
974 # checkout target, and fetch changes
975 # checkout target, and fetch changes
975 shadow_repo._checkout(target_ref.name, force=True)
976 shadow_repo._checkout(target_ref.name, force=True)
976
977
977 # fetch/reset pull the target, in case it is changed
978 # fetch/reset pull the target, in case it is changed
978 # this handles even force changes
979 # this handles even force changes
979 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
980 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
980 shadow_repo._local_reset(target_ref.name)
981 shadow_repo._local_reset(target_ref.name)
981
982
982 # Need to reload repo to invalidate the cache, or otherwise we cannot
983 # Need to reload repo to invalidate the cache, or otherwise we cannot
983 # retrieve the last target commit.
984 # retrieve the last target commit.
984 shadow_repo = self.get_shadow_instance(shadow_repository_path)
985 shadow_repo = self.get_shadow_instance(shadow_repository_path)
985 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
986 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
986 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
987 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
987 target_ref, target_ref.commit_id,
988 target_ref, target_ref.commit_id,
988 shadow_repo.branches[target_ref.name])
989 shadow_repo.branches[target_ref.name])
989 return MergeResponse(
990 return MergeResponse(
990 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
991 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
991 metadata={'target_ref': target_ref})
992 metadata={'target_ref': target_ref})
992
993
993 # calculate new branch
994 # calculate new branch
994 pr_branch = shadow_repo._get_new_pr_branch(
995 pr_branch = shadow_repo._get_new_pr_branch(
995 source_ref.name, target_ref.name)
996 source_ref.name, target_ref.name)
996 log.debug('using pull-request merge branch: `%s`', pr_branch)
997 log.debug('using pull-request merge branch: `%s`', pr_branch)
997 # checkout to temp branch, and fetch changes
998 # checkout to temp branch, and fetch changes
998 shadow_repo._checkout(pr_branch, create=True)
999 shadow_repo._checkout(pr_branch, create=True)
999 try:
1000 try:
1000 shadow_repo._local_fetch(source_repo.path, source_ref.name)
1001 shadow_repo._local_fetch(source_repo.path, source_ref.name)
1001 except RepositoryError:
1002 except RepositoryError:
1002 log.exception('Failure when doing local fetch on '
1003 log.exception('Failure when doing local fetch on '
1003 'shadow repo: %s', shadow_repo)
1004 'shadow repo: %s', shadow_repo)
1004 return MergeResponse(
1005 return MergeResponse(
1005 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1006 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1006 metadata={'source_ref': source_ref})
1007 metadata={'source_ref': source_ref})
1007
1008
1008 merge_ref = None
1009 merge_ref = None
1009 merge_failure_reason = MergeFailureReason.NONE
1010 merge_failure_reason = MergeFailureReason.NONE
1010 metadata = {}
1011 metadata = {}
1011 try:
1012 try:
1012 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1013 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1013 [source_ref.commit_id])
1014 [source_ref.commit_id])
1014 merge_possible = True
1015 merge_possible = True
1015
1016
1016 # Need to invalidate the cache, or otherwise we
1017 # Need to invalidate the cache, or otherwise we
1017 # cannot retrieve the merge commit.
1018 # cannot retrieve the merge commit.
1018 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1019 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1019 merge_commit_id = shadow_repo.branches[pr_branch]
1020 merge_commit_id = shadow_repo.branches[pr_branch]
1020
1021
1021 # Set a reference pointing to the merge commit. This reference may
1022 # Set a reference pointing to the merge commit. This reference may
1022 # be used to easily identify the last successful merge commit in
1023 # be used to easily identify the last successful merge commit in
1023 # the shadow repository.
1024 # the shadow repository.
1024 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1025 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1025 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1026 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1026 except RepositoryError as e:
1027 except RepositoryError as e:
1027 log.exception('Failure when doing local merge on git shadow repo')
1028 log.exception('Failure when doing local merge on git shadow repo')
1028 if isinstance(e, UnresolvedFilesInRepo):
1029 if isinstance(e, UnresolvedFilesInRepo):
1029 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1030 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1030
1031
1031 merge_possible = False
1032 merge_possible = False
1032 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1033 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1033
1034
1034 if merge_possible and not dry_run:
1035 if merge_possible and not dry_run:
1035 try:
1036 try:
1036 shadow_repo._local_push(
1037 shadow_repo._local_push(
1037 pr_branch, self.path, target_ref.name, enable_hooks=True,
1038 pr_branch, self.path, target_ref.name, enable_hooks=True,
1038 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1039 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1039 merge_succeeded = True
1040 merge_succeeded = True
1040 except RepositoryError:
1041 except RepositoryError:
1041 log.exception(
1042 log.exception(
1042 'Failure when doing local push from the shadow '
1043 'Failure when doing local push from the shadow '
1043 'repository to the target repository at %s.', self.path)
1044 'repository to the target repository at %s.', self.path)
1044 merge_succeeded = False
1045 merge_succeeded = False
1045 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1046 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1046 metadata['target'] = 'git shadow repo'
1047 metadata['target'] = 'git shadow repo'
1047 metadata['merge_commit'] = pr_branch
1048 metadata['merge_commit'] = pr_branch
1048 else:
1049 else:
1049 merge_succeeded = False
1050 merge_succeeded = False
1050
1051
1051 return MergeResponse(
1052 return MergeResponse(
1052 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1053 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1053 metadata=metadata)
1054 metadata=metadata)
@@ -1,427 +1,428 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 """
20 """
21 Helpers for fixture generation
21 Helpers for fixture generation
22 """
22 """
23
23
24 import os
24 import os
25 import time
25 import time
26 import tempfile
26 import tempfile
27 import shutil
27 import shutil
28 import configparser
28 import configparser
29
29
30 from rhodecode.model.settings import SettingsModel
30 from rhodecode.model.settings import SettingsModel
31 from rhodecode.tests import *
32 from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist, UserEmailMap
31 from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist, UserEmailMap
33 from rhodecode.model.meta import Session
32 from rhodecode.model.meta import Session
34 from rhodecode.model.repo import RepoModel
33 from rhodecode.model.repo import RepoModel
35 from rhodecode.model.user import UserModel
34 from rhodecode.model.user import UserModel
36 from rhodecode.model.repo_group import RepoGroupModel
35 from rhodecode.model.repo_group import RepoGroupModel
37 from rhodecode.model.user_group import UserGroupModel
36 from rhodecode.model.user_group import UserGroupModel
38 from rhodecode.model.gist import GistModel
37 from rhodecode.model.gist import GistModel
39 from rhodecode.model.auth_token import AuthTokenModel
38 from rhodecode.model.auth_token import AuthTokenModel
40 from rhodecode.model.scm import ScmModel
39 from rhodecode.model.scm import ScmModel
41 from rhodecode.authentication.plugins.auth_rhodecode import \
40 from rhodecode.authentication.plugins.auth_rhodecode import \
42 RhodeCodeAuthPlugin
41 RhodeCodeAuthPlugin
43
42
43 from rhodecode.tests import TEST_USER_ADMIN_LOGIN
44
44 dn = os.path.dirname
45 dn = os.path.dirname
45 FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures')
46 FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures')
46
47
47
48
48 def error_function(*args, **kwargs):
49 def error_function(*args, **kwargs):
49 raise Exception('Total Crash !')
50 raise Exception('Total Crash !')
50
51
51
52
52 class TestINI(object):
53 class TestINI(object):
53 """
54 """
54 Allows to create a new test.ini file as a copy of existing one with edited
55 Allows to create a new test.ini file as a copy of existing one with edited
55 data. Example usage::
56 data. Example usage::
56
57
57 with TestINI('test.ini', [{'section':{'key':val'}]) as new_test_ini_path:
58 with TestINI('test.ini', [{'section':{'key':val'}]) as new_test_ini_path:
58 print('paster server %s' % new_test_ini)
59 print('paster server %s' % new_test_ini)
59 """
60 """
60
61
61 def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT',
62 def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT',
62 destroy=True, dir=None):
63 destroy=True, dir=None):
63 self.ini_file_path = ini_file_path
64 self.ini_file_path = ini_file_path
64 self.ini_params = ini_params
65 self.ini_params = ini_params
65 self.new_path = None
66 self.new_path = None
66 self.new_path_prefix = new_file_prefix
67 self.new_path_prefix = new_file_prefix
67 self._destroy = destroy
68 self._destroy = destroy
68 self._dir = dir
69 self._dir = dir
69
70
70 def __enter__(self):
71 def __enter__(self):
71 return self.create()
72 return self.create()
72
73
73 def __exit__(self, exc_type, exc_val, exc_tb):
74 def __exit__(self, exc_type, exc_val, exc_tb):
74 self.destroy()
75 self.destroy()
75
76
76 def create(self):
77 def create(self):
77 parser = configparser.ConfigParser()
78 parser = configparser.ConfigParser()
78 parser.read(self.ini_file_path)
79 parser.read(self.ini_file_path)
79
80
80 for data in self.ini_params:
81 for data in self.ini_params:
81 section, ini_params = list(data.items())[0]
82 section, ini_params = list(data.items())[0]
82
83
83 for key, val in ini_params.items():
84 for key, val in ini_params.items():
84 parser[section][key] = str(val)
85 parser[section][key] = str(val)
85
86
86 with tempfile.NamedTemporaryFile(
87 with tempfile.NamedTemporaryFile(
87 mode='w',
88 mode='w',
88 prefix=self.new_path_prefix, suffix='.ini', dir=self._dir,
89 prefix=self.new_path_prefix, suffix='.ini', dir=self._dir,
89 delete=False) as new_ini_file:
90 delete=False) as new_ini_file:
90 parser.write(new_ini_file)
91 parser.write(new_ini_file)
91 self.new_path = new_ini_file.name
92 self.new_path = new_ini_file.name
92
93
93 return self.new_path
94 return self.new_path
94
95
95 def destroy(self):
96 def destroy(self):
96 if self._destroy:
97 if self._destroy:
97 os.remove(self.new_path)
98 os.remove(self.new_path)
98
99
99
100
100 class Fixture(object):
101 class Fixture(object):
101
102
102 def anon_access(self, status):
103 def anon_access(self, status):
103 """
104 """
104 Context process for disabling anonymous access. use like:
105 Context process for disabling anonymous access. use like:
105 fixture = Fixture()
106 fixture = Fixture()
106 with fixture.anon_access(False):
107 with fixture.anon_access(False):
107 #tests
108 #tests
108
109
109 after this block anon access will be set to `not status`
110 after this block anon access will be set to `not status`
110 """
111 """
111
112
112 class context(object):
113 class context(object):
113 def __enter__(self):
114 def __enter__(self):
114 anon = User.get_default_user()
115 anon = User.get_default_user()
115 anon.active = status
116 anon.active = status
116 Session().add(anon)
117 Session().add(anon)
117 Session().commit()
118 Session().commit()
118 time.sleep(1.5) # must sleep for cache (1s to expire)
119 time.sleep(1.5) # must sleep for cache (1s to expire)
119
120
120 def __exit__(self, exc_type, exc_val, exc_tb):
121 def __exit__(self, exc_type, exc_val, exc_tb):
121 anon = User.get_default_user()
122 anon = User.get_default_user()
122 anon.active = not status
123 anon.active = not status
123 Session().add(anon)
124 Session().add(anon)
124 Session().commit()
125 Session().commit()
125
126
126 return context()
127 return context()
127
128
128 def auth_restriction(self, registry, auth_restriction):
129 def auth_restriction(self, registry, auth_restriction):
129 """
130 """
130 Context process for changing the builtin rhodecode plugin auth restrictions.
131 Context process for changing the builtin rhodecode plugin auth restrictions.
131 Use like:
132 Use like:
132 fixture = Fixture()
133 fixture = Fixture()
133 with fixture.auth_restriction('super_admin'):
134 with fixture.auth_restriction('super_admin'):
134 #tests
135 #tests
135
136
136 after this block auth restriction will be taken off
137 after this block auth restriction will be taken off
137 """
138 """
138
139
139 class context(object):
140 class context(object):
140 def _get_plugin(self):
141 def _get_plugin(self):
141 plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
142 plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
142 plugin = RhodeCodeAuthPlugin(plugin_id)
143 plugin = RhodeCodeAuthPlugin(plugin_id)
143 return plugin
144 return plugin
144
145
145 def __enter__(self):
146 def __enter__(self):
146
147
147 plugin = self._get_plugin()
148 plugin = self._get_plugin()
148 plugin.create_or_update_setting('auth_restriction', auth_restriction)
149 plugin.create_or_update_setting('auth_restriction', auth_restriction)
149 Session().commit()
150 Session().commit()
150 SettingsModel().invalidate_settings_cache(hard=True)
151 SettingsModel().invalidate_settings_cache(hard=True)
151
152
152 def __exit__(self, exc_type, exc_val, exc_tb):
153 def __exit__(self, exc_type, exc_val, exc_tb):
153
154
154 plugin = self._get_plugin()
155 plugin = self._get_plugin()
155 plugin.create_or_update_setting(
156 plugin.create_or_update_setting(
156 'auth_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE)
157 'auth_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE)
157 Session().commit()
158 Session().commit()
158 SettingsModel().invalidate_settings_cache(hard=True)
159 SettingsModel().invalidate_settings_cache(hard=True)
159
160
160 return context()
161 return context()
161
162
162 def scope_restriction(self, registry, scope_restriction):
163 def scope_restriction(self, registry, scope_restriction):
163 """
164 """
164 Context process for changing the builtin rhodecode plugin scope restrictions.
165 Context process for changing the builtin rhodecode plugin scope restrictions.
165 Use like:
166 Use like:
166 fixture = Fixture()
167 fixture = Fixture()
167 with fixture.scope_restriction('scope_http'):
168 with fixture.scope_restriction('scope_http'):
168 #tests
169 #tests
169
170
170 after this block scope restriction will be taken off
171 after this block scope restriction will be taken off
171 """
172 """
172
173
173 class context(object):
174 class context(object):
174 def _get_plugin(self):
175 def _get_plugin(self):
175 plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
176 plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
176 plugin = RhodeCodeAuthPlugin(plugin_id)
177 plugin = RhodeCodeAuthPlugin(plugin_id)
177 return plugin
178 return plugin
178
179
179 def __enter__(self):
180 def __enter__(self):
180 plugin = self._get_plugin()
181 plugin = self._get_plugin()
181 plugin.create_or_update_setting('scope_restriction', scope_restriction)
182 plugin.create_or_update_setting('scope_restriction', scope_restriction)
182 Session().commit()
183 Session().commit()
183 SettingsModel().invalidate_settings_cache(hard=True)
184 SettingsModel().invalidate_settings_cache(hard=True)
184
185
185 def __exit__(self, exc_type, exc_val, exc_tb):
186 def __exit__(self, exc_type, exc_val, exc_tb):
186 plugin = self._get_plugin()
187 plugin = self._get_plugin()
187 plugin.create_or_update_setting(
188 plugin.create_or_update_setting(
188 'scope_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL)
189 'scope_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL)
189 Session().commit()
190 Session().commit()
190 SettingsModel().invalidate_settings_cache(hard=True)
191 SettingsModel().invalidate_settings_cache(hard=True)
191
192
192 return context()
193 return context()
193
194
194 def _get_repo_create_params(self, **custom):
195 def _get_repo_create_params(self, **custom):
195 repo_type = custom.get('repo_type') or 'hg'
196 repo_type = custom.get('repo_type') or 'hg'
196
197
197 default_landing_ref, landing_ref_lbl = ScmModel.backend_landing_ref(repo_type)
198 default_landing_ref, landing_ref_lbl = ScmModel.backend_landing_ref(repo_type)
198
199
199 defs = {
200 defs = {
200 'repo_name': None,
201 'repo_name': None,
201 'repo_type': repo_type,
202 'repo_type': repo_type,
202 'clone_uri': '',
203 'clone_uri': '',
203 'push_uri': '',
204 'push_uri': '',
204 'repo_group': '-1',
205 'repo_group': '-1',
205 'repo_description': 'DESC',
206 'repo_description': 'DESC',
206 'repo_private': False,
207 'repo_private': False,
207 'repo_landing_commit_ref': default_landing_ref,
208 'repo_landing_commit_ref': default_landing_ref,
208 'repo_copy_permissions': False,
209 'repo_copy_permissions': False,
209 'repo_state': Repository.STATE_CREATED,
210 'repo_state': Repository.STATE_CREATED,
210 }
211 }
211 defs.update(custom)
212 defs.update(custom)
212 if 'repo_name_full' not in custom:
213 if 'repo_name_full' not in custom:
213 defs.update({'repo_name_full': defs['repo_name']})
214 defs.update({'repo_name_full': defs['repo_name']})
214
215
215 # fix the repo name if passed as repo_name_full
216 # fix the repo name if passed as repo_name_full
216 if defs['repo_name']:
217 if defs['repo_name']:
217 defs['repo_name'] = defs['repo_name'].split('/')[-1]
218 defs['repo_name'] = defs['repo_name'].split('/')[-1]
218
219
219 return defs
220 return defs
220
221
221 def _get_group_create_params(self, **custom):
222 def _get_group_create_params(self, **custom):
222 defs = {
223 defs = {
223 'group_name': None,
224 'group_name': None,
224 'group_description': 'DESC',
225 'group_description': 'DESC',
225 'perm_updates': [],
226 'perm_updates': [],
226 'perm_additions': [],
227 'perm_additions': [],
227 'perm_deletions': [],
228 'perm_deletions': [],
228 'group_parent_id': -1,
229 'group_parent_id': -1,
229 'enable_locking': False,
230 'enable_locking': False,
230 'recursive': False,
231 'recursive': False,
231 }
232 }
232 defs.update(custom)
233 defs.update(custom)
233
234
234 return defs
235 return defs
235
236
236 def _get_user_create_params(self, name, **custom):
237 def _get_user_create_params(self, name, **custom):
237 defs = {
238 defs = {
238 'username': name,
239 'username': name,
239 'password': 'qweqwe',
240 'password': 'qweqwe',
240 'email': '%s+test@rhodecode.org' % name,
241 'email': '%s+test@rhodecode.org' % name,
241 'firstname': 'TestUser',
242 'firstname': 'TestUser',
242 'lastname': 'Test',
243 'lastname': 'Test',
243 'description': 'test description',
244 'description': 'test description',
244 'active': True,
245 'active': True,
245 'admin': False,
246 'admin': False,
246 'extern_type': 'rhodecode',
247 'extern_type': 'rhodecode',
247 'extern_name': None,
248 'extern_name': None,
248 }
249 }
249 defs.update(custom)
250 defs.update(custom)
250
251
251 return defs
252 return defs
252
253
253 def _get_user_group_create_params(self, name, **custom):
254 def _get_user_group_create_params(self, name, **custom):
254 defs = {
255 defs = {
255 'users_group_name': name,
256 'users_group_name': name,
256 'user_group_description': 'DESC',
257 'user_group_description': 'DESC',
257 'users_group_active': True,
258 'users_group_active': True,
258 'user_group_data': {},
259 'user_group_data': {},
259 }
260 }
260 defs.update(custom)
261 defs.update(custom)
261
262
262 return defs
263 return defs
263
264
264 def create_repo(self, name, **kwargs):
265 def create_repo(self, name, **kwargs):
265 repo_group = kwargs.get('repo_group')
266 repo_group = kwargs.get('repo_group')
266 if isinstance(repo_group, RepoGroup):
267 if isinstance(repo_group, RepoGroup):
267 kwargs['repo_group'] = repo_group.group_id
268 kwargs['repo_group'] = repo_group.group_id
268 name = name.split(Repository.NAME_SEP)[-1]
269 name = name.split(Repository.NAME_SEP)[-1]
269 name = Repository.NAME_SEP.join((repo_group.group_name, name))
270 name = Repository.NAME_SEP.join((repo_group.group_name, name))
270
271
271 if 'skip_if_exists' in kwargs:
272 if 'skip_if_exists' in kwargs:
272 del kwargs['skip_if_exists']
273 del kwargs['skip_if_exists']
273 r = Repository.get_by_repo_name(name)
274 r = Repository.get_by_repo_name(name)
274 if r:
275 if r:
275 return r
276 return r
276
277
277 form_data = self._get_repo_create_params(repo_name=name, **kwargs)
278 form_data = self._get_repo_create_params(repo_name=name, **kwargs)
278 cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
279 cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
279 RepoModel().create(form_data, cur_user)
280 RepoModel().create(form_data, cur_user)
280 Session().commit()
281 Session().commit()
281 repo = Repository.get_by_repo_name(name)
282 repo = Repository.get_by_repo_name(name)
282 assert repo
283 assert repo
283 return repo
284 return repo
284
285
285 def create_fork(self, repo_to_fork, fork_name, **kwargs):
286 def create_fork(self, repo_to_fork, fork_name, **kwargs):
286 repo_to_fork = Repository.get_by_repo_name(repo_to_fork)
287 repo_to_fork = Repository.get_by_repo_name(repo_to_fork)
287
288
288 form_data = self._get_repo_create_params(
289 form_data = self._get_repo_create_params(
289 repo_name=fork_name,
290 repo_name=fork_name,
290 fork_parent_id=repo_to_fork.repo_id,
291 fork_parent_id=repo_to_fork.repo_id,
291 repo_type=repo_to_fork.repo_type,
292 repo_type=repo_to_fork.repo_type,
292 **kwargs)
293 **kwargs)
293
294
294 #TODO: fix it !!
295 # TODO: fix it !!
295 form_data['description'] = form_data['repo_description']
296 form_data['description'] = form_data['repo_description']
296 form_data['private'] = form_data['repo_private']
297 form_data['private'] = form_data['repo_private']
297 form_data['landing_rev'] = form_data['repo_landing_commit_ref']
298 form_data['landing_rev'] = form_data['repo_landing_commit_ref']
298
299
299 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
300 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
300 RepoModel().create_fork(form_data, cur_user=owner)
301 RepoModel().create_fork(form_data, cur_user=owner)
301 Session().commit()
302 Session().commit()
302 r = Repository.get_by_repo_name(fork_name)
303 r = Repository.get_by_repo_name(fork_name)
303 assert r
304 assert r
304 return r
305 return r
305
306
306 def destroy_repo(self, repo_name, **kwargs):
307 def destroy_repo(self, repo_name, **kwargs):
307 RepoModel().delete(repo_name, pull_requests='delete', **kwargs)
308 RepoModel().delete(repo_name, pull_requests='delete', **kwargs)
308 Session().commit()
309 Session().commit()
309
310
310 def destroy_repo_on_filesystem(self, repo_name):
311 def destroy_repo_on_filesystem(self, repo_name):
311 rm_path = os.path.join(RepoModel().repos_path, repo_name)
312 rm_path = os.path.join(RepoModel().repos_path, repo_name)
312 if os.path.isdir(rm_path):
313 if os.path.isdir(rm_path):
313 shutil.rmtree(rm_path)
314 shutil.rmtree(rm_path)
314
315
315 def create_repo_group(self, name, **kwargs):
316 def create_repo_group(self, name, **kwargs):
316 if 'skip_if_exists' in kwargs:
317 if 'skip_if_exists' in kwargs:
317 del kwargs['skip_if_exists']
318 del kwargs['skip_if_exists']
318 gr = RepoGroup.get_by_group_name(group_name=name)
319 gr = RepoGroup.get_by_group_name(group_name=name)
319 if gr:
320 if gr:
320 return gr
321 return gr
321 form_data = self._get_group_create_params(group_name=name, **kwargs)
322 form_data = self._get_group_create_params(group_name=name, **kwargs)
322 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
323 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
323 gr = RepoGroupModel().create(
324 gr = RepoGroupModel().create(
324 group_name=form_data['group_name'],
325 group_name=form_data['group_name'],
325 group_description=form_data['group_name'],
326 group_description=form_data['group_name'],
326 owner=owner)
327 owner=owner)
327 Session().commit()
328 Session().commit()
328 gr = RepoGroup.get_by_group_name(gr.group_name)
329 gr = RepoGroup.get_by_group_name(gr.group_name)
329 return gr
330 return gr
330
331
331 def destroy_repo_group(self, repogroupid):
332 def destroy_repo_group(self, repogroupid):
332 RepoGroupModel().delete(repogroupid)
333 RepoGroupModel().delete(repogroupid)
333 Session().commit()
334 Session().commit()
334
335
335 def create_user(self, name, **kwargs):
336 def create_user(self, name, **kwargs):
336 if 'skip_if_exists' in kwargs:
337 if 'skip_if_exists' in kwargs:
337 del kwargs['skip_if_exists']
338 del kwargs['skip_if_exists']
338 user = User.get_by_username(name)
339 user = User.get_by_username(name)
339 if user:
340 if user:
340 return user
341 return user
341 form_data = self._get_user_create_params(name, **kwargs)
342 form_data = self._get_user_create_params(name, **kwargs)
342 user = UserModel().create(form_data)
343 user = UserModel().create(form_data)
343
344
344 # create token for user
345 # create token for user
345 AuthTokenModel().create(
346 AuthTokenModel().create(
346 user=user, description=u'TEST_USER_TOKEN')
347 user=user, description=u'TEST_USER_TOKEN')
347
348
348 Session().commit()
349 Session().commit()
349 user = User.get_by_username(user.username)
350 user = User.get_by_username(user.username)
350 return user
351 return user
351
352
352 def destroy_user(self, userid):
353 def destroy_user(self, userid):
353 UserModel().delete(userid)
354 UserModel().delete(userid)
354 Session().commit()
355 Session().commit()
355
356
356 def create_additional_user_email(self, user, email):
357 def create_additional_user_email(self, user, email):
357 uem = UserEmailMap()
358 uem = UserEmailMap()
358 uem.user = user
359 uem.user = user
359 uem.email = email
360 uem.email = email
360 Session().add(uem)
361 Session().add(uem)
361 return uem
362 return uem
362
363
363 def destroy_users(self, userid_iter):
364 def destroy_users(self, userid_iter):
364 for user_id in userid_iter:
365 for user_id in userid_iter:
365 if User.get_by_username(user_id):
366 if User.get_by_username(user_id):
366 UserModel().delete(user_id)
367 UserModel().delete(user_id)
367 Session().commit()
368 Session().commit()
368
369
369 def create_user_group(self, name, **kwargs):
370 def create_user_group(self, name, **kwargs):
370 if 'skip_if_exists' in kwargs:
371 if 'skip_if_exists' in kwargs:
371 del kwargs['skip_if_exists']
372 del kwargs['skip_if_exists']
372 gr = UserGroup.get_by_group_name(group_name=name)
373 gr = UserGroup.get_by_group_name(group_name=name)
373 if gr:
374 if gr:
374 return gr
375 return gr
375 # map active flag to the real attribute. For API consistency of fixtures
376 # map active flag to the real attribute. For API consistency of fixtures
376 if 'active' in kwargs:
377 if 'active' in kwargs:
377 kwargs['users_group_active'] = kwargs['active']
378 kwargs['users_group_active'] = kwargs['active']
378 del kwargs['active']
379 del kwargs['active']
379 form_data = self._get_user_group_create_params(name, **kwargs)
380 form_data = self._get_user_group_create_params(name, **kwargs)
380 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
381 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
381 user_group = UserGroupModel().create(
382 user_group = UserGroupModel().create(
382 name=form_data['users_group_name'],
383 name=form_data['users_group_name'],
383 description=form_data['user_group_description'],
384 description=form_data['user_group_description'],
384 owner=owner, active=form_data['users_group_active'],
385 owner=owner, active=form_data['users_group_active'],
385 group_data=form_data['user_group_data'])
386 group_data=form_data['user_group_data'])
386 Session().commit()
387 Session().commit()
387 user_group = UserGroup.get_by_group_name(user_group.users_group_name)
388 user_group = UserGroup.get_by_group_name(user_group.users_group_name)
388 return user_group
389 return user_group
389
390
390 def destroy_user_group(self, usergroupid):
391 def destroy_user_group(self, usergroupid):
391 UserGroupModel().delete(user_group=usergroupid, force=True)
392 UserGroupModel().delete(user_group=usergroupid, force=True)
392 Session().commit()
393 Session().commit()
393
394
394 def create_gist(self, **kwargs):
395 def create_gist(self, **kwargs):
395 form_data = {
396 form_data = {
396 'description': 'new-gist',
397 'description': 'new-gist',
397 'owner': TEST_USER_ADMIN_LOGIN,
398 'owner': TEST_USER_ADMIN_LOGIN,
398 'gist_type': GistModel.cls.GIST_PUBLIC,
399 'gist_type': GistModel.cls.GIST_PUBLIC,
399 'lifetime': -1,
400 'lifetime': -1,
400 'acl_level': Gist.ACL_LEVEL_PUBLIC,
401 'acl_level': Gist.ACL_LEVEL_PUBLIC,
401 'gist_mapping': {b'filename1.txt': {'content': b'hello world'},}
402 'gist_mapping': {b'filename1.txt': {'content': b'hello world'},}
402 }
403 }
403 form_data.update(kwargs)
404 form_data.update(kwargs)
404 gist = GistModel().create(
405 gist = GistModel().create(
405 description=form_data['description'], owner=form_data['owner'],
406 description=form_data['description'], owner=form_data['owner'],
406 gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'],
407 gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'],
407 lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level']
408 lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level']
408 )
409 )
409 Session().commit()
410 Session().commit()
410 return gist
411 return gist
411
412
412 def destroy_gists(self, gistid=None):
413 def destroy_gists(self, gistid=None):
413 for g in GistModel.cls.get_all():
414 for g in GistModel.cls.get_all():
414 if gistid:
415 if gistid:
415 if gistid == g.gist_access_id:
416 if gistid == g.gist_access_id:
416 GistModel().delete(g)
417 GistModel().delete(g)
417 else:
418 else:
418 GistModel().delete(g)
419 GistModel().delete(g)
419 Session().commit()
420 Session().commit()
420
421
421 def load_resource(self, resource_name, strip=False):
422 def load_resource(self, resource_name, strip=False):
422 with open(os.path.join(FIXTURES, resource_name), 'rb') as f:
423 with open(os.path.join(FIXTURES, resource_name), 'rb') as f:
423 source = f.read()
424 source = f.read()
424 if strip:
425 if strip:
425 source = source.strip()
426 source = source.strip()
426
427
427 return source
428 return source
@@ -1,825 +1,826 b''
1
1
2 # Copyright (C) 2010-2023 RhodeCode GmbH
2 # Copyright (C) 2010-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software: you can redistribute it and/or modify
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU Affero General Public License, version 3
5 # it under the terms of the GNU Affero General Public License, version 3
6 # (only), as published by the Free Software Foundation.
6 # (only), as published by the Free Software Foundation.
7 #
7 #
8 # This program is distributed in the hope that it will be useful,
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # GNU General Public License for more details.
11 # GNU General Public License for more details.
12 #
12 #
13 # You should have received a copy of the GNU Affero General Public License
13 # You should have received a copy of the GNU Affero General Public License
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 #
15 #
16 # This program is dual-licensed. If you wish to learn more about the
16 # This program is dual-licensed. If you wish to learn more about the
17 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
18 # and proprietary license terms, please see https://rhodecode.com/licenses/
19
19
20 import textwrap
20 import textwrap
21
21
22 import mock
22 import mock
23 import pytest
23 import pytest
24
24
25 from rhodecode.lib.codeblocks import DiffSet
25 from rhodecode.lib.codeblocks import DiffSet
26 from rhodecode.lib.diffs import (
26 from rhodecode.lib.diffs import (
27 DiffProcessor,
27 DiffProcessor,
28 NEW_FILENODE, DEL_FILENODE, MOD_FILENODE, RENAMED_FILENODE,
28 NEW_FILENODE, DEL_FILENODE, MOD_FILENODE, RENAMED_FILENODE,
29 CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE)
29 CHMOD_FILENODE, BIN_FILENODE, COPIED_FILENODE)
30 from rhodecode.lib.str_utils import safe_bytes
30
31 from rhodecode.lib.utils2 import AttributeDict
31 from rhodecode.lib.utils2 import AttributeDict
32 from rhodecode.lib.vcs.backends.git import GitCommit
32 from rhodecode.lib.vcs.backends.git import GitCommit
33 from rhodecode.tests.fixture import Fixture, no_newline_id_generator
33 from rhodecode.tests.fixture import Fixture
34 from rhodecode.tests import no_newline_id_generator
34 from rhodecode.lib.vcs.backends.git.repository import GitDiff
35 from rhodecode.lib.vcs.backends.git.repository import GitDiff
35 from rhodecode.lib.vcs.backends.hg.repository import MercurialDiff
36 from rhodecode.lib.vcs.backends.hg.repository import MercurialDiff
36 from rhodecode.lib.vcs.backends.svn.repository import SubversionDiff
37 from rhodecode.lib.vcs.backends.svn.repository import SubversionDiff
37
38
38 fixture = Fixture()
39 fixture = Fixture()
39
40
40
41
41 class TestMixedFilenameEncodings(object):
42 class TestMixedFilenameEncodings(object):
42
43
43 @pytest.fixture(scope="class")
44 @pytest.fixture(scope="class")
44 def raw_diff(self):
45 def raw_diff(self):
45 return fixture.load_resource(
46 return fixture.load_resource(
46 'hg_diff_mixed_filename_encodings.diff')
47 'hg_diff_mixed_filename_encodings.diff')
47
48
48 @pytest.fixture()
49 @pytest.fixture()
49 def processor(self, raw_diff):
50 def processor(self, raw_diff):
50 diff = MercurialDiff(raw_diff)
51 diff = MercurialDiff(raw_diff)
51 processor = DiffProcessor(diff, diff_format='newdiff')
52 processor = DiffProcessor(diff, diff_format='newdiff')
52 return processor
53 return processor
53
54
54 def test_filenames_are_decoded_to_unicode(self, processor):
55 def test_filenames_are_decoded_to_unicode(self, processor):
55 diff_data = processor.prepare()
56 diff_data = processor.prepare()
56 filenames = [item['filename'] for item in diff_data]
57 filenames = [item['filename'] for item in diff_data]
57 assert filenames == [
58 assert filenames == [
58 u'spΓ€cial-utf8.txt', u'spοΏ½cial-cp1252.txt', u'spοΏ½cial-latin1.txt']
59 u'spΓ€cial-utf8.txt', u'spοΏ½cial-cp1252.txt', u'spοΏ½cial-latin1.txt']
59
60
60 def test_raw_diff_is_decoded_to_unicode(self, processor):
61 def test_raw_diff_is_decoded_to_unicode(self, processor):
61 diff_data = processor.prepare()
62 diff_data = processor.prepare()
62 raw_diffs = [item['raw_diff'] for item in diff_data]
63 raw_diffs = [item['raw_diff'] for item in diff_data]
63 new_file_message = u'\nnew file mode 100644\n'
64 new_file_message = u'\nnew file mode 100644\n'
64 expected_raw_diffs = [
65 expected_raw_diffs = [
65 u' a/spΓ€cial-utf8.txt b/spΓ€cial-utf8.txt' + new_file_message,
66 u' a/spΓ€cial-utf8.txt b/spΓ€cial-utf8.txt' + new_file_message,
66 u' a/spοΏ½cial-cp1252.txt b/spοΏ½cial-cp1252.txt' + new_file_message,
67 u' a/spοΏ½cial-cp1252.txt b/spοΏ½cial-cp1252.txt' + new_file_message,
67 u' a/spοΏ½cial-latin1.txt b/spοΏ½cial-latin1.txt' + new_file_message]
68 u' a/spοΏ½cial-latin1.txt b/spοΏ½cial-latin1.txt' + new_file_message]
68 assert raw_diffs == expected_raw_diffs
69 assert raw_diffs == expected_raw_diffs
69
70
70 def test_as_raw_preserves_the_encoding(self, processor, raw_diff):
71 def test_as_raw_preserves_the_encoding(self, processor, raw_diff):
71 assert processor.as_raw() == raw_diff
72 assert processor.as_raw() == raw_diff
72
73
73
74
74 # TODO: mikhail: format the following data structure properly
75 # TODO: mikhail: format the following data structure properly
75 DIFF_FIXTURES = [
76 DIFF_FIXTURES = [
76 ('hg',
77 ('hg',
77 'hg_diff_add_single_binary_file.diff',
78 'hg_diff_add_single_binary_file.diff',
78 [('US Warszawa.jpg', 'A',
79 [('US Warszawa.jpg', 'A',
79 {'added': 0,
80 {'added': 0,
80 'deleted': 0,
81 'deleted': 0,
81 'binary': True,
82 'binary': True,
82 'old_mode': '',
83 'old_mode': '',
83 'new_mode': '100755',
84 'new_mode': '100755',
84 'ops': {NEW_FILENODE: 'new file 100755',
85 'ops': {NEW_FILENODE: 'new file 100755',
85 BIN_FILENODE: 'binary diff hidden'}}),
86 BIN_FILENODE: 'binary diff hidden'}}),
86 ]),
87 ]),
87 ('hg',
88 ('hg',
88 'hg_diff_mod_single_binary_file.diff',
89 'hg_diff_mod_single_binary_file.diff',
89 [('US Warszawa.jpg', 'M',
90 [('US Warszawa.jpg', 'M',
90 {'added': 0,
91 {'added': 0,
91 'deleted': 0,
92 'deleted': 0,
92 'binary': True,
93 'binary': True,
93 'old_mode': '',
94 'old_mode': '',
94 'new_mode': '',
95 'new_mode': '',
95 'ops': {MOD_FILENODE: 'modified file',
96 'ops': {MOD_FILENODE: 'modified file',
96 BIN_FILENODE: 'binary diff hidden'}}),
97 BIN_FILENODE: 'binary diff hidden'}}),
97 ]),
98 ]),
98 ('hg',
99 ('hg',
99 'hg_diff_mod_single_file_and_rename_and_chmod.diff',
100 'hg_diff_mod_single_file_and_rename_and_chmod.diff',
100 [('README', 'M',
101 [('README', 'M',
101 {'added': 3,
102 {'added': 3,
102 'deleted': 0,
103 'deleted': 0,
103 'binary': False,
104 'binary': False,
104 'old_mode': '100755',
105 'old_mode': '100755',
105 'new_mode': '100644',
106 'new_mode': '100644',
106 'renamed': ('README.rst', 'README'),
107 'renamed': ('README.rst', 'README'),
107 'ops': {MOD_FILENODE: 'modified file',
108 'ops': {MOD_FILENODE: 'modified file',
108 RENAMED_FILENODE: 'file renamed from README.rst to README',
109 RENAMED_FILENODE: 'file renamed from README.rst to README',
109 CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
110 CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
110 ]),
111 ]),
111 ('hg',
112 ('hg',
112 'hg_diff_no_newline.diff',
113 'hg_diff_no_newline.diff',
113 [('server.properties', 'M',
114 [('server.properties', 'M',
114 {'added': 2,
115 {'added': 2,
115 'deleted': 1,
116 'deleted': 1,
116 'binary': False,
117 'binary': False,
117 'old_mode': '',
118 'old_mode': '',
118 'new_mode': '',
119 'new_mode': '',
119 'ops': {MOD_FILENODE: 'modified file'}}),
120 'ops': {MOD_FILENODE: 'modified file'}}),
120 ]),
121 ]),
121 ('hg',
122 ('hg',
122 'hg_diff_mod_file_and_rename.diff',
123 'hg_diff_mod_file_and_rename.diff',
123 [('README.rst', 'M',
124 [('README.rst', 'M',
124 {'added': 3,
125 {'added': 3,
125 'deleted': 0,
126 'deleted': 0,
126 'binary': False,
127 'binary': False,
127 'old_mode': '',
128 'old_mode': '',
128 'new_mode': '',
129 'new_mode': '',
129 'renamed': ('README', 'README.rst'),
130 'renamed': ('README', 'README.rst'),
130 'ops': {MOD_FILENODE: 'modified file',
131 'ops': {MOD_FILENODE: 'modified file',
131 RENAMED_FILENODE: 'file renamed from README to README.rst'}}),
132 RENAMED_FILENODE: 'file renamed from README to README.rst'}}),
132 ]),
133 ]),
133 ('hg',
134 ('hg',
134 'hg_diff_del_single_binary_file.diff',
135 'hg_diff_del_single_binary_file.diff',
135 [('US Warszawa.jpg', 'D',
136 [('US Warszawa.jpg', 'D',
136 {'added': 0,
137 {'added': 0,
137 'deleted': 0,
138 'deleted': 0,
138 'binary': True,
139 'binary': True,
139 'old_mode': '',
140 'old_mode': '',
140 'new_mode': '',
141 'new_mode': '',
141 'ops': {DEL_FILENODE: 'deleted file',
142 'ops': {DEL_FILENODE: 'deleted file',
142 BIN_FILENODE: 'binary diff hidden'}}),
143 BIN_FILENODE: 'binary diff hidden'}}),
143 ]),
144 ]),
144 ('hg',
145 ('hg',
145 'hg_diff_chmod_and_mod_single_binary_file.diff',
146 'hg_diff_chmod_and_mod_single_binary_file.diff',
146 [('gravatar.png', 'M',
147 [('gravatar.png', 'M',
147 {'added': 0,
148 {'added': 0,
148 'deleted': 0,
149 'deleted': 0,
149 'binary': True,
150 'binary': True,
150 'old_mode': '100644',
151 'old_mode': '100644',
151 'new_mode': '100755',
152 'new_mode': '100755',
152 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
153 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
153 BIN_FILENODE: 'binary diff hidden'}}),
154 BIN_FILENODE: 'binary diff hidden'}}),
154 ]),
155 ]),
155 ('hg',
156 ('hg',
156 'hg_diff_chmod.diff',
157 'hg_diff_chmod.diff',
157 [('file', 'M',
158 [('file', 'M',
158 {'added': 0,
159 {'added': 0,
159 'deleted': 0,
160 'deleted': 0,
160 'binary': True,
161 'binary': True,
161 'old_mode': '100755',
162 'old_mode': '100755',
162 'new_mode': '100644',
163 'new_mode': '100644',
163 'ops': {CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
164 'ops': {CHMOD_FILENODE: 'modified file chmod 100755 => 100644'}}),
164 ]),
165 ]),
165 ('hg',
166 ('hg',
166 'hg_diff_rename_file.diff',
167 'hg_diff_rename_file.diff',
167 [('file_renamed', 'M',
168 [('file_renamed', 'M',
168 {'added': 0,
169 {'added': 0,
169 'deleted': 0,
170 'deleted': 0,
170 'binary': True,
171 'binary': True,
171 'old_mode': '',
172 'old_mode': '',
172 'new_mode': '',
173 'new_mode': '',
173 'renamed': ('file', 'file_renamed'),
174 'renamed': ('file', 'file_renamed'),
174 'ops': {RENAMED_FILENODE: 'file renamed from file to file_renamed'}}),
175 'ops': {RENAMED_FILENODE: 'file renamed from file to file_renamed'}}),
175 ]),
176 ]),
176 ('hg',
177 ('hg',
177 'hg_diff_rename_and_chmod_file.diff',
178 'hg_diff_rename_and_chmod_file.diff',
178 [('README', 'M',
179 [('README', 'M',
179 {'added': 0,
180 {'added': 0,
180 'deleted': 0,
181 'deleted': 0,
181 'binary': True,
182 'binary': True,
182 'old_mode': '100644',
183 'old_mode': '100644',
183 'new_mode': '100755',
184 'new_mode': '100755',
184 'renamed': ('README.rst', 'README'),
185 'renamed': ('README.rst', 'README'),
185 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
186 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755',
186 RENAMED_FILENODE: 'file renamed from README.rst to README'}}),
187 RENAMED_FILENODE: 'file renamed from README.rst to README'}}),
187 ]),
188 ]),
188 ('hg',
189 ('hg',
189 'hg_diff_binary_and_normal.diff',
190 'hg_diff_binary_and_normal.diff',
190 [('img/baseline-10px.png', 'A',
191 [('img/baseline-10px.png', 'A',
191 {'added': 0,
192 {'added': 0,
192 'deleted': 0,
193 'deleted': 0,
193 'binary': True,
194 'binary': True,
194 'new_mode': '100644',
195 'new_mode': '100644',
195 'old_mode': '',
196 'old_mode': '',
196 'ops': {NEW_FILENODE: 'new file 100644',
197 'ops': {NEW_FILENODE: 'new file 100644',
197 BIN_FILENODE: 'binary diff hidden'}}),
198 BIN_FILENODE: 'binary diff hidden'}}),
198 ('js/jquery/hashgrid.js', 'A',
199 ('js/jquery/hashgrid.js', 'A',
199 {'added': 340,
200 {'added': 340,
200 'deleted': 0,
201 'deleted': 0,
201 'binary': False,
202 'binary': False,
202 'new_mode': '100755',
203 'new_mode': '100755',
203 'old_mode': '',
204 'old_mode': '',
204 'ops': {NEW_FILENODE: 'new file 100755'}}),
205 'ops': {NEW_FILENODE: 'new file 100755'}}),
205 ('index.html', 'M',
206 ('index.html', 'M',
206 {'added': 3,
207 {'added': 3,
207 'deleted': 2,
208 'deleted': 2,
208 'binary': False,
209 'binary': False,
209 'new_mode': '',
210 'new_mode': '',
210 'old_mode': '',
211 'old_mode': '',
211 'ops': {MOD_FILENODE: 'modified file'}}),
212 'ops': {MOD_FILENODE: 'modified file'}}),
212 ('less/docs.less', 'M',
213 ('less/docs.less', 'M',
213 {'added': 34,
214 {'added': 34,
214 'deleted': 0,
215 'deleted': 0,
215 'binary': False,
216 'binary': False,
216 'new_mode': '',
217 'new_mode': '',
217 'old_mode': '',
218 'old_mode': '',
218 'ops': {MOD_FILENODE: 'modified file'}}),
219 'ops': {MOD_FILENODE: 'modified file'}}),
219 ('less/scaffolding.less', 'M',
220 ('less/scaffolding.less', 'M',
220 {'added': 1,
221 {'added': 1,
221 'deleted': 3,
222 'deleted': 3,
222 'binary': False,
223 'binary': False,
223 'new_mode': '',
224 'new_mode': '',
224 'old_mode': '',
225 'old_mode': '',
225 'ops': {MOD_FILENODE: 'modified file'}}),
226 'ops': {MOD_FILENODE: 'modified file'}}),
226 ('readme.markdown', 'M',
227 ('readme.markdown', 'M',
227 {'added': 1,
228 {'added': 1,
228 'deleted': 10,
229 'deleted': 10,
229 'binary': False,
230 'binary': False,
230 'new_mode': '',
231 'new_mode': '',
231 'old_mode': '',
232 'old_mode': '',
232 'ops': {MOD_FILENODE: 'modified file'}}),
233 'ops': {MOD_FILENODE: 'modified file'}}),
233 ('img/baseline-20px.png', 'D',
234 ('img/baseline-20px.png', 'D',
234 {'added': 0,
235 {'added': 0,
235 'deleted': 0,
236 'deleted': 0,
236 'binary': True,
237 'binary': True,
237 'new_mode': '',
238 'new_mode': '',
238 'old_mode': '',
239 'old_mode': '',
239 'ops': {DEL_FILENODE: 'deleted file',
240 'ops': {DEL_FILENODE: 'deleted file',
240 BIN_FILENODE: 'binary diff hidden'}}),
241 BIN_FILENODE: 'binary diff hidden'}}),
241 ('js/global.js', 'D',
242 ('js/global.js', 'D',
242 {'added': 0,
243 {'added': 0,
243 'deleted': 75,
244 'deleted': 75,
244 'binary': False,
245 'binary': False,
245 'new_mode': '',
246 'new_mode': '',
246 'old_mode': '',
247 'old_mode': '',
247 'ops': {DEL_FILENODE: 'deleted file'}})
248 'ops': {DEL_FILENODE: 'deleted file'}})
248 ]),
249 ]),
249 ('git',
250 ('git',
250 'git_diff_chmod.diff',
251 'git_diff_chmod.diff',
251 [('work-horus.xls', 'M',
252 [('work-horus.xls', 'M',
252 {'added': 0,
253 {'added': 0,
253 'deleted': 0,
254 'deleted': 0,
254 'binary': True,
255 'binary': True,
255 'old_mode': '100644',
256 'old_mode': '100644',
256 'new_mode': '100755',
257 'new_mode': '100755',
257 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}})
258 'ops': {CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}})
258 ]),
259 ]),
259 ('git',
260 ('git',
260 'git_diff_js_chars.diff',
261 'git_diff_js_chars.diff',
261 [('\\"><img src=x onerror=prompt(0)>/\\"><img src=x onerror=prompt(1)>.txt', 'M',
262 [('\\"><img src=x onerror=prompt(0)>/\\"><img src=x onerror=prompt(1)>.txt', 'M',
262 {'added': 1,
263 {'added': 1,
263 'deleted': 0,
264 'deleted': 0,
264 'binary': False,
265 'binary': False,
265 'old_mode': '',
266 'old_mode': '',
266 'new_mode': '100644',
267 'new_mode': '100644',
267 'ops': {MOD_FILENODE: 'modified file'}})
268 'ops': {MOD_FILENODE: 'modified file'}})
268 ]),
269 ]),
269 ('git',
270 ('git',
270 'git_diff_rename_file.diff',
271 'git_diff_rename_file.diff',
271 [('file.xls', 'M',
272 [('file.xls', 'M',
272 {'added': 0,
273 {'added': 0,
273 'deleted': 0,
274 'deleted': 0,
274 'binary': True,
275 'binary': True,
275 'old_mode': '',
276 'old_mode': '',
276 'new_mode': '',
277 'new_mode': '',
277 'renamed': ('work-horus.xls', 'file.xls'),
278 'renamed': ('work-horus.xls', 'file.xls'),
278 'ops': {
279 'ops': {
279 RENAMED_FILENODE: 'file renamed from work-horus.xls to file.xls'}})
280 RENAMED_FILENODE: 'file renamed from work-horus.xls to file.xls'}})
280 ]),
281 ]),
281 ('git',
282 ('git',
282 'git_diff_mod_single_binary_file.diff',
283 'git_diff_mod_single_binary_file.diff',
283 [('US Warszawa.jpg', 'M',
284 [('US Warszawa.jpg', 'M',
284 {'added': 0,
285 {'added': 0,
285 'deleted': 0,
286 'deleted': 0,
286 'binary': True,
287 'binary': True,
287 'old_mode': '',
288 'old_mode': '',
288 'new_mode': '',
289 'new_mode': '',
289 'ops': {MOD_FILENODE: 'modified file',
290 'ops': {MOD_FILENODE: 'modified file',
290 BIN_FILENODE: 'binary diff hidden'}})
291 BIN_FILENODE: 'binary diff hidden'}})
291 ]),
292 ]),
292 ('git',
293 ('git',
293 'git_diff_binary_and_normal.diff',
294 'git_diff_binary_and_normal.diff',
294 [('img/baseline-10px.png', 'A',
295 [('img/baseline-10px.png', 'A',
295 {'added': 0,
296 {'added': 0,
296 'deleted': 0,
297 'deleted': 0,
297 'binary': True,
298 'binary': True,
298 'old_mode': '',
299 'old_mode': '',
299 'new_mode': '100644',
300 'new_mode': '100644',
300 'ops': {NEW_FILENODE: 'new file 100644',
301 'ops': {NEW_FILENODE: 'new file 100644',
301 BIN_FILENODE: 'binary diff hidden'}}),
302 BIN_FILENODE: 'binary diff hidden'}}),
302 ('js/jquery/hashgrid.js', 'A',
303 ('js/jquery/hashgrid.js', 'A',
303 {'added': 340,
304 {'added': 340,
304 'deleted': 0,
305 'deleted': 0,
305 'binary': False,
306 'binary': False,
306 'old_mode': '',
307 'old_mode': '',
307 'new_mode': '100755',
308 'new_mode': '100755',
308 'ops': {NEW_FILENODE: 'new file 100755'}}),
309 'ops': {NEW_FILENODE: 'new file 100755'}}),
309 ('index.html', 'M',
310 ('index.html', 'M',
310 {'added': 3,
311 {'added': 3,
311 'deleted': 2,
312 'deleted': 2,
312 'binary': False,
313 'binary': False,
313 'old_mode': '',
314 'old_mode': '',
314 'new_mode': '100644',
315 'new_mode': '100644',
315 'ops': {MOD_FILENODE: 'modified file'}}),
316 'ops': {MOD_FILENODE: 'modified file'}}),
316 ('less/docs.less', 'M',
317 ('less/docs.less', 'M',
317 {'added': 34,
318 {'added': 34,
318 'deleted': 0,
319 'deleted': 0,
319 'binary': False,
320 'binary': False,
320 'old_mode': '',
321 'old_mode': '',
321 'new_mode': '100644',
322 'new_mode': '100644',
322 'ops': {MOD_FILENODE: 'modified file'}}),
323 'ops': {MOD_FILENODE: 'modified file'}}),
323 ('less/scaffolding.less', 'M',
324 ('less/scaffolding.less', 'M',
324 {'added': 1,
325 {'added': 1,
325 'deleted': 3,
326 'deleted': 3,
326 'binary': False,
327 'binary': False,
327 'old_mode': '',
328 'old_mode': '',
328 'new_mode': '100644',
329 'new_mode': '100644',
329 'ops': {MOD_FILENODE: 'modified file'}}),
330 'ops': {MOD_FILENODE: 'modified file'}}),
330 ('readme.markdown', 'M',
331 ('readme.markdown', 'M',
331 {'added': 1,
332 {'added': 1,
332 'deleted': 10,
333 'deleted': 10,
333 'binary': False,
334 'binary': False,
334 'old_mode': '',
335 'old_mode': '',
335 'new_mode': '100644',
336 'new_mode': '100644',
336 'ops': {MOD_FILENODE: 'modified file'}}),
337 'ops': {MOD_FILENODE: 'modified file'}}),
337 ('img/baseline-20px.png', 'D',
338 ('img/baseline-20px.png', 'D',
338 {'added': 0,
339 {'added': 0,
339 'deleted': 0,
340 'deleted': 0,
340 'binary': True,
341 'binary': True,
341 'old_mode': '',
342 'old_mode': '',
342 'new_mode': '',
343 'new_mode': '',
343 'ops': {DEL_FILENODE: 'deleted file',
344 'ops': {DEL_FILENODE: 'deleted file',
344 BIN_FILENODE: 'binary diff hidden'}}),
345 BIN_FILENODE: 'binary diff hidden'}}),
345 ('js/global.js', 'D',
346 ('js/global.js', 'D',
346 {'added': 0,
347 {'added': 0,
347 'deleted': 75,
348 'deleted': 75,
348 'binary': False,
349 'binary': False,
349 'old_mode': '',
350 'old_mode': '',
350 'new_mode': '',
351 'new_mode': '',
351 'ops': {DEL_FILENODE: 'deleted file'}}),
352 'ops': {DEL_FILENODE: 'deleted file'}}),
352 ]),
353 ]),
353 ('hg',
354 ('hg',
354 'diff_with_diff_data.diff',
355 'diff_with_diff_data.diff',
355 [('vcs/backends/base.py', 'M',
356 [('vcs/backends/base.py', 'M',
356 {'added': 18,
357 {'added': 18,
357 'deleted': 2,
358 'deleted': 2,
358 'binary': False,
359 'binary': False,
359 'old_mode': '',
360 'old_mode': '',
360 'new_mode': '100644',
361 'new_mode': '100644',
361 'ops': {MOD_FILENODE: 'modified file'}}),
362 'ops': {MOD_FILENODE: 'modified file'}}),
362 ('vcs/backends/git/repository.py', 'M',
363 ('vcs/backends/git/repository.py', 'M',
363 {'added': 46,
364 {'added': 46,
364 'deleted': 15,
365 'deleted': 15,
365 'binary': False,
366 'binary': False,
366 'old_mode': '',
367 'old_mode': '',
367 'new_mode': '100644',
368 'new_mode': '100644',
368 'ops': {MOD_FILENODE: 'modified file'}}),
369 'ops': {MOD_FILENODE: 'modified file'}}),
369 ('vcs/backends/hg.py', 'M',
370 ('vcs/backends/hg.py', 'M',
370 {'added': 22,
371 {'added': 22,
371 'deleted': 3,
372 'deleted': 3,
372 'binary': False,
373 'binary': False,
373 'old_mode': '',
374 'old_mode': '',
374 'new_mode': '100644',
375 'new_mode': '100644',
375 'ops': {MOD_FILENODE: 'modified file'}}),
376 'ops': {MOD_FILENODE: 'modified file'}}),
376 ('vcs/tests/test_git.py', 'M',
377 ('vcs/tests/test_git.py', 'M',
377 {'added': 5,
378 {'added': 5,
378 'deleted': 5,
379 'deleted': 5,
379 'binary': False,
380 'binary': False,
380 'old_mode': '',
381 'old_mode': '',
381 'new_mode': '100644',
382 'new_mode': '100644',
382 'ops': {MOD_FILENODE: 'modified file'}}),
383 'ops': {MOD_FILENODE: 'modified file'}}),
383 ('vcs/tests/test_repository.py', 'M',
384 ('vcs/tests/test_repository.py', 'M',
384 {'added': 174,
385 {'added': 174,
385 'deleted': 2,
386 'deleted': 2,
386 'binary': False,
387 'binary': False,
387 'old_mode': '',
388 'old_mode': '',
388 'new_mode': '100644',
389 'new_mode': '100644',
389 'ops': {MOD_FILENODE: 'modified file'}}),
390 'ops': {MOD_FILENODE: 'modified file'}}),
390 ]),
391 ]),
391 ('hg',
392 ('hg',
392 'hg_diff_copy_file.diff',
393 'hg_diff_copy_file.diff',
393 [('file2', 'M',
394 [('file2', 'M',
394 {'added': 0,
395 {'added': 0,
395 'deleted': 0,
396 'deleted': 0,
396 'binary': True,
397 'binary': True,
397 'old_mode': '',
398 'old_mode': '',
398 'new_mode': '',
399 'new_mode': '',
399 'copied': ('file1', 'file2'),
400 'copied': ('file1', 'file2'),
400 'ops': {COPIED_FILENODE: 'file copied from file1 to file2'}}),
401 'ops': {COPIED_FILENODE: 'file copied from file1 to file2'}}),
401 ]),
402 ]),
402 ('hg',
403 ('hg',
403 'hg_diff_copy_and_modify_file.diff',
404 'hg_diff_copy_and_modify_file.diff',
404 [('file3', 'M',
405 [('file3', 'M',
405 {'added': 1,
406 {'added': 1,
406 'deleted': 0,
407 'deleted': 0,
407 'binary': False,
408 'binary': False,
408 'old_mode': '',
409 'old_mode': '',
409 'new_mode': '',
410 'new_mode': '',
410 'copied': ('file2', 'file3'),
411 'copied': ('file2', 'file3'),
411 'ops': {COPIED_FILENODE: 'file copied from file2 to file3',
412 'ops': {COPIED_FILENODE: 'file copied from file2 to file3',
412 MOD_FILENODE: 'modified file'}}),
413 MOD_FILENODE: 'modified file'}}),
413 ]),
414 ]),
414 ('hg',
415 ('hg',
415 'hg_diff_copy_and_chmod_file.diff',
416 'hg_diff_copy_and_chmod_file.diff',
416 [('file4', 'M',
417 [('file4', 'M',
417 {'added': 0,
418 {'added': 0,
418 'deleted': 0,
419 'deleted': 0,
419 'binary': True,
420 'binary': True,
420 'old_mode': '100644',
421 'old_mode': '100644',
421 'new_mode': '100755',
422 'new_mode': '100755',
422 'copied': ('file3', 'file4'),
423 'copied': ('file3', 'file4'),
423 'ops': {COPIED_FILENODE: 'file copied from file3 to file4',
424 'ops': {COPIED_FILENODE: 'file copied from file3 to file4',
424 CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}}),
425 CHMOD_FILENODE: 'modified file chmod 100644 => 100755'}}),
425 ]),
426 ]),
426 ('hg',
427 ('hg',
427 'hg_diff_copy_chmod_and_edit_file.diff',
428 'hg_diff_copy_chmod_and_edit_file.diff',
428 [('file5', 'M',
429 [('file5', 'M',
429 {'added': 2,
430 {'added': 2,
430 'deleted': 1,
431 'deleted': 1,
431 'binary': False,
432 'binary': False,
432 'old_mode': '100755',
433 'old_mode': '100755',
433 'new_mode': '100644',
434 'new_mode': '100644',
434 'copied': ('file4', 'file5'),
435 'copied': ('file4', 'file5'),
435 'ops': {COPIED_FILENODE: 'file copied from file4 to file5',
436 'ops': {COPIED_FILENODE: 'file copied from file4 to file5',
436 CHMOD_FILENODE: 'modified file chmod 100755 => 100644',
437 CHMOD_FILENODE: 'modified file chmod 100755 => 100644',
437 MOD_FILENODE: 'modified file'}})]),
438 MOD_FILENODE: 'modified file'}})]),
438
439
439 # Diffs to validate rename and copy file with space in its name
440 # Diffs to validate rename and copy file with space in its name
440 ('git',
441 ('git',
441 'git_diff_rename_file_with_spaces.diff',
442 'git_diff_rename_file_with_spaces.diff',
442 [('file_with_ two spaces.txt', 'M',
443 [('file_with_ two spaces.txt', 'M',
443 {'added': 0,
444 {'added': 0,
444 'deleted': 0,
445 'deleted': 0,
445 'binary': True,
446 'binary': True,
446 'old_mode': '',
447 'old_mode': '',
447 'new_mode': '',
448 'new_mode': '',
448 'renamed': ('file_with_ spaces.txt', 'file_with_ two spaces.txt'),
449 'renamed': ('file_with_ spaces.txt', 'file_with_ two spaces.txt'),
449 'ops': {
450 'ops': {
450 RENAMED_FILENODE: (
451 RENAMED_FILENODE: (
451 'file renamed from file_with_ spaces.txt to file_with_ '
452 'file renamed from file_with_ spaces.txt to file_with_ '
452 ' two spaces.txt')}
453 ' two spaces.txt')}
453 }), ]),
454 }), ]),
454 ('hg',
455 ('hg',
455 'hg_diff_rename_file_with_spaces.diff',
456 'hg_diff_rename_file_with_spaces.diff',
456 [('file_changed _.txt', 'M',
457 [('file_changed _.txt', 'M',
457 {'added': 0,
458 {'added': 0,
458 'deleted': 0,
459 'deleted': 0,
459 'binary': True,
460 'binary': True,
460 'old_mode': '',
461 'old_mode': '',
461 'new_mode': '',
462 'new_mode': '',
462 'renamed': ('file_ with update.txt', 'file_changed _.txt'),
463 'renamed': ('file_ with update.txt', 'file_changed _.txt'),
463 'ops': {
464 'ops': {
464 RENAMED_FILENODE: (
465 RENAMED_FILENODE: (
465 'file renamed from file_ with update.txt to file_changed _.txt')}
466 'file renamed from file_ with update.txt to file_changed _.txt')}
466 }), ]),
467 }), ]),
467 ('hg',
468 ('hg',
468 'hg_diff_copy_file_with_spaces.diff',
469 'hg_diff_copy_file_with_spaces.diff',
469 [('file_copied_ with spaces.txt', 'M',
470 [('file_copied_ with spaces.txt', 'M',
470 {'added': 0,
471 {'added': 0,
471 'deleted': 0,
472 'deleted': 0,
472 'binary': True,
473 'binary': True,
473 'old_mode': '',
474 'old_mode': '',
474 'new_mode': '',
475 'new_mode': '',
475 'copied': ('file_changed_without_spaces.txt', 'file_copied_ with spaces.txt'),
476 'copied': ('file_changed_without_spaces.txt', 'file_copied_ with spaces.txt'),
476 'ops': {
477 'ops': {
477 COPIED_FILENODE: (
478 COPIED_FILENODE: (
478 'file copied from file_changed_without_spaces.txt to'
479 'file copied from file_changed_without_spaces.txt to'
479 ' file_copied_ with spaces.txt')}
480 ' file_copied_ with spaces.txt')}
480 }),
481 }),
481 ]),
482 ]),
482
483
483 # special signs from git
484 # special signs from git
484 ('git',
485 ('git',
485 'git_diff_binary_special_files.diff',
486 'git_diff_binary_special_files.diff',
486 [('css/_Icon\\r', 'A',
487 [('css/_Icon\\r', 'A',
487 {'added': 0,
488 {'added': 0,
488 'deleted': 0,
489 'deleted': 0,
489 'binary': True,
490 'binary': True,
490 'old_mode': '',
491 'old_mode': '',
491 'new_mode': '100644',
492 'new_mode': '100644',
492 'ops': {NEW_FILENODE: 'new file 100644',
493 'ops': {NEW_FILENODE: 'new file 100644',
493 BIN_FILENODE: 'binary diff hidden'}
494 BIN_FILENODE: 'binary diff hidden'}
494 }),
495 }),
495 ]),
496 ]),
496 ('git',
497 ('git',
497 'git_diff_binary_special_files_2.diff',
498 'git_diff_binary_special_files_2.diff',
498 [('css/Icon\\r', 'A',
499 [('css/Icon\\r', 'A',
499 {'added': 0,
500 {'added': 0,
500 'deleted': 0,
501 'deleted': 0,
501 'binary': True,
502 'binary': True,
502 'old_mode': '',
503 'old_mode': '',
503 'new_mode': '100644',
504 'new_mode': '100644',
504 'ops': {NEW_FILENODE: 'new file 100644', }
505 'ops': {NEW_FILENODE: 'new file 100644', }
505 }),
506 }),
506 ]),
507 ]),
507
508
508 ('svn',
509 ('svn',
509 'svn_diff_binary_add_file.diff',
510 'svn_diff_binary_add_file.diff',
510 [('intl.dll', 'A',
511 [('intl.dll', 'A',
511 {'added': 0,
512 {'added': 0,
512 'deleted': 0,
513 'deleted': 0,
513 'binary': False,
514 'binary': False,
514 'old_mode': '',
515 'old_mode': '',
515 'new_mode': '10644',
516 'new_mode': '10644',
516 'ops': {NEW_FILENODE: 'new file 10644',
517 'ops': {NEW_FILENODE: 'new file 10644',
517 #TODO(Marcink): depends on binary detection on svn patches
518 #TODO(Marcink): depends on binary detection on svn patches
518 # BIN_FILENODE: 'binary diff hidden'
519 # BIN_FILENODE: 'binary diff hidden'
519 }
520 }
520 }),
521 }),
521 ]),
522 ]),
522
523
523 ('svn',
524 ('svn',
524 'svn_diff_multiple_changes.diff',
525 'svn_diff_multiple_changes.diff',
525 [('trunk/doc/images/SettingsOverlay.png', 'M',
526 [('trunk/doc/images/SettingsOverlay.png', 'M',
526 {'added': 0,
527 {'added': 0,
527 'deleted': 0,
528 'deleted': 0,
528 'binary': False,
529 'binary': False,
529 'old_mode': '',
530 'old_mode': '',
530 'new_mode': '',
531 'new_mode': '',
531 'ops': {MOD_FILENODE: 'modified file',
532 'ops': {MOD_FILENODE: 'modified file',
532 #TODO(Marcink): depends on binary detection on svn patches
533 #TODO(Marcink): depends on binary detection on svn patches
533 # BIN_FILENODE: 'binary diff hidden'
534 # BIN_FILENODE: 'binary diff hidden'
534 }
535 }
535 }),
536 }),
536 ('trunk/doc/source/de/tsvn_ch04.xml', 'M',
537 ('trunk/doc/source/de/tsvn_ch04.xml', 'M',
537 {'added': 89,
538 {'added': 89,
538 'deleted': 34,
539 'deleted': 34,
539 'binary': False,
540 'binary': False,
540 'old_mode': '',
541 'old_mode': '',
541 'new_mode': '',
542 'new_mode': '',
542 'ops': {MOD_FILENODE: 'modified file'}
543 'ops': {MOD_FILENODE: 'modified file'}
543 }),
544 }),
544 ('trunk/doc/source/en/tsvn_ch04.xml', 'M',
545 ('trunk/doc/source/en/tsvn_ch04.xml', 'M',
545 {'added': 66,
546 {'added': 66,
546 'deleted': 21,
547 'deleted': 21,
547 'binary': False,
548 'binary': False,
548 'old_mode': '',
549 'old_mode': '',
549 'new_mode': '',
550 'new_mode': '',
550 'ops': {MOD_FILENODE: 'modified file'}
551 'ops': {MOD_FILENODE: 'modified file'}
551 }),
552 }),
552 ('trunk/src/Changelog.txt', 'M',
553 ('trunk/src/Changelog.txt', 'M',
553 {'added': 2,
554 {'added': 2,
554 'deleted': 0,
555 'deleted': 0,
555 'binary': False,
556 'binary': False,
556 'old_mode': '',
557 'old_mode': '',
557 'new_mode': '',
558 'new_mode': '',
558 'ops': {MOD_FILENODE: 'modified file'}
559 'ops': {MOD_FILENODE: 'modified file'}
559 }),
560 }),
560 ('trunk/src/Resources/TortoiseProcENG.rc', 'M',
561 ('trunk/src/Resources/TortoiseProcENG.rc', 'M',
561 {'added': 19,
562 {'added': 19,
562 'deleted': 13,
563 'deleted': 13,
563 'binary': False,
564 'binary': False,
564 'old_mode': '',
565 'old_mode': '',
565 'new_mode': '',
566 'new_mode': '',
566 'ops': {MOD_FILENODE: 'modified file'}
567 'ops': {MOD_FILENODE: 'modified file'}
567 }),
568 }),
568 ('trunk/src/TortoiseProc/SetOverlayPage.cpp', 'M',
569 ('trunk/src/TortoiseProc/SetOverlayPage.cpp', 'M',
569 {'added': 16,
570 {'added': 16,
570 'deleted': 1,
571 'deleted': 1,
571 'binary': False,
572 'binary': False,
572 'old_mode': '',
573 'old_mode': '',
573 'new_mode': '',
574 'new_mode': '',
574 'ops': {MOD_FILENODE: 'modified file'}
575 'ops': {MOD_FILENODE: 'modified file'}
575 }),
576 }),
576 ('trunk/src/TortoiseProc/SetOverlayPage.h', 'M',
577 ('trunk/src/TortoiseProc/SetOverlayPage.h', 'M',
577 {'added': 3,
578 {'added': 3,
578 'deleted': 0,
579 'deleted': 0,
579 'binary': False,
580 'binary': False,
580 'old_mode': '',
581 'old_mode': '',
581 'new_mode': '',
582 'new_mode': '',
582 'ops': {MOD_FILENODE: 'modified file'}
583 'ops': {MOD_FILENODE: 'modified file'}
583 }),
584 }),
584 ('trunk/src/TortoiseProc/resource.h', 'M',
585 ('trunk/src/TortoiseProc/resource.h', 'M',
585 {'added': 2,
586 {'added': 2,
586 'deleted': 0,
587 'deleted': 0,
587 'binary': False,
588 'binary': False,
588 'old_mode': '',
589 'old_mode': '',
589 'new_mode': '',
590 'new_mode': '',
590 'ops': {MOD_FILENODE: 'modified file'}
591 'ops': {MOD_FILENODE: 'modified file'}
591 }),
592 }),
592 ('trunk/src/TortoiseShell/ShellCache.h', 'M',
593 ('trunk/src/TortoiseShell/ShellCache.h', 'M',
593 {'added': 50,
594 {'added': 50,
594 'deleted': 1,
595 'deleted': 1,
595 'binary': False,
596 'binary': False,
596 'old_mode': '',
597 'old_mode': '',
597 'new_mode': '',
598 'new_mode': '',
598 'ops': {MOD_FILENODE: 'modified file'}
599 'ops': {MOD_FILENODE: 'modified file'}
599 }),
600 }),
600 ]),
601 ]),
601
602
602 ]
603 ]
603
604
604 DIFF_FIXTURES_WITH_CONTENT = [
605 DIFF_FIXTURES_WITH_CONTENT = [
605 (
606 (
606 'hg', 'hg_diff_single_file_change_newline.diff',
607 'hg', 'hg_diff_single_file_change_newline.diff',
607 [
608 [
608 (
609 (
609 'file_b', # filename
610 'file_b', # filename
610 'A', # change
611 'A', # change
611 { # stats
612 { # stats
612 'added': 1,
613 'added': 1,
613 'deleted': 0,
614 'deleted': 0,
614 'binary': False,
615 'binary': False,
615 'old_mode': '',
616 'old_mode': '',
616 'new_mode': '100644',
617 'new_mode': '100644',
617 'ops': {NEW_FILENODE: 'new file 100644', }
618 'ops': {NEW_FILENODE: 'new file 100644', }
618 },
619 },
619 '@@ -0,0 +1 @@\n+test_content b\n' # diff
620 '@@ -0,0 +1 @@\n+test_content b\n' # diff
620 ),
621 ),
621 ],
622 ],
622 ),
623 ),
623 (
624 (
624 'hg', 'hg_diff_double_file_change_newline.diff',
625 'hg', 'hg_diff_double_file_change_newline.diff',
625 [
626 [
626 (
627 (
627 'file_b', # filename
628 'file_b', # filename
628 'A', # change
629 'A', # change
629 { # stats
630 { # stats
630 'added': 1,
631 'added': 1,
631 'deleted': 0,
632 'deleted': 0,
632 'binary': False,
633 'binary': False,
633 'old_mode': '',
634 'old_mode': '',
634 'new_mode': '100644',
635 'new_mode': '100644',
635 'ops': {NEW_FILENODE: 'new file 100644', }
636 'ops': {NEW_FILENODE: 'new file 100644', }
636 },
637 },
637 '@@ -0,0 +1 @@\n+test_content b\n' # diff
638 '@@ -0,0 +1 @@\n+test_content b\n' # diff
638 ),
639 ),
639 (
640 (
640 'file_c', # filename
641 'file_c', # filename
641 'A', # change
642 'A', # change
642 { # stats
643 { # stats
643 'added': 1,
644 'added': 1,
644 'deleted': 0,
645 'deleted': 0,
645 'binary': False,
646 'binary': False,
646 'old_mode': '',
647 'old_mode': '',
647 'new_mode': '100644',
648 'new_mode': '100644',
648 'ops': {NEW_FILENODE: 'new file 100644', }
649 'ops': {NEW_FILENODE: 'new file 100644', }
649 },
650 },
650 '@@ -0,0 +1 @@\n+test_content c\n' # diff
651 '@@ -0,0 +1 @@\n+test_content c\n' # diff
651 ),
652 ),
652 ],
653 ],
653 ),
654 ),
654 (
655 (
655 'hg', 'hg_diff_double_file_change_double_newline.diff',
656 'hg', 'hg_diff_double_file_change_double_newline.diff',
656 [
657 [
657 (
658 (
658 'file_b', # filename
659 'file_b', # filename
659 'A', # change
660 'A', # change
660 { # stats
661 { # stats
661 'added': 1,
662 'added': 1,
662 'deleted': 0,
663 'deleted': 0,
663 'binary': False,
664 'binary': False,
664 'old_mode': '',
665 'old_mode': '',
665 'new_mode': '100644',
666 'new_mode': '100644',
666 'ops': {NEW_FILENODE: 'new file 100644', }
667 'ops': {NEW_FILENODE: 'new file 100644', }
667 },
668 },
668 '@@ -0,0 +1 @@\n+test_content b\n\n' # diff
669 '@@ -0,0 +1 @@\n+test_content b\n\n' # diff
669 ),
670 ),
670 (
671 (
671 'file_c', # filename
672 'file_c', # filename
672 'A', # change
673 'A', # change
673 { # stats
674 { # stats
674 'added': 1,
675 'added': 1,
675 'deleted': 0,
676 'deleted': 0,
676 'binary': False,
677 'binary': False,
677 'old_mode': '',
678 'old_mode': '',
678 'new_mode': '100644',
679 'new_mode': '100644',
679 'ops': {NEW_FILENODE: 'new file 100644', }
680 'ops': {NEW_FILENODE: 'new file 100644', }
680 },
681 },
681 '@@ -0,0 +1 @@\n+test_content c\n' # diff
682 '@@ -0,0 +1 @@\n+test_content c\n' # diff
682 ),
683 ),
683 ],
684 ],
684 ),
685 ),
685 (
686 (
686 'hg', 'hg_diff_four_file_change_newline.diff',
687 'hg', 'hg_diff_four_file_change_newline.diff',
687 [
688 [
688 (
689 (
689 'file', # filename
690 'file', # filename
690 'A', # change
691 'A', # change
691 { # stats
692 { # stats
692 'added': 1,
693 'added': 1,
693 'deleted': 0,
694 'deleted': 0,
694 'binary': False,
695 'binary': False,
695 'old_mode': '',
696 'old_mode': '',
696 'new_mode': '100644',
697 'new_mode': '100644',
697 'ops': {NEW_FILENODE: 'new file 100644', }
698 'ops': {NEW_FILENODE: 'new file 100644', }
698 },
699 },
699 '@@ -0,0 +1,1 @@\n+file\n' # diff
700 '@@ -0,0 +1,1 @@\n+file\n' # diff
700 ),
701 ),
701 (
702 (
702 'file2', # filename
703 'file2', # filename
703 'A', # change
704 'A', # change
704 { # stats
705 { # stats
705 'added': 1,
706 'added': 1,
706 'deleted': 0,
707 'deleted': 0,
707 'binary': False,
708 'binary': False,
708 'old_mode': '',
709 'old_mode': '',
709 'new_mode': '100644',
710 'new_mode': '100644',
710 'ops': {NEW_FILENODE: 'new file 100644', }
711 'ops': {NEW_FILENODE: 'new file 100644', }
711 },
712 },
712 '@@ -0,0 +1,1 @@\n+another line\n' # diff
713 '@@ -0,0 +1,1 @@\n+another line\n' # diff
713 ),
714 ),
714 (
715 (
715 'file3', # filename
716 'file3', # filename
716 'A', # change
717 'A', # change
717 { # stats
718 { # stats
718 'added': 1,
719 'added': 1,
719 'deleted': 0,
720 'deleted': 0,
720 'binary': False,
721 'binary': False,
721 'old_mode': '',
722 'old_mode': '',
722 'new_mode': '100644',
723 'new_mode': '100644',
723 'ops': {NEW_FILENODE: 'new file 100644', }
724 'ops': {NEW_FILENODE: 'new file 100644', }
724 },
725 },
725 '@@ -0,0 +1,1 @@\n+newline\n' # diff
726 '@@ -0,0 +1,1 @@\n+newline\n' # diff
726 ),
727 ),
727 (
728 (
728 'file4', # filename
729 'file4', # filename
729 'A', # change
730 'A', # change
730 { # stats
731 { # stats
731 'added': 1,
732 'added': 1,
732 'deleted': 0,
733 'deleted': 0,
733 'binary': False,
734 'binary': False,
734 'old_mode': '',
735 'old_mode': '',
735 'new_mode': '100644',
736 'new_mode': '100644',
736 'ops': {NEW_FILENODE: 'new file 100644', }
737 'ops': {NEW_FILENODE: 'new file 100644', }
737 },
738 },
738 '@@ -0,0 +1,1 @@\n+fil4\n\\ No newline at end of file' # diff
739 '@@ -0,0 +1,1 @@\n+fil4\n\\ No newline at end of file' # diff
739 ),
740 ),
740 ],
741 ],
741 ),
742 ),
742
743
743 ]
744 ]
744
745
745
746
746 diff_class = {
747 diff_class = {
747 'git': GitDiff,
748 'git': GitDiff,
748 'hg': MercurialDiff,
749 'hg': MercurialDiff,
749 'svn': SubversionDiff,
750 'svn': SubversionDiff,
750 }
751 }
751
752
752
753
753 @pytest.mark.parametrize('vcs_type, diff_file, expected_data', DIFF_FIXTURES)
754 @pytest.mark.parametrize('vcs_type, diff_file, expected_data', DIFF_FIXTURES)
754 def test_diff_lib(vcs_type, diff_file, expected_data):
755 def test_diff_lib(vcs_type, diff_file, expected_data):
755 diff_txt = fixture.load_resource(diff_file)
756 diff_txt = fixture.load_resource(diff_file)
756 diff = diff_class[vcs_type](diff_txt)
757 diff = diff_class[vcs_type](diff_txt)
757
758
758 diff_proc = DiffProcessor(diff, diff_format='newdiff')
759 diff_proc = DiffProcessor(diff, diff_format='newdiff')
759 diff_proc_d = diff_proc.prepare()
760 diff_proc_d = diff_proc.prepare()
760 data = [(x['filename'], x['operation'], x['stats'])
761 data = [(x['filename'], x['operation'], x['stats'])
761 for x in diff_proc_d]
762 for x in diff_proc_d]
762 assert expected_data == data
763 assert expected_data == data
763
764
764
765
765 @pytest.mark.parametrize('vcs_type, diff_file, expected_data', DIFF_FIXTURES_WITH_CONTENT)
766 @pytest.mark.parametrize('vcs_type, diff_file, expected_data', DIFF_FIXTURES_WITH_CONTENT)
766 def test_diff_lib_newlines(vcs_type, diff_file, expected_data):
767 def test_diff_lib_newlines(vcs_type, diff_file, expected_data):
767 diff_txt = fixture.load_resource(diff_file)
768 diff_txt = fixture.load_resource(diff_file)
768 diff = diff_class[vcs_type](diff_txt)
769 diff = diff_class[vcs_type](diff_txt)
769
770
770 diff_proc = DiffProcessor(diff, diff_format='newdiff')
771 diff_proc = DiffProcessor(diff, diff_format='newdiff')
771 diff_proc_d = diff_proc.prepare()
772 diff_proc_d = diff_proc.prepare()
772 data = [(x['filename'], x['operation'], x['stats'], x['raw_diff'])
773 data = [(x['filename'], x['operation'], x['stats'], x['raw_diff'])
773 for x in diff_proc_d]
774 for x in diff_proc_d]
774 assert expected_data == data
775 assert expected_data == data
775
776
776
777
777 @pytest.mark.parametrize('input_str', [
778 @pytest.mark.parametrize('input_str', [
778 b'',
779 b'',
779 b'\n',
780 b'\n',
780 b'\n\n',
781 b'\n\n',
781 b'First\n+second',
782 b'First\n+second',
782 b'First\n+second\n',
783 b'First\n+second\n',
783
784
784 b'\n\n\n Multi \n\n\n',
785 b'\n\n\n Multi \n\n\n',
785 b'\n\n\n Multi beginning',
786 b'\n\n\n Multi beginning',
786 b'Multi end \n\n\n',
787 b'Multi end \n\n\n',
787 b'Multi end',
788 b'Multi end',
788 b'@@ -0,0 +1 @@\n+test_content \n\n b\n'
789 b'@@ -0,0 +1 @@\n+test_content \n\n b\n'
789 ], ids=no_newline_id_generator)
790 ], ids=no_newline_id_generator)
790 def test_splitlines(input_str):
791 def test_splitlines(input_str):
791 result = DiffProcessor.diff_splitter(input_str)
792 result = DiffProcessor.diff_splitter(input_str)
792 assert list(result) == input_str.splitlines(True)
793 assert list(result) == input_str.splitlines(True)
793
794
794
795
795 def test_diff_over_limit(request):
796 def test_diff_over_limit(request):
796
797
797 diff_limit = 1024
798 diff_limit = 1024
798 file_limit = 1024
799 file_limit = 1024
799
800
800 raw_diff = fixture.load_resource('large_diff.diff')
801 raw_diff = fixture.load_resource('large_diff.diff')
801 vcs_diff = GitDiff(raw_diff)
802 vcs_diff = GitDiff(raw_diff)
802 diff_processor = DiffProcessor(vcs_diff, diff_format='newdiff',
803 diff_processor = DiffProcessor(vcs_diff, diff_format='newdiff',
803 diff_limit=diff_limit, file_limit=file_limit,
804 diff_limit=diff_limit, file_limit=file_limit,
804 show_full_diff=False)
805 show_full_diff=False)
805
806
806 _parsed = diff_processor.prepare()
807 _parsed = diff_processor.prepare()
807
808
808 commit1 = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
809 commit1 = GitCommit(repository=mock.Mock(), raw_id='abcdef12', idx=1)
809 commit2 = GitCommit(repository=mock.Mock(), raw_id='abcdef34', idx=2)
810 commit2 = GitCommit(repository=mock.Mock(), raw_id='abcdef34', idx=2)
810
811
811 diffset = DiffSet(
812 diffset = DiffSet(
812 repo_name='repo_name',
813 repo_name='repo_name',
813 source_node_getter=lambda *a, **kw: AttributeDict({'commit': commit1}),
814 source_node_getter=lambda *a, **kw: AttributeDict({'commit': commit1}),
814 target_node_getter=lambda *a, **kw: AttributeDict({'commit': commit2})
815 target_node_getter=lambda *a, **kw: AttributeDict({'commit': commit2})
815 )
816 )
816
817
817 diffset = diffset.render_patchset(_parsed, commit1, commit2)
818 diffset = diffset.render_patchset(_parsed, commit1, commit2)
818
819
819 assert len(diffset.files) == 2
820 assert len(diffset.files) == 2
820 assert diffset.limited_diff is True
821 assert diffset.limited_diff is True
821 assert diffset.files[0].patch['filename'] == 'example.go'
822 assert diffset.files[0].patch['filename'] == 'example.go'
822 assert diffset.files[0].limited_diff is True
823 assert diffset.files[0].limited_diff is True
823
824
824 assert diffset.files[1].patch['filename'] == 'README.md'
825 assert diffset.files[1].patch['filename'] == 'README.md'
825 assert diffset.files[1].limited_diff is False
826 assert diffset.files[1].limited_diff is False
General Comments 0
You need to be logged in to leave comments. Login now