fix(LFS): fixed the pull_changes method to support a new sync_large_objects flag and updated all code paths that use the new flag. Fixes: RCCE-8
ilin.s -
r5256:6b054b38 default
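The changed hunk itself is not visible in the portion of the diff shown below; only the unchanged top of the API module appears. As a rough sketch of the change described in the message (only the sync_large_objects name comes from the commit message; the signature and body here are illustrative, not the actual diff):

    def pull_changes(self, repo, username, remote_uri=None, validate_uri=True,
                     sync_large_objects=False):
        # Illustrative sketch, not the actual diff: the new boolean flag is
        # accepted by pull_changes and forwarded to the VCS-layer pull so that
        # large objects (Git LFS / Mercurial largefiles) are synced together
        # with regular changesets; callers pass the flag through unchanged.
        vcs_repo = repo.scm_instance()
        vcs_repo.pull(remote_uri, sync_large_objects=sync_large_objects)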
@@ -1,2533 +1,2535 b''
# Copyright (C) 2011-2023 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

import logging
import time

import rhodecode
from rhodecode.api import (
    jsonrpc_method, JSONRPCError, JSONRPCForbidden, JSONRPCValidationError)
from rhodecode.api.utils import (
    has_superadmin_permission, Optional, OAttr, get_repo_or_error,
    get_user_group_or_error, get_user_or_error, validate_repo_permissions,
    get_perm_or_error, parse_args, get_origin, build_commit_data,
    validate_set_owner_permissions)
from rhodecode.lib import audit_logger, rc_cache, channelstream
from rhodecode.lib import repo_maintenance
from rhodecode.lib.auth import (
    HasPermissionAnyApi, HasUserGroupPermissionAnyApi,
    HasRepoPermissionAnyApi)
from rhodecode.lib.celerylib.utils import get_task_id
from rhodecode.lib.utils2 import (
    str2bool, time_to_datetime, safe_str, safe_int)
from rhodecode.lib.ext_json import json
from rhodecode.lib.exceptions import (
    StatusChangeOnClosedPullRequestError, CommentVersionMismatch)
from rhodecode.lib.vcs import RepositoryError
from rhodecode.lib.vcs.exceptions import NodeDoesNotExistError
from rhodecode.model.changeset_status import ChangesetStatusModel
from rhodecode.model.comment import CommentsModel
from rhodecode.model.db import (
    Session, ChangesetStatus, RepositoryField, Repository, RepoGroup,
    ChangesetComment)
from rhodecode.model.permission import PermissionModel
from rhodecode.model.pull_request import PullRequestModel
from rhodecode.model.repo import RepoModel
from rhodecode.model.scm import ScmModel, RepoList
from rhodecode.model.settings import SettingsModel, VcsSettingsModel
from rhodecode.model import validation_schema
from rhodecode.model.validation_schema.schemas import repo_schema

log = logging.getLogger(__name__)


@jsonrpc_method()
def get_repo(request, apiuser, repoid, cache=Optional(True)):
    """
    Gets an existing repository by its name or repository_id.

    The members section of the output returns the user groups or users
    associated with that repository.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id.
    :type repoid: str or int
    :param cache: use the cached value for last changeset
    :type cache: Optional(bool)

    Example output:

    .. code-block:: bash

        {
          "error": null,
          "id": <repo_id>,
          "result": {
            "clone_uri": null,
            "created_on": "timestamp",
            "description": "repo description",
            "enable_downloads": false,
            "enable_locking": false,
            "enable_statistics": false,
            "followers": [
              {
                "active": true,
                "admin": false,
                "api_key": "****************************************",
                "api_keys": [
                  "****************************************"
                ],
                "email": "user@example.com",
                "emails": [
                  "user@example.com"
                ],
                "extern_name": "rhodecode",
                "extern_type": "rhodecode",
                "firstname": "username",
                "ip_addresses": [],
                "language": null,
                "last_login": "2015-09-16T17:16:35.854",
                "lastname": "surname",
                "user_id": <user_id>,
                "username": "name"
              }
            ],
            "fork_of": "parent-repo",
            "landing_rev": [
              "rev",
              "tip"
            ],
            "last_changeset": {
              "author": "User <user@example.com>",
              "branch": "default",
              "date": "timestamp",
              "message": "last commit message",
              "parents": [
                {
                  "raw_id": "commit-id"
                }
              ],
              "raw_id": "commit-id",
              "revision": <revision number>,
              "short_id": "short id"
            },
            "lock_reason": null,
            "locked_by": null,
            "locked_date": null,
            "owner": "owner-name",
            "permissions": [
              {
                "name": "super-admin-name",
                "origin": "super-admin",
                "permission": "repository.admin",
                "type": "user"
              },
              {
                "name": "owner-name",
                "origin": "owner",
                "permission": "repository.admin",
                "type": "user"
              },
              {
                "name": "user-group-name",
                "origin": "permission",
                "permission": "repository.write",
                "type": "user_group"
              }
            ],
            "private": true,
            "repo_id": 676,
            "repo_name": "user-group/repo-name",
            "repo_type": "hg"
          }
        }
    """

    repo = get_repo_or_error(repoid)
    cache = Optional.extract(cache)

    include_secrets = False
    if has_superadmin_permission(apiuser):
        include_secrets = True
    else:
        # check if we have at least read permission for this repo !
        _perms = (
            'repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    permissions = []
    for _user in repo.permissions():
        user_data = {
            'name': _user.username,
            'permission': _user.permission,
            'origin': get_origin(_user),
            'type': "user",
        }
        permissions.append(user_data)

    for _user_group in repo.permission_user_groups():
        user_group_data = {
            'name': _user_group.users_group_name,
            'permission': _user_group.permission,
            'origin': get_origin(_user_group),
            'type': "user_group",
        }
        permissions.append(user_group_data)

    following_users = [
        user.user.get_api_data(include_secrets=include_secrets)
        for user in repo.followers]

    if not cache:
        repo.update_commit_cache()
    data = repo.get_api_data(include_secrets=include_secrets)
    data['permissions'] = permissions
    data['followers'] = following_users

    return data
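# Illustrative only, not part of this module: the JSON-RPC payload a client
# would POST to the instance API endpoint to call get_repo as documented
# above. Endpoint URL and token are placeholders; argument names come from
# the docstring.
#
#   {
#       "id": 1,
#       "auth_token": "<auth_token>",
#       "method": "get_repo",
#       "args": {"repoid": "user-group/repo-name", "cache": true}
#   }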


@jsonrpc_method()
def get_repos(request, apiuser, root=Optional(None), traverse=Optional(True)):
    """
    Lists all existing repositories.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param root: specify root repository group to fetch repositories.
        filters the returned repositories to be members of given root group.
    :type root: Optional(None)
    :param traverse: traverse given root into subrepositories. With this flag
        set to False, it will only return top-level repositories from `root`.
        if root is empty it will return just top-level repositories.
    :type traverse: Optional(True)


    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: [
          {
            "repo_id" : "<repo_id>",
            "repo_name" : "<reponame>"
            "repo_type" : "<repo_type>",
            "clone_uri" : "<clone_uri>",
239 "private": : "<bool>",
239 "private": : "<bool>",
240 "created_on" : "<datetimecreated>",
240 "created_on" : "<datetimecreated>",
241 "description" : "<description>",
241 "description" : "<description>",
242 "landing_rev": "<landing_rev>",
242 "landing_rev": "<landing_rev>",
243 "owner": "<repo_owner>",
243 "owner": "<repo_owner>",
244 "fork_of": "<name_of_fork_parent>",
244 "fork_of": "<name_of_fork_parent>",
245 "enable_downloads": "<bool>",
245 "enable_downloads": "<bool>",
246 "enable_locking": "<bool>",
246 "enable_locking": "<bool>",
247 "enable_statistics": "<bool>",
247 "enable_statistics": "<bool>",
248 },
248 },
249 ...
249 ...
250 ]
250 ]
251 error: null
251 error: null
252 """
252 """
253
253
254 include_secrets = has_superadmin_permission(apiuser)
254 include_secrets = has_superadmin_permission(apiuser)
255 _perms = ('repository.read', 'repository.write', 'repository.admin',)
255 _perms = ('repository.read', 'repository.write', 'repository.admin',)
256 extras = {'user': apiuser}
256 extras = {'user': apiuser}
257
257
258 root = Optional.extract(root)
258 root = Optional.extract(root)
259 traverse = Optional.extract(traverse, binary=True)
259 traverse = Optional.extract(traverse, binary=True)
260
260
261 if root:
261 if root:
        # verify parent existence, if it's empty return an error
        parent = RepoGroup.get_by_group_name(root)
        if not parent:
            raise JSONRPCError(
                f'Root repository group `{root}` does not exist')

        if traverse:
            repos = RepoModel().get_repos_for_root(root=root, traverse=traverse)
        else:
            repos = RepoModel().get_repos_for_root(root=parent)
    else:
        if traverse:
            repos = RepoModel().get_all()
        else:
            # return just top-level
            repos = RepoModel().get_repos_for_root(root=None)

    repo_list = RepoList(repos, perm_set=_perms, extra_kwargs=extras)
    return [repo.get_api_data(include_secrets=include_secrets)
            for repo in repo_list]
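# Illustrative call payload for get_repos (same conventions as the get_repo
# example above): list only the top-level repositories of one root group.
#
#   {
#       "id": 1,
#       "auth_token": "<auth_token>",
#       "method": "get_repos",
#       "args": {"root": "foo/bar", "traverse": false}
#   }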


@jsonrpc_method()
def get_repo_changeset(request, apiuser, repoid, revision,
                       details=Optional('basic')):
    """
    Returns information about a changeset.

    Additional parameters define the amount of details returned by
    this function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository id
    :type repoid: str or int
    :param revision: revision for which listing should be done
    :type revision: str
    :param details: details can be 'basic|extended|full'; 'full' gives diff
        info details like the diff itself, and the number of changed files.
    :type details: Optional(str)

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    vcs_repo = repo.scm_instance()
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    try:
        commit = repo.get_commit(commit_id=revision, pre_load=pre_load)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))
    _cs_json = commit.__json__()
    _cs_json['diff'] = build_commit_data(vcs_repo, commit, changes_details)
    if changes_details == 'full':
        _cs_json['refs'] = commit._get_refs()
    return _cs_json


@jsonrpc_method()
def get_repo_changesets(request, apiuser, repoid, start_rev, limit,
                        details=Optional('basic')):
    """
    Returns a set of commits limited by the number starting
    from the `start_rev` option.

    Additional parameters define the amount of details returned by this
    function.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param start_rev: The starting revision from where to get changesets.
    :type start_rev: str
    :param limit: Limit the number of commits to this amount
    :type limit: str or int
    :param details: Set the level of detail returned. Valid options are:
        ``basic``, ``extended`` and ``full``.
    :type details: Optional(str)

    .. note::

        Setting the parameter `details` to the value ``full`` is extensive
        and returns details like the diff itself, and the number
        of changed files.

    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    changes_details = Optional.extract(details)
    _changes_details_types = ['basic', 'extended', 'full']
    if changes_details not in _changes_details_types:
        raise JSONRPCError(
            'ret_type must be one of %s' % (
                ','.join(_changes_details_types)))

    limit = int(limit)
    pre_load = ['author', 'branch', 'date', 'message', 'parents',
                'status', '_commit', '_file_paths']

    vcs_repo = repo.scm_instance()
    # SVN needs a special case to distinguish its index and commit id
    if vcs_repo and vcs_repo.alias == 'svn' and (start_rev == '0'):
        start_rev = vcs_repo.commit_ids[0]

    try:
        commits = vcs_repo.get_commits(
            start_id=start_rev, pre_load=pre_load, translate_tags=False)
    except TypeError as e:
        raise JSONRPCError(safe_str(e))
    except Exception:
        log.exception('Fetching of commits failed')
        raise JSONRPCError('Error occurred during commit fetching')

    ret = []
    for cnt, commit in enumerate(commits):
        if cnt >= limit != -1:
            break
        _cs_json = commit.__json__()
        _cs_json['diff'] = build_commit_data(vcs_repo, commit, changes_details)
        if changes_details == 'full':
            _cs_json['refs'] = {
                'branches': [commit.branch],
                'bookmarks': getattr(commit, 'bookmarks', []),
                'tags': commit.tags
            }
        ret.append(_cs_json)
    return ret
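# Illustrative call payload for get_repo_changesets: fetch 10 commits starting
# at a given revision with extended details (values are placeholders).
#
#   {
#       "id": 1,
#       "auth_token": "<auth_token>",
#       "method": "get_repo_changesets",
#       "args": {"repoid": "user-group/repo-name", "start_rev": "<commit-id>",
#                "limit": 10, "details": "extended"}
#   }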


@jsonrpc_method()
def get_repo_nodes(request, apiuser, repoid, revision, root_path,
                   ret_type=Optional('all'), details=Optional('basic'),
                   max_file_bytes=Optional(None)):
    """
    Returns a list of nodes and children in a flat list for a given
    path at given revision.

    It's possible to specify ret_type to show only `files` or `dirs`.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param revision: The revision for which listing should be done.
    :type revision: str
    :param root_path: The path from which to start displaying.
    :type root_path: str
    :param ret_type: Set the return type. Valid options are
        ``all`` (default), ``files`` and ``dirs``.
    :type ret_type: Optional(str)
    :param details: Returns extended information about nodes, such as
        md5, binary, and/or content.
        The valid options are ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content for files under this size (in bytes)
    :type max_file_bytes: Optional(int)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: [
          {
            "binary": false,
            "content": "File line",
            "extension": "md",
            "lines": 2,
            "md5": "059fa5d29b19c0657e384749480f6422",
            "mimetype": "text/x-minidsrc",
            "name": "file.md",
            "size": 580,
            "type": "file"
          },
          ...
        ]
        error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    ret_type = Optional.extract(ret_type)
    details = Optional.extract(details)
    max_file_bytes = Optional.extract(max_file_bytes)

    _extended_types = ['basic', 'full']
    if details not in _extended_types:
        ret_types = ','.join(_extended_types)
        raise JSONRPCError(f'ret_type must be one of {ret_types}')

    extended_info = False
    content = False
    if details == 'basic':
        extended_info = True

    if details == 'full':
        extended_info = content = True

    _map = {}
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return []

        _d, _f = ScmModel().get_nodes(
            repo, revision, root_path, flat=False,
            extended_info=extended_info, content=content,
            max_file_bytes=max_file_bytes)

        _map = {
            'all': _d + _f,
            'files': _f,
            'dirs': _d,
        }

        return _map[ret_type]
    except KeyError:
        keys = ','.join(sorted(_map.keys()))
        raise JSONRPCError(f'ret_type must be one of {keys}')
    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` nodes')
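# Illustrative call payload for get_repo_nodes: list only files under the
# repository root at a given revision (values are placeholders).
#
#   {
#       "id": 1,
#       "auth_token": "<auth_token>",
#       "method": "get_repo_nodes",
#       "args": {"repoid": "user-group/repo-name", "revision": "tip",
#                "root_path": "/", "ret_type": "files", "details": "basic"}
#   }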


@jsonrpc_method()
def get_repo_file(request, apiuser, repoid, commit_id, file_path,
                  max_file_bytes=Optional(0), details=Optional('basic'),
                  cache=Optional(True)):
    """
    Returns a single file from repository at given revision.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int
    :param commit_id: The revision for which listing should be done.
    :type commit_id: str
    :param file_path: The path from which to start displaying.
    :type file_path: str
    :param details: Returns different set of information about nodes.
        The valid options are ``minimal``, ``basic`` and ``full``.
    :type details: Optional(str)
    :param max_file_bytes: Only return file content for files under this size (in bytes)
    :type max_file_bytes: Optional(int)
    :param cache: Use internal caches for fetching files. If disabled fetching
        files is slower but more memory efficient
    :type cache: Optional(bool)

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "binary": false,
            "extension": "py",
            "lines": 35,
            "content": "....",
            "md5": "76318336366b0f17ee249e11b0c99c41",
            "mimetype": "text/x-python",
            "name": "python.py",
            "size": 817,
            "type": "file",
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    cache = Optional.extract(cache, binary=True)
    details = Optional.extract(details)
    max_file_bytes = Optional.extract(max_file_bytes)

    _extended_types = ['minimal', 'minimal+search', 'basic', 'full']
    if details not in _extended_types:
        ret_types = ','.join(_extended_types)
        raise JSONRPCError(f'ret_type must be one of {ret_types}, got {details}')
    extended_info = False
    content = False

    if details == 'minimal':
        extended_info = False

    elif details == 'basic':
        extended_info = True

    elif details == 'full':
        extended_info = content = True

    file_path = safe_str(file_path)
    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if _scm.is_empty():
            return None

        node = ScmModel().get_node(
            repo, commit_id, file_path, extended_info=extended_info,
            content=content, max_file_bytes=max_file_bytes, cache=cache)

    except NodeDoesNotExistError:
        raise JSONRPCError(
            f'There is no file in repo: `{repo.repo_name}` at path `{file_path}` for commit: `{commit_id}`')
    except Exception:
        log.exception("Exception occurred while trying to get repo %s file",
                      repo.repo_name)
        raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` file at path {file_path}')

    return node
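# Illustrative call payload for get_repo_file: fetch one file with full
# details, including content (values are placeholders).
#
#   {
#       "id": 1,
#       "auth_token": "<auth_token>",
#       "method": "get_repo_file",
#       "args": {"repoid": "user-group/repo-name", "commit_id": "tip",
#                "file_path": "README.rst", "details": "full"}
#   }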


@jsonrpc_method()
def get_repo_fts_tree(request, apiuser, repoid, commit_id, root_path):
    """
    Returns a list of tree nodes for path at given revision. This api is built
    strictly for usage in full text search building, and shouldn't be consumed
    for other purposes.

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    repo_id = repo.repo_id
    cache_seconds = rhodecode.ConfigGet().get_int('rc_cache.cache_repo.expiration_time')
    cache_on = cache_seconds > 0

    cache_namespace_uid = f'repo.{rc_cache.FILE_TREE_CACHE_VER}.{repo_id}'
    rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)

    def compute_fts_tree(repo_id, commit_id, root_path):
        return ScmModel().get_fts_data(repo_id, commit_id, root_path)

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        _scm = repo.scm_instance()
        if not _scm or _scm.is_empty():
            return []
    except RepositoryError:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError(f'failed to get repo: `{repo.repo_name}` nodes')

    try:
        # we need to resolve commit_id to a FULL sha for cache to work correctly.
        # sending 'master' is a pointer that needs to be translated to current commit.
        commit_id = _scm.get_commit(commit_id=commit_id).raw_id
        log.debug(
            'Computing FTS REPO TREE for repo_id %s commit_id `%s` '
            'with caching: %s[TTL: %ss]' % (
                repo_id, commit_id, cache_on, cache_seconds or 0))

        tree_files = compute_fts_tree(repo_id, commit_id, root_path)

        return tree_files

    except Exception:
        log.exception("Exception occurred while trying to get repo nodes")
        raise JSONRPCError('failed to get repo: `%s` nodes' % repo.repo_name)


@jsonrpc_method()
def get_repo_refs(request, apiuser, repoid):
    """
    Returns a dictionary of current references. It returns
    bookmarks, branches, closed_branches, and tags for given repository

    This command can only be run using an |authtoken| with admin rights,
    or users with at least read rights to |repos|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: The repository name or repository ID.
    :type repoid: str or int

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        "result": {
          "bookmarks": {
            "dev": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
            "master": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
          },
          "branches": {
            "default": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
            "stable": "367f590445081d8ec8c2ea0456e73ae1f1c3d6cf"
          },
          "branches_closed": {},
          "tags": {
            "tip": "5611d30200f4040ba2ab4f3d64e5b06408a02188",
            "v4.4.0": "1232313f9e6adac5ce5399c2a891dc1e72b79022",
            "v4.4.1": "cbb9f1d329ae5768379cdec55a62ebdd546c4e27",
            "v4.4.2": "24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17",
          }
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write', 'repository.read',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        # check if repo is not empty by any chance, skip quicker if it is.
        vcs_instance = repo.scm_instance()
        refs = vcs_instance.refs()
        return refs
    except Exception:
        log.exception("Exception occurred while trying to get repo refs")
        raise JSONRPCError(
            'failed to get repo: `%s` references' % repo.repo_name
        )
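# Illustrative call payload for get_repo_refs (values are placeholders):
#
#   {
#       "id": 1,
#       "auth_token": "<auth_token>",
#       "method": "get_repo_refs",
#       "args": {"repoid": "user-group/repo-name"}
#   }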


@jsonrpc_method()
def create_repo(
        request, apiuser, repo_name, repo_type,
        owner=Optional(OAttr('apiuser')),
        description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None),
        push_uri=Optional(None),
        landing_rev=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False),
        copy_permissions=Optional(False)):
    """
    Creates a repository.

    * If the repository name contains "/", repository will be created inside
      a repository group or nested repository groups

    For example "foo/bar/repo1" will create |repo| called "repo1" inside
    group "foo/bar". You have to have permissions to access and write to
    the last repository group ("bar" in this example)

    This command can only be run using an |authtoken| with at least
    permissions to create repositories, or write permissions to
    parent repository groups.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repo_name: Set the repository name.
    :type repo_name: str
    :param repo_type: Set the repository type; 'hg','git', or 'svn'.
    :type repo_type: str
    :param owner: user_id or username
    :type owner: Optional(str)
    :param description: Set the repository description.
    :type description: Optional(str)
    :param private: set repository as private
    :type private: bool
    :param clone_uri: set clone_uri
    :type clone_uri: str
    :param push_uri: set push_uri
    :type push_uri: str
    :param landing_rev: <rev_type>:<rev>, e.g. branch:default, book:dev, rev:abcd
    :type landing_rev: str
    :param enable_locking:
    :type enable_locking: bool
    :param enable_downloads:
    :type enable_downloads: bool
    :param enable_statistics:
    :type enable_statistics: bool
    :param copy_permissions: Copy permission from group in which the
        repository is being created.
    :type copy_permissions: bool


    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
          "msg": "Created new repository `<reponame>`",
          "success": true,
          "task": "<celery task id or None if done sync>"
        }
        error: null


    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
           'failed to create repository `<repo_name>`'
        }

    """

    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)
    push_uri = Optional.extract(push_uri)

    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    if isinstance(private, Optional):
        private = defs.get('repo_private') or Optional.extract(private)
    if isinstance(repo_type, Optional):
        repo_type = defs.get('repo_type')
    if isinstance(enable_statistics, Optional):
        enable_statistics = defs.get('repo_enable_statistics')
    if isinstance(enable_locking, Optional):
        enable_locking = defs.get('repo_enable_locking')
    if isinstance(enable_downloads, Optional):
        enable_downloads = defs.get('repo_enable_downloads')

    landing_ref, _label = ScmModel.backend_landing_ref(repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
    ref_choices = list(set(ref_choices + [landing_ref]))

    landing_commit_ref = Optional.extract(landing_rev) or landing_ref

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser)

    try:
        schema_data = schema.deserialize(dict(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_push_uri=push_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions,
            repo_enable_statistics=enable_statistics,
            repo_enable_downloads=enable_downloads,
            repo_enable_locking=enable_locking))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        data = {
            'owner': owner,
            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'repo_description': schema_data['repo_description'],
            'repo_private': schema_data['repo_private'],
            'clone_uri': schema_data['repo_clone_uri'],
            'push_uri': schema_data['repo_push_uri'],
            'repo_landing_rev': schema_data['repo_landing_commit_ref'],
            'enable_statistics': schema_data['repo_enable_statistics'],
            'enable_locking': schema_data['repo_enable_locking'],
            'enable_downloads': schema_data['repo_enable_downloads'],
            'repo_copy_permissions': schema_data['repo_copy_permissions'],
        }

        task = RepoModel().create(form_data=data, cur_user=owner.user_id)
        task_id = get_task_id(task)
        # no commit, it's done in RepoModel, or async via celery
        return {
            'msg': "Created new repository `{}`".format(schema_data['repo_name']),
            'success': True,  # cannot return the repo data here since fork
                              # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            "Exception while trying to create the repository %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to create repository `{}`'.format(schema_data['repo_name']))
881
881
882
882
883 @jsonrpc_method()
883 @jsonrpc_method()
def add_field_to_repo(request, apiuser, repoid, key, label=Optional(''),
                      description=Optional('')):
    """
    Adds an extra field to a repository.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository id.
    :type repoid: str or int
    :param key: Create a unique field key for this repository.
    :type key: str
    :param label: Set a display label for the field. Defaults to the key.
    :type label: Optional(str)
    :param description: Set an optional description for the field.
    :type description: Optional(str)
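
    Example output (an illustrative sketch based on the return value of this
    call; `<field_key>` is a placeholder):

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "msg": "Added new repository field `<field_key>`",
            "success": true
        }
        error : null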
    """
    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    label = Optional.extract(label) or key
    description = Optional.extract(description)

    field = RepositoryField.get_by_key_name(key, repo)
    if field:
        raise JSONRPCError(f'Field with key `{key}` exists for repo `{repoid}`')

    try:
        RepoModel().add_repo_field(repo, key, field_label=label,
                                   field_desc=description)
        Session().commit()
        return {
            'msg': f"Added new repository field `{key}`",
            'success': True,
        }
    except Exception:
        log.exception("Exception occurred while trying to add field to repo")
        raise JSONRPCError(
            f'failed to create new field for repository `{repoid}`')


@jsonrpc_method()
def remove_field_from_repo(request, apiuser, repoid, key):
    """
    Removes an extra field from a repository.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param key: Set the unique field key for this repository.
    :type key: str
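
    Example output (an illustrative sketch based on the return value of this
    call; `<field_key>` is a placeholder):

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "msg": "Deleted repository field `<field_key>`",
            "success": true
        }
        error : null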
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    field = RepositoryField.get_by_key_name(key, repo)
    if not field:
        raise JSONRPCError('Field with key `%s` does not '
                           'exist for repo `%s`' % (key, repoid))

    try:
        RepoModel().delete_repo_field(repo, field_key=key)
        Session().commit()
        return {
            'msg': f"Deleted repository field `{key}`",
            'success': True,
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to delete field from repo")
        raise JSONRPCError(
            f'failed to delete field for repository `{repoid}`')


@jsonrpc_method()
def update_repo(
        request, apiuser, repoid, repo_name=Optional(None),
        owner=Optional(OAttr('apiuser')), description=Optional(''),
        private=Optional(False),
        clone_uri=Optional(None), push_uri=Optional(None),
        landing_rev=Optional(None), fork_of=Optional(None),
        enable_statistics=Optional(False),
        enable_locking=Optional(False),
        enable_downloads=Optional(False), fields=Optional('')):
    r"""
    Updates a repository with the given information.

    This command can only be run using an |authtoken| with at least
    admin permissions to the |repo|.

    * If the repository name contains "/", the repository will be updated
      accordingly and placed inside a repository group or nested repository
      groups.

      For example repoid=repo-test name="foo/bar/repo-test" will update the
      |repo| called "repo-test" and place it inside group "foo/bar".
      You have to have permissions to access and write to the last repository
      group ("bar" in this example).

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: repository name or repository ID.
    :type repoid: str or int
    :param repo_name: Update the |repo| name, including the
        repository group it's in.
    :type repo_name: str
    :param owner: Set the |repo| owner.
    :type owner: str
    :param fork_of: Set the |repo| as fork of another |repo|.
    :type fork_of: str
    :param description: Update the |repo| description.
    :type description: str
    :param private: Set the |repo| as private. (True | False)
    :type private: bool
    :param clone_uri: Update the |repo| clone URI.
    :type clone_uri: str
    :param push_uri: Update the |repo| push URI.
    :type push_uri: str
    :param landing_rev: Set the |repo| landing revision. e.g. branch:default, book:dev, rev:abcd
    :type landing_rev: str
    :param enable_statistics: Enable statistics on the |repo|, (True | False).
    :type enable_statistics: bool
    :param enable_locking: Enable |repo| locking.
    :type enable_locking: bool
    :param enable_downloads: Enable downloads from the |repo|, (True | False).
    :type enable_downloads: bool
    :param fields: Add extra fields to the |repo|. Use the following
        example format: ``field_key=field_val,field_key2=fieldval2``.
        Escape ',' with ``\,``.
    :type fields: str
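
    Example output (an illustrative sketch based on the return value of this
    call; the repository id and name are placeholders):

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            "msg": "updated repo ID:<repo_id> <reponame>",
            "repository": <dict with updated repository data>
        }
        error : null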
    """

    repo = get_repo_or_error(repoid)

    include_secrets = False
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)
    else:
        include_secrets = True

    updates = dict(
        repo_name=repo_name
        if not isinstance(repo_name, Optional) else repo.repo_name,

        fork_id=fork_of
        if not isinstance(fork_of, Optional) else repo.fork.repo_name if repo.fork else None,

        user=owner
        if not isinstance(owner, Optional) else repo.user.username,

        repo_description=description
        if not isinstance(description, Optional) else repo.description,

        repo_private=private
        if not isinstance(private, Optional) else repo.private,

        clone_uri=clone_uri
        if not isinstance(clone_uri, Optional) else repo.clone_uri,

        push_uri=push_uri
        if not isinstance(push_uri, Optional) else repo.push_uri,

        repo_landing_rev=landing_rev
        if not isinstance(landing_rev, Optional) else repo._landing_revision,

        repo_enable_statistics=enable_statistics
        if not isinstance(enable_statistics, Optional) else repo.enable_statistics,

        repo_enable_locking=enable_locking
        if not isinstance(enable_locking, Optional) else repo.enable_locking,

        repo_enable_downloads=enable_downloads
        if not isinstance(enable_downloads, Optional) else repo.enable_downloads)

    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(
        request.translate, repo=repo)
    ref_choices = list(set(ref_choices + [landing_ref]))

    old_values = repo.get_api_data()
    repo_type = repo.repo_type
    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo_type,
        # user caller
        user=apiuser,
        old_values=old_values)
    try:
        schema_data = schema.deserialize(dict(
            # we save old value, users cannot change type
            repo_type=repo_type,

            repo_name=updates['repo_name'],
            repo_owner=updates['user'],
            repo_description=updates['repo_description'],
            repo_clone_uri=updates['clone_uri'],
            repo_push_uri=updates['push_uri'],
            repo_fork_of=updates['fork_id'],
            repo_private=updates['repo_private'],
            repo_landing_commit_ref=updates['repo_landing_rev'],
            repo_enable_statistics=updates['repo_enable_statistics'],
            repo_enable_downloads=updates['repo_enable_downloads'],
            repo_enable_locking=updates['repo_enable_locking']))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    # save validated data back into the updates dict
    validated_updates = dict(
        repo_name=schema_data['repo_group']['repo_name_without_group'],
        repo_group=schema_data['repo_group']['repo_group_id'],

        user=schema_data['repo_owner'],
        repo_description=schema_data['repo_description'],
        repo_private=schema_data['repo_private'],
        clone_uri=schema_data['repo_clone_uri'],
        push_uri=schema_data['repo_push_uri'],
        repo_landing_rev=schema_data['repo_landing_commit_ref'],
        repo_enable_statistics=schema_data['repo_enable_statistics'],
        repo_enable_locking=schema_data['repo_enable_locking'],
        repo_enable_downloads=schema_data['repo_enable_downloads'],
    )

    if schema_data['repo_fork_of']:
        fork_repo = get_repo_or_error(schema_data['repo_fork_of'])
        validated_updates['fork_id'] = fork_repo.repo_id

    # extra fields
    fields = parse_args(Optional.extract(fields), key_prefix='ex_')
    if fields:
        validated_updates.update(fields)

    try:
        RepoModel().update(repo, **validated_updates)
        audit_logger.store_api(
            'repo.edit', action_data={'old_data': old_values},
            user=apiuser, repo=repo)
        Session().commit()
        return {
            'msg': f'updated repo ID:{repo.repo_id} {repo.repo_name}',
            'repository': repo.get_api_data(include_secrets=include_secrets)
        }
    except Exception:
        log.exception(
            "Exception while trying to update the repository %s",
            repoid)
        raise JSONRPCError('failed to update repo `%s`' % repoid)


@jsonrpc_method()
def fork_repo(request, apiuser, repoid, fork_name,
              owner=Optional(OAttr('apiuser')),
              description=Optional(''),
              private=Optional(False),
              clone_uri=Optional(None),
              landing_rev=Optional(None),
              copy_permissions=Optional(False)):
    """
    Creates a fork of the specified |repo|.

    * If the fork_name contains "/", the fork will be created inside
      a repository group or nested repository groups.

      For example "foo/bar/fork-repo" will create a fork called "fork-repo"
      inside group "foo/bar". You have to have permissions to access and
      write to the last repository group ("bar" in this example).

    This command can only be run using an |authtoken| with at least
    read permissions on the forked |repo|, and with fork creation
    permission for the user.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set repository name or repository ID.
    :type repoid: str or int
    :param fork_name: Set the fork name, including its repository group membership.
    :type fork_name: str
    :param owner: Set the fork owner.
    :type owner: str
    :param description: Set the fork description.
    :type description: str
    :param copy_permissions: Copy permissions from parent |repo|. The
        default is False.
    :type copy_permissions: bool
    :param private: Make the fork private. The default is False.
    :type private: bool
    :param landing_rev: Set the landing revision. e.g. branch:default, book:dev, rev:abcd

    Example input:

    .. code-block:: bash

        id : <id_for_response>
        api_key : "<api_key>"
        args: {
            "repoid" : "<reponame or repo_id>",
            "fork_name": "<forkname>",
            "owner": "<username or user_id = Optional(=apiuser)>",
            "description": "<description>",
            "copy_permissions": "<bool>",
            "private": "<bool>",
            "landing_rev": "<landing_rev>"
        }

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Created fork of `<reponame>` as `<forkname>`",
            "success": true,
            "task": "<celery task id or None if done sync>"
        }
        error: null

    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name

    if not has_superadmin_permission(apiuser):
        # check if we have at least read permission for
        # this repo that we fork !
        _perms = ('repository.admin', 'repository.write', 'repository.read')
        validate_repo_permissions(apiuser, repoid, repo, _perms)

        # check if the regular user has at least fork permissions as well
        if not HasPermissionAnyApi(PermissionModel.FORKING_ENABLED)(user=apiuser):
            raise JSONRPCForbidden()

    # check if user can set owner parameter
    owner = validate_set_owner_permissions(apiuser, owner)

    description = Optional.extract(description)
    copy_permissions = Optional.extract(copy_permissions)
    clone_uri = Optional.extract(clone_uri)

    landing_ref, _label = ScmModel.backend_landing_ref(repo.repo_type)
    ref_choices, _labels = ScmModel().get_repo_landing_revs(request.translate)
    ref_choices = list(set(ref_choices + [landing_ref]))
    landing_commit_ref = Optional.extract(landing_rev) or landing_ref

    private = Optional.extract(private)

    schema = repo_schema.RepoSchema().bind(
        repo_type_options=rhodecode.BACKENDS.keys(),
        repo_ref_options=ref_choices,
        repo_type=repo.repo_type,
        # user caller
        user=apiuser)

    try:
        schema_data = schema.deserialize(dict(
            repo_name=fork_name,
            repo_type=repo.repo_type,
            repo_owner=owner.username,
            repo_description=description,
            repo_landing_commit_ref=landing_commit_ref,
            repo_clone_uri=clone_uri,
            repo_private=private,
            repo_copy_permissions=copy_permissions))
    except validation_schema.Invalid as err:
        raise JSONRPCValidationError(colander_exc=err)

    try:
        data = {
            'fork_parent_id': repo.repo_id,

            'repo_name': schema_data['repo_group']['repo_name_without_group'],
            'repo_name_full': schema_data['repo_name'],
            'repo_group': schema_data['repo_group']['repo_group_id'],
            'repo_type': schema_data['repo_type'],
            'description': schema_data['repo_description'],
            'private': schema_data['repo_private'],
            'copy_permissions': schema_data['repo_copy_permissions'],
            'landing_rev': schema_data['repo_landing_commit_ref'],
        }

        task = RepoModel().create_fork(data, cur_user=owner.user_id)
        # no commit, it's done in RepoModel, or async via celery
        task_id = get_task_id(task)

        return {
            'msg': 'Created fork of `{}` as `{}`'.format(
                repo.repo_name, schema_data['repo_name']),
            'success': True,  # cannot return the repo data here since fork
            # can be done async
            'task': task_id
        }
    except Exception:
        log.exception(
            "Exception while trying to create fork %s",
            schema_data['repo_name'])
        raise JSONRPCError(
            'failed to fork repository `{}` as `{}`'.format(
                repo_name, schema_data['repo_name']))


@jsonrpc_method()
def delete_repo(request, apiuser, repoid, forks=Optional('')):
    """
    Deletes a repository.

    * When the `forks` parameter is set it's possible to detach or delete
      forks of the deleted repository.

    This command can only be run using an |authtoken| with admin
    permissions on the |repo|.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param forks: Set to `detach` or `delete` forks from the |repo|.
    :type forks: Optional(str)
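
    Example input (an illustrative sketch; the repository name and the `forks`
    value are placeholders):

    .. code-block:: bash

        id : <id_for_response>
        api_key : "<api_key>"
        args: {
            "repoid" : "<reponame or repo_id>",
            "forks" : "<detach or delete>"
        }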

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result: {
            "msg": "Deleted repository `<reponame>`",
            "success": true
        }
        error: null
    """

    repo = get_repo_or_error(repoid)
    repo_name = repo.repo_name
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    try:
        handle_forks = Optional.extract(forks)
        _forks_msg = ''
        _forks = [f for f in repo.forks]
        if handle_forks == 'detach':
            _forks_msg = ' ' + 'Detached %s forks' % len(_forks)
        elif handle_forks == 'delete':
            _forks_msg = ' ' + 'Deleted %s forks' % len(_forks)
        elif _forks:
            raise JSONRPCError(
                'Cannot delete `%s` it still contains attached forks' %
                (repo.repo_name,)
            )
        old_data = repo.get_api_data()
        RepoModel().delete(repo, forks=forks)

        repo = audit_logger.RepoWrap(repo_id=None,
                                     repo_name=repo.repo_name)

        audit_logger.store_api(
            'repo.delete', action_data={'old_data': old_data},
            user=apiuser, repo=repo)

        ScmModel().mark_for_invalidation(repo_name, delete=True)
        Session().commit()
        return {
            'msg': f'Deleted repository `{repo_name}`{_forks_msg}',
            'success': True
        }
    except Exception:
        log.exception("Exception occurred while trying to delete repo")
        raise JSONRPCError(
            f'failed to delete repository `{repo_name}`'
        )


#TODO: marcink, change name ?
@jsonrpc_method()
def invalidate_cache(request, apiuser, repoid, delete_keys=Optional(False)):
    """
    Invalidates the cache for the specified repository.

    This command can only be run using an |authtoken| with at least write
    permissions to the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param delete_keys: This deletes the invalidated keys instead of
        just flagging them.
    :type delete_keys: Optional(``True`` | ``False``)
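
    Example input (an illustrative sketch; the repository name is a
    placeholder):

    .. code-block:: bash

        id : <id_for_response>
        api_key : "<api_key>"
        args: {
            "repoid" : "<reponame or repo_id>",
            "delete_keys" : "<bool>"
        }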

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            'msg': Cache for repository `<repository name>` was invalidated,
            'repository': <repository name>
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'Error occurred during cache invalidation action'
        }

    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    delete = Optional.extract(delete_keys)
    try:
        ScmModel().mark_for_invalidation(repo.repo_name, delete=delete)
        return {
            'msg': f'Cache for repository `{repoid}` was invalidated',
            'repository': repo.repo_name
        }
    except Exception:
        log.exception(
            "Exception occurred while trying to invalidate repo cache")
        raise JSONRPCError(
            'Error occurred during cache invalidation action'
        )


#TODO: marcink, change name ?
@jsonrpc_method()
def lock(request, apiuser, repoid, locked=Optional(None),
         userid=Optional(OAttr('apiuser'))):
    """
    Sets the lock state of the specified |repo| by the given user.
    For more information, see :ref:`repo-locking`.

    * If the ``userid`` option is not set, the repository is locked to the
      user who called the method.
    * If the ``locked`` parameter is not set, the current lock state of the
      repository is displayed.

    This command can only be run using an |authtoken| with at least write
    permissions to the specified repository.

    This command takes the following options:

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Sets the repository name or repository ID.
    :type repoid: str or int
    :param locked: Sets the lock state.
    :type locked: Optional(``True`` | ``False``)
    :param userid: Set the repository lock to this user.
    :type userid: Optional(str or int)
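
    Example input (an illustrative sketch; the repository name, lock state and
    user are placeholders):

    .. code-block:: bash

        id : <id_for_response>
        api_key : "<api_key>"
        args: {
            "repoid" : "<reponame or repo_id>",
            "locked" : "<bool>",
            "userid" : "<username or user_id>"
        }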

    Example output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : {
            'repo': '<reponame>',
            'locked': <bool: lock state>,
            'locked_since': <int: lock timestamp>,
            'locked_by': <username of person who made the lock>,
            'lock_reason': <str: reason for locking>,
            'lock_state_changed': <bool: True if lock state has been changed in this request>,
            'msg': 'Repo `<reponame>` locked by `<username>` on <timestamp>.'
            or
            'msg': 'Repo `<repository name>` not locked.'
            or
            'msg': 'User `<user name>` set lock state for repo `<repository name>` to `<new lock state>`'
        }
        error : null

    Example error output:

    .. code-block:: bash

        id : <id_given_in_input>
        result : null
        error : {
            'Error occurred locking repository `<reponame>`'
        }
    """

    repo = get_repo_or_error(repoid)
    if not has_superadmin_permission(apiuser):
        # check if we have at least write permission for this repo !
        _perms = ('repository.admin', 'repository.write',)
        validate_repo_permissions(apiuser, repoid, repo, _perms)

    # make sure normal user does not pass someone else userid,
    # he is not allowed to do that
    if not isinstance(userid, Optional) and userid != apiuser.user_id:
        raise JSONRPCError('userid is not the same as your user')

    if isinstance(userid, Optional):
        userid = apiuser.user_id

    user = get_user_or_error(userid)

    if isinstance(locked, Optional):
        lockobj = repo.locked

        if lockobj[0] is None:
            _d = {
                'repo': repo.repo_name,
                'locked': False,
                'locked_since': None,
                'locked_by': None,
                'lock_reason': None,
                'lock_state_changed': False,
                'msg': 'Repo `%s` not locked.' % repo.repo_name
            }
            return _d
        else:
            _user_id, _time, _reason = lockobj
            lock_user = get_user_or_error(userid)
            _d = {
                'repo': repo.repo_name,
                'locked': True,
                'locked_since': _time,
                'locked_by': lock_user.username,
                'lock_reason': _reason,
                'lock_state_changed': False,
                'msg': ('Repo `%s` locked by `%s` on `%s`.'
                        % (repo.repo_name, lock_user.username,
                           json.dumps(time_to_datetime(_time))))
            }
            return _d

    # force locked state through a flag
    else:
        locked = str2bool(locked)
        lock_reason = Repository.LOCK_API
        try:
            if locked:
                lock_time = time.time()
                Repository.lock(repo, user.user_id, lock_time, lock_reason)
            else:
                lock_time = None
                Repository.unlock(repo)
            _d = {
                'repo': repo.repo_name,
                'locked': locked,
                'locked_since': lock_time,
                'locked_by': user.username,
                'lock_reason': lock_reason,
                'lock_state_changed': True,
                'msg': ('User `%s` set lock state for repo `%s` to `%s`'
                        % (user.username, repo.repo_name, locked))
            }
            return _d
        except Exception:
            log.exception(
                "Exception occurred while trying to lock repository")
            raise JSONRPCError(
                'Error occurred locking repository `%s`' % repo.repo_name
            )


@jsonrpc_method()
def comment_commit(
        request, apiuser, repoid, commit_id, message, status=Optional(None),
        comment_type=Optional(ChangesetComment.COMMENT_TYPE_NOTE),
        resolves_comment_id=Optional(None), extra_recipients=Optional([]),
        userid=Optional(OAttr('apiuser')), send_email=Optional(True)):
    """
    Set a commit comment, and optionally change the status of the commit.

    :param apiuser: This is filled automatically from the |authtoken|.
    :type apiuser: AuthUser
    :param repoid: Set the repository name or repository ID.
    :type repoid: str or int
    :param commit_id: Specify the commit_id for which to set a comment.
    :type commit_id: str
    :param message: The comment text.
    :type message: str
    :param status: (**Optional**) status of commit, one of: 'not_reviewed',
        'approved', 'rejected', 'under_review'
    :type status: str
    :param comment_type: Comment type, one of: 'note', 'todo'
    :type comment_type: Optional(str), default: 'note'
    :param resolves_comment_id: id of comment which this one will resolve
    :type resolves_comment_id: Optional(int)
    :param extra_recipients: list of user ids or usernames to add
        notifications for this comment. Acts like a CC for notification
    :type extra_recipients: Optional(list)
    :param userid: Set the user name of the comment creator.
    :type userid: Optional(str or int)
    :param send_email: Define if this comment should also send email notification
    :type send_email: Optional(bool)
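
    Example input (an illustrative sketch; the repository name, commit id and
    message are placeholders):

    .. code-block:: bash

        id : <id_for_response>
        api_key : "<api_key>"
        args: {
            "repoid" : "<reponame or repo_id>",
            "commit_id" : "<commit_id>",
            "message" : "<message>",
            "status" : "<status, e.g. approved>"
        }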
1590
1590
1591 Example error output:
1591 Example error output:
1592
1592
1593 .. code-block:: bash
1593 .. code-block:: bash
1594
1594
1595 {
1595 {
1596 "id" : <id_given_in_input>,
1596 "id" : <id_given_in_input>,
1597 "result" : {
1597 "result" : {
1598 "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
1598 "msg": "Commented on commit `<commit_id>` for repository `<repoid>`",
1599 "status_change": null or <status>,
1599 "status_change": null or <status>,
1600 "success": true
1600 "success": true
1601 },
1601 },
1602 "error" : null
1602 "error" : null
1603 }
1603 }
1604
1604
1605 """
1605 """
1606 _ = request.translate
1606 _ = request.translate
1607
1607
1608 repo = get_repo_or_error(repoid)
1608 repo = get_repo_or_error(repoid)
1609 if not has_superadmin_permission(apiuser):
1609 if not has_superadmin_permission(apiuser):
1610 _perms = ('repository.read', 'repository.write', 'repository.admin')
1610 _perms = ('repository.read', 'repository.write', 'repository.admin')
1611 validate_repo_permissions(apiuser, repoid, repo, _perms)
1611 validate_repo_permissions(apiuser, repoid, repo, _perms)
1612 db_repo_name = repo.repo_name
1612 db_repo_name = repo.repo_name
1613
1613
1614 try:
1614 try:
1615 commit = repo.scm_instance().get_commit(commit_id=commit_id)
1615 commit = repo.scm_instance().get_commit(commit_id=commit_id)
1616 commit_id = commit.raw_id
1616 commit_id = commit.raw_id
1617 except Exception as e:
1617 except Exception as e:
1618 log.exception('Failed to fetch commit')
1618 log.exception('Failed to fetch commit')
1619 raise JSONRPCError(safe_str(e))
1619 raise JSONRPCError(safe_str(e))
1620
1620
1621 if isinstance(userid, Optional):
1621 if isinstance(userid, Optional):
1622 userid = apiuser.user_id
1622 userid = apiuser.user_id
1623
1623
1624 user = get_user_or_error(userid)
1624 user = get_user_or_error(userid)
1625 status = Optional.extract(status)
1625 status = Optional.extract(status)
1626 comment_type = Optional.extract(comment_type)
1626 comment_type = Optional.extract(comment_type)
1627 resolves_comment_id = Optional.extract(resolves_comment_id)
1627 resolves_comment_id = Optional.extract(resolves_comment_id)
1628 extra_recipients = Optional.extract(extra_recipients)
1628 extra_recipients = Optional.extract(extra_recipients)
1629 send_email = Optional.extract(send_email, binary=True)
1629 send_email = Optional.extract(send_email, binary=True)
1630
1630
1631 allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
1631 allowed_statuses = [x[0] for x in ChangesetStatus.STATUSES]
1632 if status and status not in allowed_statuses:
1632 if status and status not in allowed_statuses:
1633 raise JSONRPCError('Bad status, must be on '
1633 raise JSONRPCError('Bad status, must be on '
1634 'of %s got %s' % (allowed_statuses, status,))
1634 'of %s got %s' % (allowed_statuses, status,))
1635
1635
1636 if resolves_comment_id:
1636 if resolves_comment_id:
1637 comment = ChangesetComment.get(resolves_comment_id)
1637 comment = ChangesetComment.get(resolves_comment_id)
1638 if not comment:
1638 if not comment:
1639 raise JSONRPCError(
1639 raise JSONRPCError(
1640 'Invalid resolves_comment_id `%s` for this commit.'
1640 'Invalid resolves_comment_id `%s` for this commit.'
1641 % resolves_comment_id)
1641 % resolves_comment_id)
1642 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
1642 if comment.comment_type != ChangesetComment.COMMENT_TYPE_TODO:
1643 raise JSONRPCError(
1643 raise JSONRPCError(
1644 'Comment `%s` is wrong type for setting status to resolved.'
1644 'Comment `%s` is wrong type for setting status to resolved.'
1645 % resolves_comment_id)
1645 % resolves_comment_id)
1646
1646
1647 try:
1647 try:
1648 rc_config = SettingsModel().get_all_settings()
1648 rc_config = SettingsModel().get_all_settings()
1649 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
1649 renderer = rc_config.get('rhodecode_markup_renderer', 'rst')
1650 status_change_label = ChangesetStatus.get_status_lbl(status)
1650 status_change_label = ChangesetStatus.get_status_lbl(status)
1651 comment = CommentsModel().create(
1651 comment = CommentsModel().create(
1652 message, repo, user, commit_id=commit_id,
1652 message, repo, user, commit_id=commit_id,
1653 status_change=status_change_label,
1653 status_change=status_change_label,
1654 status_change_type=status,
1654 status_change_type=status,
1655 renderer=renderer,
1655 renderer=renderer,
1656 comment_type=comment_type,
1656 comment_type=comment_type,
1657 resolves_comment_id=resolves_comment_id,
1657 resolves_comment_id=resolves_comment_id,
1658 auth_user=apiuser,
1658 auth_user=apiuser,
1659 extra_recipients=extra_recipients,
1659 extra_recipients=extra_recipients,
1660 send_email=send_email
1660 send_email=send_email
1661 )
1661 )
1662 is_inline = comment.is_inline
1662 is_inline = comment.is_inline
1663
1663
1664 if status:
1664 if status:
1665 # also do a status change
1665 # also do a status change
1666 try:
1666 try:
1667 ChangesetStatusModel().set_status(
1667 ChangesetStatusModel().set_status(
1668 repo, status, user, comment, revision=commit_id,
1668 repo, status, user, comment, revision=commit_id,
1669 dont_allow_on_closed_pull_request=True
1669 dont_allow_on_closed_pull_request=True
1670 )
1670 )
1671 except StatusChangeOnClosedPullRequestError:
1671 except StatusChangeOnClosedPullRequestError:
1672 log.exception(
1672 log.exception(
1673 "Exception occurred while trying to change repo commit status")
1673 "Exception occurred while trying to change repo commit status")
1674 msg = ('Changing status on a commit associated with '
1674 msg = ('Changing status on a commit associated with '
1675 'a closed pull request is not allowed')
1675 'a closed pull request is not allowed')
1676 raise JSONRPCError(msg)
1676 raise JSONRPCError(msg)
1677
1677
1678 CommentsModel().trigger_commit_comment_hook(
1678 CommentsModel().trigger_commit_comment_hook(
1679 repo, apiuser, 'create',
1679 repo, apiuser, 'create',
1680 data={'comment': comment, 'commit': commit})
1680 data={'comment': comment, 'commit': commit})
1681
1681
1682 Session().commit()
1682 Session().commit()
1683
1683
1684 comment_broadcast_channel = channelstream.comment_channel(
1684 comment_broadcast_channel = channelstream.comment_channel(
1685 db_repo_name, commit_obj=commit)
1685 db_repo_name, commit_obj=commit)
1686
1686
1687 comment_data = {'comment': comment, 'comment_id': comment.comment_id}
1687 comment_data = {'comment': comment, 'comment_id': comment.comment_id}
1688 comment_type = 'inline' if is_inline else 'general'
1688 comment_type = 'inline' if is_inline else 'general'
1689 channelstream.comment_channelstream_push(
1689 channelstream.comment_channelstream_push(
1690 request, comment_broadcast_channel, apiuser,
1690 request, comment_broadcast_channel, apiuser,
1691 _('posted a new {} comment').format(comment_type),
1691 _('posted a new {} comment').format(comment_type),
1692 comment_data=comment_data)
1692 comment_data=comment_data)
1693
1693
1694 return {
1694 return {
1695 'msg': (
1695 'msg': (
1696 'Commented on commit `{}` for repository `{}`'.format(
1696 'Commented on commit `{}` for repository `{}`'.format(
1697 comment.revision, repo.repo_name)),
1697 comment.revision, repo.repo_name)),
1698 'status_change': status,
1698 'status_change': status,
1699 'success': True,
1699 'success': True,
1700 }
1700 }
1701 except JSONRPCError:
1701 except JSONRPCError:
1702 # catch any inner errors and re-raise them to prevent the
1702 # catch any inner errors and re-raise them to prevent the
1703 # global catch below from silencing them
1703 # global catch below from silencing them
1704 raise
1704 raise
1705 except Exception:
1705 except Exception:
1706 log.exception("Exception occurred while trying to comment on commit")
1706 log.exception("Exception occurred while trying to comment on commit")
1707 raise JSONRPCError(
1707 raise JSONRPCError(
1708 f'failed to set comment on repository `{repo.repo_name}`'
1708 f'failed to set comment on repository `{repo.repo_name}`'
1709 )
1709 )
1710
1710
1711
1711
1712 @jsonrpc_method()
1712 @jsonrpc_method()
1713 def get_repo_comments(request, apiuser, repoid,
1713 def get_repo_comments(request, apiuser, repoid,
1714 commit_id=Optional(None), comment_type=Optional(None),
1714 commit_id=Optional(None), comment_type=Optional(None),
1715 userid=Optional(None)):
1715 userid=Optional(None)):
1716 """
1716 """
1717 Get all comments for a repository
1717 Get all comments for a repository
1718
1718
1719 :param apiuser: This is filled automatically from the |authtoken|.
1719 :param apiuser: This is filled automatically from the |authtoken|.
1720 :type apiuser: AuthUser
1720 :type apiuser: AuthUser
1721 :param repoid: Set the repository name or repository ID.
1721 :param repoid: Set the repository name or repository ID.
1722 :type repoid: str or int
1722 :type repoid: str or int
1723 :param commit_id: Optionally filter the comments by the commit_id
1723 :param commit_id: Optionally filter the comments by the commit_id
1724 :type commit_id: Optional(str), default: None
1724 :type commit_id: Optional(str), default: None
1725 :param comment_type: Optionally filter the comments by the comment_type
1725 :param comment_type: Optionally filter the comments by the comment_type
1726 one of: 'note', 'todo'
1726 one of: 'note', 'todo'
1727 :type comment_type: Optional(str), default: None
1727 :type comment_type: Optional(str), default: None
1728 :param userid: Optionally filter the comments by the author of comment
1728 :param userid: Optionally filter the comments by the author of comment
1729 :type userid: Optional(str or int), Default: None
1729 :type userid: Optional(str or int), Default: None
1730
1730
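Example usage (a sketch of the JSON-RPC request body for this method; the repository name, token, and id values below are placeholders):

.. code-block:: bash

    {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "get_repo_comments",
        "args": {
            "repoid": "my-repo",
            "comment_type": "todo"
        }
    }
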
1731 Example output:
1731 Example output:
1732
1732
1733 .. code-block:: bash
1733 .. code-block:: bash
1734
1734
1735 {
1735 {
1736 "id" : <id_given_in_input>,
1736 "id" : <id_given_in_input>,
1737 "result" : [
1737 "result" : [
1738 {
1738 {
1739 "comment_author": <USER_DETAILS>,
1739 "comment_author": <USER_DETAILS>,
1740 "comment_created_on": "2017-02-01T14:38:16.309",
1740 "comment_created_on": "2017-02-01T14:38:16.309",
1741 "comment_f_path": "file.txt",
1741 "comment_f_path": "file.txt",
1742 "comment_id": 282,
1742 "comment_id": 282,
1743 "comment_lineno": "n1",
1743 "comment_lineno": "n1",
1744 "comment_resolved_by": null,
1744 "comment_resolved_by": null,
1745 "comment_status": [],
1745 "comment_status": [],
1746 "comment_text": "This file needs a header",
1746 "comment_text": "This file needs a header",
1747 "comment_type": "todo",
1747 "comment_type": "todo",
1748 "comment_last_version: 0
1748 "comment_last_version: 0
1749 }
1749 }
1750 ],
1750 ],
1751 "error" : null
1751 "error" : null
1752 }
1752 }
1753
1753
1754 """
1754 """
1755 repo = get_repo_or_error(repoid)
1755 repo = get_repo_or_error(repoid)
1756 if not has_superadmin_permission(apiuser):
1756 if not has_superadmin_permission(apiuser):
1757 _perms = ('repository.read', 'repository.write', 'repository.admin')
1757 _perms = ('repository.read', 'repository.write', 'repository.admin')
1758 validate_repo_permissions(apiuser, repoid, repo, _perms)
1758 validate_repo_permissions(apiuser, repoid, repo, _perms)
1759
1759
1760 commit_id = Optional.extract(commit_id)
1760 commit_id = Optional.extract(commit_id)
1761
1761
1762 userid = Optional.extract(userid)
1762 userid = Optional.extract(userid)
1763 if userid:
1763 if userid:
1764 user = get_user_or_error(userid)
1764 user = get_user_or_error(userid)
1765 else:
1765 else:
1766 user = None
1766 user = None
1767
1767
1768 comment_type = Optional.extract(comment_type)
1768 comment_type = Optional.extract(comment_type)
1769 if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
1769 if comment_type and comment_type not in ChangesetComment.COMMENT_TYPES:
1770 raise JSONRPCError(
1770 raise JSONRPCError(
1771 'comment_type must be one of `{}` got {}'.format(
1771 'comment_type must be one of `{}` got {}'.format(
1772 ChangesetComment.COMMENT_TYPES, comment_type)
1772 ChangesetComment.COMMENT_TYPES, comment_type)
1773 )
1773 )
1774
1774
1775 comments = CommentsModel().get_repository_comments(
1775 comments = CommentsModel().get_repository_comments(
1776 repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
1776 repo=repo, comment_type=comment_type, user=user, commit_id=commit_id)
1777 return comments
1777 return comments
1778
1778
1779
1779
1780 @jsonrpc_method()
1780 @jsonrpc_method()
1781 def get_comment(request, apiuser, comment_id):
1781 def get_comment(request, apiuser, comment_id):
1782 """
1782 """
1783 Get single comment from repository or pull_request
1783 Get single comment from repository or pull_request
1784
1784
1785 :param apiuser: This is filled automatically from the |authtoken|.
1785 :param apiuser: This is filled automatically from the |authtoken|.
1786 :type apiuser: AuthUser
1786 :type apiuser: AuthUser
1787 :param comment_id: comment id found in the URL of comment
1787 :param comment_id: comment id found in the URL of comment
1788 :type comment_id: str or int
1788 :type comment_id: str or int
1789
1789
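Example usage (a sketch of the JSON-RPC request payload; the comment id shown is a placeholder):

.. code-block:: bash

    {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "get_comment",
        "args": {
            "comment_id": 282
        }
    }
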
1790 Example output:
1790 Example output:
1791
1791
1792 .. code-block:: bash
1792 .. code-block:: bash
1793
1793
1794 {
1794 {
1795 "id" : <id_given_in_input>,
1795 "id" : <id_given_in_input>,
1796 "result" : {
1796 "result" : {
1797 "comment_author": <USER_DETAILS>,
1797 "comment_author": <USER_DETAILS>,
1798 "comment_created_on": "2017-02-01T14:38:16.309",
1798 "comment_created_on": "2017-02-01T14:38:16.309",
1799 "comment_f_path": "file.txt",
1799 "comment_f_path": "file.txt",
1800 "comment_id": 282,
1800 "comment_id": 282,
1801 "comment_lineno": "n1",
1801 "comment_lineno": "n1",
1802 "comment_resolved_by": null,
1802 "comment_resolved_by": null,
1803 "comment_status": [],
1803 "comment_status": [],
1804 "comment_text": "This file needs a header",
1804 "comment_text": "This file needs a header",
1805 "comment_type": "todo",
1805 "comment_type": "todo",
1806 "comment_last_version: 0
1806 "comment_last_version: 0
1807 },
1807 },
1808 "error" : null
1808 "error" : null
1809 }
1809 }
1810
1810
1811 """
1811 """
1812
1812
1813 comment = ChangesetComment.get(comment_id)
1813 comment = ChangesetComment.get(comment_id)
1814 if not comment:
1814 if not comment:
1815 raise JSONRPCError(f'comment `{comment_id}` does not exist')
1815 raise JSONRPCError(f'comment `{comment_id}` does not exist')
1816
1816
1817 perms = ('repository.read', 'repository.write', 'repository.admin')
1817 perms = ('repository.read', 'repository.write', 'repository.admin')
1818 has_comment_perm = HasRepoPermissionAnyApi(*perms)\
1818 has_comment_perm = HasRepoPermissionAnyApi(*perms)\
1819 (user=apiuser, repo_name=comment.repo.repo_name)
1819 (user=apiuser, repo_name=comment.repo.repo_name)
1820
1820
1821 if not has_comment_perm:
1821 if not has_comment_perm:
1822 raise JSONRPCError(f'comment `{comment_id}` does not exist')
1822 raise JSONRPCError(f'comment `{comment_id}` does not exist')
1823
1823
1824 return comment
1824 return comment
1825
1825
1826
1826
1827 @jsonrpc_method()
1827 @jsonrpc_method()
1828 def edit_comment(request, apiuser, message, comment_id, version,
1828 def edit_comment(request, apiuser, message, comment_id, version,
1829 userid=Optional(OAttr('apiuser'))):
1829 userid=Optional(OAttr('apiuser'))):
1830 """
1830 """
1831 Edit a comment on a pull request or commit,
1831 Edit a comment on a pull request or commit,
1832 specified by `comment_id` and `version`. The initial version is 0.
1832 specified by `comment_id` and `version`. The initial version is 0.
1833
1833
1834 :param apiuser: This is filled automatically from the |authtoken|.
1834 :param apiuser: This is filled automatically from the |authtoken|.
1835 :type apiuser: AuthUser
1835 :type apiuser: AuthUser
1836 :param comment_id: Specify the comment_id for editing
1836 :param comment_id: Specify the comment_id for editing
1837 :type comment_id: int
1837 :type comment_id: int
1838 :param version: version of the comment that will be created, starts from 0
1838 :param version: version of the comment that will be created, starts from 0
1839 :type version: int
1839 :type version: int
1840 :param message: The text content of the comment.
1840 :param message: The text content of the comment.
1841 :type message: str
1841 :type message: str
1842 :param userid: Comment on the pull request as this user
1842 :param userid: Comment on the pull request as this user
1843 :type userid: Optional(str or int)
1843 :type userid: Optional(str or int)
1844
1844
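Example usage (a sketch of the JSON-RPC request body; the comment id, version, and message text are placeholders):

.. code-block:: bash

    {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "edit_comment",
        "args": {
            "comment_id": 282,
            "version": 0,
            "message": "Updated: this file needs a license header"
        }
    }
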
1845 Example output:
1845 Example output:
1846
1846
1847 .. code-block:: bash
1847 .. code-block:: bash
1848
1848
1849 id : <id_given_in_input>
1849 id : <id_given_in_input>
1850 result : {
1850 result : {
1851 "comment": "<comment data>",
1851 "comment": "<comment data>",
1852 "version": "<Integer>",
1852 "version": "<Integer>",
1853 },
1853 },
1854 error : null
1854 error : null
1855 """
1855 """
1856
1856
1857 auth_user = apiuser
1857 auth_user = apiuser
1858 comment = ChangesetComment.get(comment_id)
1858 comment = ChangesetComment.get(comment_id)
1859 if not comment:
1859 if not comment:
1860 raise JSONRPCError(f'comment `{comment_id}` does not exist')
1860 raise JSONRPCError(f'comment `{comment_id}` does not exist')
1861
1861
1862 is_super_admin = has_superadmin_permission(apiuser)
1862 is_super_admin = has_superadmin_permission(apiuser)
1863 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1863 is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1864 (user=apiuser, repo_name=comment.repo.repo_name)
1864 (user=apiuser, repo_name=comment.repo.repo_name)
1865
1865
1866 if not isinstance(userid, Optional):
1866 if not isinstance(userid, Optional):
1867 if is_super_admin or is_repo_admin:
1867 if is_super_admin or is_repo_admin:
1868 apiuser = get_user_or_error(userid)
1868 apiuser = get_user_or_error(userid)
1869 auth_user = apiuser.AuthUser()
1869 auth_user = apiuser.AuthUser()
1870 else:
1870 else:
1871 raise JSONRPCError('userid is not the same as your user')
1871 raise JSONRPCError('userid is not the same as your user')
1872
1872
1873 comment_author = comment.author.user_id == auth_user.user_id
1873 comment_author = comment.author.user_id == auth_user.user_id
1874
1874
1875 if comment.immutable:
1875 if comment.immutable:
1876 raise JSONRPCError("Immutable comment cannot be edited")
1876 raise JSONRPCError("Immutable comment cannot be edited")
1877
1877
1878 if not (is_super_admin or is_repo_admin or comment_author):
1878 if not (is_super_admin or is_repo_admin or comment_author):
1879 raise JSONRPCError("you don't have access to edit this comment")
1879 raise JSONRPCError("you don't have access to edit this comment")
1880
1880
1881 try:
1881 try:
1882 comment_history = CommentsModel().edit(
1882 comment_history = CommentsModel().edit(
1883 comment_id=comment_id,
1883 comment_id=comment_id,
1884 text=message,
1884 text=message,
1885 auth_user=auth_user,
1885 auth_user=auth_user,
1886 version=version,
1886 version=version,
1887 )
1887 )
1888 Session().commit()
1888 Session().commit()
1889 except CommentVersionMismatch:
1889 except CommentVersionMismatch:
1890 raise JSONRPCError(
1890 raise JSONRPCError(
1891 f'comment ({comment_id}) version ({version}) mismatch'
1891 f'comment ({comment_id}) version ({version}) mismatch'
1892 )
1892 )
1893 if not comment_history and not message:
1893 if not comment_history and not message:
1894 raise JSONRPCError(
1894 raise JSONRPCError(
1895 f"comment ({comment_id}) can't be changed with empty string"
1895 f"comment ({comment_id}) can't be changed with empty string"
1896 )
1896 )
1897
1897
1898 if comment.pull_request:
1898 if comment.pull_request:
1899 pull_request = comment.pull_request
1899 pull_request = comment.pull_request
1900 PullRequestModel().trigger_pull_request_hook(
1900 PullRequestModel().trigger_pull_request_hook(
1901 pull_request, apiuser, 'comment_edit',
1901 pull_request, apiuser, 'comment_edit',
1902 data={'comment': comment})
1902 data={'comment': comment})
1903 else:
1903 else:
1904 db_repo = comment.repo
1904 db_repo = comment.repo
1905 commit_id = comment.revision
1905 commit_id = comment.revision
1906 commit = db_repo.get_commit(commit_id)
1906 commit = db_repo.get_commit(commit_id)
1907 CommentsModel().trigger_commit_comment_hook(
1907 CommentsModel().trigger_commit_comment_hook(
1908 db_repo, apiuser, 'edit',
1908 db_repo, apiuser, 'edit',
1909 data={'comment': comment, 'commit': commit})
1909 data={'comment': comment, 'commit': commit})
1910
1910
1911 data = {
1911 data = {
1912 'comment': comment,
1912 'comment': comment,
1913 'version': comment_history.version if comment_history else None,
1913 'version': comment_history.version if comment_history else None,
1914 }
1914 }
1915 return data
1915 return data
1916
1916
1917
1917
1918 # TODO(marcink): write this with all required logic for deleting comments in PRs or commits
1918 # TODO(marcink): write this with all required logic for deleting comments in PRs or commits
1919 # @jsonrpc_method()
1919 # @jsonrpc_method()
1920 # def delete_comment(request, apiuser, comment_id):
1920 # def delete_comment(request, apiuser, comment_id):
1921 # auth_user = apiuser
1921 # auth_user = apiuser
1922 #
1922 #
1923 # comment = ChangesetComment.get(comment_id)
1923 # comment = ChangesetComment.get(comment_id)
1924 # if not comment:
1924 # if not comment:
1925 # raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1925 # raise JSONRPCError('comment `%s` does not exist' % (comment_id,))
1926 #
1926 #
1927 # is_super_admin = has_superadmin_permission(apiuser)
1927 # is_super_admin = has_superadmin_permission(apiuser)
1928 # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1928 # is_repo_admin = HasRepoPermissionAnyApi('repository.admin')\
1929 # (user=apiuser, repo_name=comment.repo.repo_name)
1929 # (user=apiuser, repo_name=comment.repo.repo_name)
1930 #
1930 #
1931 # comment_author = comment.author.user_id == auth_user.user_id
1931 # comment_author = comment.author.user_id == auth_user.user_id
1932 # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1932 # if not (comment.immutable is False and (is_super_admin or is_repo_admin) or comment_author):
1933 # raise JSONRPCError("you don't have access to edit this comment")
1933 # raise JSONRPCError("you don't have access to edit this comment")
1934
1934
1935 @jsonrpc_method()
1935 @jsonrpc_method()
1936 def grant_user_permission(request, apiuser, repoid, userid, perm):
1936 def grant_user_permission(request, apiuser, repoid, userid, perm):
1937 """
1937 """
1938 Grant permissions for the specified user on the given repository,
1938 Grant permissions for the specified user on the given repository,
1939 or update existing permissions if found.
1939 or update existing permissions if found.
1940
1940
1941 This command can only be run using an |authtoken| with admin
1941 This command can only be run using an |authtoken| with admin
1942 permissions on the |repo|.
1942 permissions on the |repo|.
1943
1943
1944 :param apiuser: This is filled automatically from the |authtoken|.
1944 :param apiuser: This is filled automatically from the |authtoken|.
1945 :type apiuser: AuthUser
1945 :type apiuser: AuthUser
1946 :param repoid: Set the repository name or repository ID.
1946 :param repoid: Set the repository name or repository ID.
1947 :type repoid: str or int
1947 :type repoid: str or int
1948 :param userid: Set the user name.
1948 :param userid: Set the user name.
1949 :type userid: str
1949 :type userid: str
1950 :param perm: Set the user permissions, using the following format
1950 :param perm: Set the user permissions, using the following format
1951 ``(repository.(none|read|write|admin))``
1951 ``(repository.(none|read|write|admin))``
1952 :type perm: str
1952 :type perm: str
1953
1953
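Example usage (a sketch of the JSON-RPC request payload; the repository name and user name are placeholders):

.. code-block:: bash

    {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "grant_user_permission",
        "args": {
            "repoid": "my-repo",
            "userid": "some-user",
            "perm": "repository.write"
        }
    }
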
1954 Example output:
1954 Example output:
1955
1955
1956 .. code-block:: bash
1956 .. code-block:: bash
1957
1957
1958 id : <id_given_in_input>
1958 id : <id_given_in_input>
1959 result: {
1959 result: {
1960 "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
1960 "msg" : "Granted perm: `<perm>` for user: `<username>` in repo: `<reponame>`",
1961 "success": true
1961 "success": true
1962 }
1962 }
1963 error: null
1963 error: null
1964 """
1964 """
1965
1965
1966 repo = get_repo_or_error(repoid)
1966 repo = get_repo_or_error(repoid)
1967 user = get_user_or_error(userid)
1967 user = get_user_or_error(userid)
1968 perm = get_perm_or_error(perm)
1968 perm = get_perm_or_error(perm)
1969 if not has_superadmin_permission(apiuser):
1969 if not has_superadmin_permission(apiuser):
1970 _perms = ('repository.admin',)
1970 _perms = ('repository.admin',)
1971 validate_repo_permissions(apiuser, repoid, repo, _perms)
1971 validate_repo_permissions(apiuser, repoid, repo, _perms)
1972
1972
1973 perm_additions = [[user.user_id, perm.permission_name, "user"]]
1973 perm_additions = [[user.user_id, perm.permission_name, "user"]]
1974 try:
1974 try:
1975 changes = RepoModel().update_permissions(
1975 changes = RepoModel().update_permissions(
1976 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
1976 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
1977
1977
1978 action_data = {
1978 action_data = {
1979 'added': changes['added'],
1979 'added': changes['added'],
1980 'updated': changes['updated'],
1980 'updated': changes['updated'],
1981 'deleted': changes['deleted'],
1981 'deleted': changes['deleted'],
1982 }
1982 }
1983 audit_logger.store_api(
1983 audit_logger.store_api(
1984 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1984 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
1985 Session().commit()
1985 Session().commit()
1986 PermissionModel().flush_user_permission_caches(changes)
1986 PermissionModel().flush_user_permission_caches(changes)
1987
1987
1988 return {
1988 return {
1989 'msg': 'Granted perm: `{}` for user: `{}` in repo: `{}`'.format(
1989 'msg': 'Granted perm: `{}` for user: `{}` in repo: `{}`'.format(
1990 perm.permission_name, user.username, repo.repo_name
1990 perm.permission_name, user.username, repo.repo_name
1991 ),
1991 ),
1992 'success': True
1992 'success': True
1993 }
1993 }
1994 except Exception:
1994 except Exception:
1995 log.exception("Exception occurred while trying to edit permissions for repo")
1995 log.exception("Exception occurred while trying to edit permissions for repo")
1996 raise JSONRPCError(
1996 raise JSONRPCError(
1997 'failed to edit permission for user: `{}` in repo: `{}`'.format(
1997 'failed to edit permission for user: `{}` in repo: `{}`'.format(
1998 userid, repoid
1998 userid, repoid
1999 )
1999 )
2000 )
2000 )
2001
2001
2002
2002
2003 @jsonrpc_method()
2003 @jsonrpc_method()
2004 def revoke_user_permission(request, apiuser, repoid, userid):
2004 def revoke_user_permission(request, apiuser, repoid, userid):
2005 """
2005 """
2006 Revoke permission for a user on the specified repository.
2006 Revoke permission for a user on the specified repository.
2007
2007
2008 This command can only be run using an |authtoken| with admin
2008 This command can only be run using an |authtoken| with admin
2009 permissions on the |repo|.
2009 permissions on the |repo|.
2010
2010
2011 :param apiuser: This is filled automatically from the |authtoken|.
2011 :param apiuser: This is filled automatically from the |authtoken|.
2012 :type apiuser: AuthUser
2012 :type apiuser: AuthUser
2013 :param repoid: Set the repository name or repository ID.
2013 :param repoid: Set the repository name or repository ID.
2014 :type repoid: str or int
2014 :type repoid: str or int
2015 :param userid: Set the user name of revoked user.
2015 :param userid: Set the user name of revoked user.
2016 :type userid: str or int
2016 :type userid: str or int
2017
2017
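Example usage (a sketch of the JSON-RPC request body; the repository and user names are placeholders):

.. code-block:: bash

    {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "revoke_user_permission",
        "args": {
            "repoid": "my-repo",
            "userid": "some-user"
        }
    }
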
2018 Example output:
2018 Example output:
2019
2019
2020 .. code-block:: bash
2020 .. code-block:: bash
2021
2021
2022 id : <id_given_in_input>
2022 id : <id_given_in_input>
2023 result: {
2023 result: {
2024 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
2024 "msg" : "Revoked perm for user: `<username>` in repo: `<reponame>`",
2025 "success": true
2025 "success": true
2026 }
2026 }
2027 error: null
2027 error: null
2028 """
2028 """
2029
2029
2030 repo = get_repo_or_error(repoid)
2030 repo = get_repo_or_error(repoid)
2031 user = get_user_or_error(userid)
2031 user = get_user_or_error(userid)
2032 if not has_superadmin_permission(apiuser):
2032 if not has_superadmin_permission(apiuser):
2033 _perms = ('repository.admin',)
2033 _perms = ('repository.admin',)
2034 validate_repo_permissions(apiuser, repoid, repo, _perms)
2034 validate_repo_permissions(apiuser, repoid, repo, _perms)
2035
2035
2036 perm_deletions = [[user.user_id, None, "user"]]
2036 perm_deletions = [[user.user_id, None, "user"]]
2037 try:
2037 try:
2038 changes = RepoModel().update_permissions(
2038 changes = RepoModel().update_permissions(
2039 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2039 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2040
2040
2041 action_data = {
2041 action_data = {
2042 'added': changes['added'],
2042 'added': changes['added'],
2043 'updated': changes['updated'],
2043 'updated': changes['updated'],
2044 'deleted': changes['deleted'],
2044 'deleted': changes['deleted'],
2045 }
2045 }
2046 audit_logger.store_api(
2046 audit_logger.store_api(
2047 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2047 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2048 Session().commit()
2048 Session().commit()
2049 PermissionModel().flush_user_permission_caches(changes)
2049 PermissionModel().flush_user_permission_caches(changes)
2050
2050
2051 return {
2051 return {
2052 'msg': 'Revoked perm for user: `{}` in repo: `{}`'.format(
2052 'msg': 'Revoked perm for user: `{}` in repo: `{}`'.format(
2053 user.username, repo.repo_name
2053 user.username, repo.repo_name
2054 ),
2054 ),
2055 'success': True
2055 'success': True
2056 }
2056 }
2057 except Exception:
2057 except Exception:
2058 log.exception("Exception occurred while trying to revoke permissions on repo")
2058 log.exception("Exception occurred while trying to revoke permissions on repo")
2059 raise JSONRPCError(
2059 raise JSONRPCError(
2060 'failed to edit permission for user: `{}` in repo: `{}`'.format(
2060 'failed to edit permission for user: `{}` in repo: `{}`'.format(
2061 userid, repoid
2061 userid, repoid
2062 )
2062 )
2063 )
2063 )
2064
2064
2065
2065
2066 @jsonrpc_method()
2066 @jsonrpc_method()
2067 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
2067 def grant_user_group_permission(request, apiuser, repoid, usergroupid, perm):
2068 """
2068 """
2069 Grant permission for a user group on the specified repository,
2069 Grant permission for a user group on the specified repository,
2070 or update existing permissions.
2070 or update existing permissions.
2071
2071
2072 This command can only be run using an |authtoken| with admin
2072 This command can only be run using an |authtoken| with admin
2073 permissions on the |repo|.
2073 permissions on the |repo|.
2074
2074
2075 :param apiuser: This is filled automatically from the |authtoken|.
2075 :param apiuser: This is filled automatically from the |authtoken|.
2076 :type apiuser: AuthUser
2076 :type apiuser: AuthUser
2077 :param repoid: Set the repository name or repository ID.
2077 :param repoid: Set the repository name or repository ID.
2078 :type repoid: str or int
2078 :type repoid: str or int
2079 :param usergroupid: Specify the ID of the user group.
2079 :param usergroupid: Specify the ID of the user group.
2080 :type usergroupid: str or int
2080 :type usergroupid: str or int
2081 :param perm: Set the user group permissions using the following
2081 :param perm: Set the user group permissions using the following
2082 format: (repository.(none|read|write|admin))
2082 format: (repository.(none|read|write|admin))
2083 :type perm: str
2083 :type perm: str
2084
2084
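Example usage (a sketch of the JSON-RPC request payload; the repository name and user group name are placeholders):

.. code-block:: bash

    {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "grant_user_group_permission",
        "args": {
            "repoid": "my-repo",
            "usergroupid": "dev-team",
            "perm": "repository.read"
        }
    }
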
2085 Example output:
2085 Example output:
2086
2086
2087 .. code-block:: bash
2087 .. code-block:: bash
2088
2088
2089 id : <id_given_in_input>
2089 id : <id_given_in_input>
2090 result : {
2090 result : {
2091 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
2091 "msg" : "Granted perm: `<perm>` for group: `<usersgroupname>` in repo: `<reponame>`",
2092 "success": true
2092 "success": true
2093
2093
2094 }
2094 }
2095 error : null
2095 error : null
2096
2096
2097 Example error output:
2097 Example error output:
2098
2098
2099 .. code-block:: bash
2099 .. code-block:: bash
2100
2100
2101 id : <id_given_in_input>
2101 id : <id_given_in_input>
2102 result : null
2102 result : null
2103 error : {
2103 error : {
2104 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
2104 "failed to edit permission for user group: `<usergroup>` in repo `<repo>`'
2105 }
2105 }
2106
2106
2107 """
2107 """
2108
2108
2109 repo = get_repo_or_error(repoid)
2109 repo = get_repo_or_error(repoid)
2110 perm = get_perm_or_error(perm)
2110 perm = get_perm_or_error(perm)
2111 if not has_superadmin_permission(apiuser):
2111 if not has_superadmin_permission(apiuser):
2112 _perms = ('repository.admin',)
2112 _perms = ('repository.admin',)
2113 validate_repo_permissions(apiuser, repoid, repo, _perms)
2113 validate_repo_permissions(apiuser, repoid, repo, _perms)
2114
2114
2115 user_group = get_user_group_or_error(usergroupid)
2115 user_group = get_user_group_or_error(usergroupid)
2116 if not has_superadmin_permission(apiuser):
2116 if not has_superadmin_permission(apiuser):
2117 # check if we have at least read permission for this user group !
2117 # check if we have at least read permission for this user group !
2118 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2118 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2119 if not HasUserGroupPermissionAnyApi(*_perms)(
2119 if not HasUserGroupPermissionAnyApi(*_perms)(
2120 user=apiuser, user_group_name=user_group.users_group_name):
2120 user=apiuser, user_group_name=user_group.users_group_name):
2121 raise JSONRPCError(
2121 raise JSONRPCError(
2122 f'user group `{usergroupid}` does not exist')
2122 f'user group `{usergroupid}` does not exist')
2123
2123
2124 perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
2124 perm_additions = [[user_group.users_group_id, perm.permission_name, "user_group"]]
2125 try:
2125 try:
2126 changes = RepoModel().update_permissions(
2126 changes = RepoModel().update_permissions(
2127 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
2127 repo=repo, perm_additions=perm_additions, cur_user=apiuser)
2128 action_data = {
2128 action_data = {
2129 'added': changes['added'],
2129 'added': changes['added'],
2130 'updated': changes['updated'],
2130 'updated': changes['updated'],
2131 'deleted': changes['deleted'],
2131 'deleted': changes['deleted'],
2132 }
2132 }
2133 audit_logger.store_api(
2133 audit_logger.store_api(
2134 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2134 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2135 Session().commit()
2135 Session().commit()
2136 PermissionModel().flush_user_permission_caches(changes)
2136 PermissionModel().flush_user_permission_caches(changes)
2137
2137
2138 return {
2138 return {
2139 'msg': 'Granted perm: `%s` for user group: `%s` in '
2139 'msg': 'Granted perm: `%s` for user group: `%s` in '
2140 'repo: `%s`' % (
2140 'repo: `%s`' % (
2141 perm.permission_name, user_group.users_group_name,
2141 perm.permission_name, user_group.users_group_name,
2142 repo.repo_name
2142 repo.repo_name
2143 ),
2143 ),
2144 'success': True
2144 'success': True
2145 }
2145 }
2146 except Exception:
2146 except Exception:
2147 log.exception(
2147 log.exception(
2148 "Exception occurred while trying change permission on repo")
2148 "Exception occurred while trying change permission on repo")
2149 raise JSONRPCError(
2149 raise JSONRPCError(
2150 'failed to edit permission for user group: `%s` in '
2150 'failed to edit permission for user group: `%s` in '
2151 'repo: `%s`' % (
2151 'repo: `%s`' % (
2152 usergroupid, repo.repo_name
2152 usergroupid, repo.repo_name
2153 )
2153 )
2154 )
2154 )
2155
2155
2156
2156
2157 @jsonrpc_method()
2157 @jsonrpc_method()
2158 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
2158 def revoke_user_group_permission(request, apiuser, repoid, usergroupid):
2159 """
2159 """
2160 Revoke the permissions of a user group on a given repository.
2160 Revoke the permissions of a user group on a given repository.
2161
2161
2162 This command can only be run using an |authtoken| with admin
2162 This command can only be run using an |authtoken| with admin
2163 permissions on the |repo|.
2163 permissions on the |repo|.
2164
2164
2165 :param apiuser: This is filled automatically from the |authtoken|.
2165 :param apiuser: This is filled automatically from the |authtoken|.
2166 :type apiuser: AuthUser
2166 :type apiuser: AuthUser
2167 :param repoid: Set the repository name or repository ID.
2167 :param repoid: Set the repository name or repository ID.
2168 :type repoid: str or int
2168 :type repoid: str or int
2169 :param usergroupid: Specify the user group ID.
2169 :param usergroupid: Specify the user group ID.
2170 :type usergroupid: str or int
2170 :type usergroupid: str or int
2171
2171
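Example usage (a sketch of the JSON-RPC request body; the repository name and user group name are placeholders):

.. code-block:: bash

    {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "revoke_user_group_permission",
        "args": {
            "repoid": "my-repo",
            "usergroupid": "dev-team"
        }
    }
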
2172 Example output:
2172 Example output:
2173
2173
2174 .. code-block:: bash
2174 .. code-block:: bash
2175
2175
2176 id : <id_given_in_input>
2176 id : <id_given_in_input>
2177 result: {
2177 result: {
2178 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
2178 "msg" : "Revoked perm for group: `<usersgroupname>` in repo: `<reponame>`",
2179 "success": true
2179 "success": true
2180 }
2180 }
2181 error: null
2181 error: null
2182 """
2182 """
2183
2183
2184 repo = get_repo_or_error(repoid)
2184 repo = get_repo_or_error(repoid)
2185 if not has_superadmin_permission(apiuser):
2185 if not has_superadmin_permission(apiuser):
2186 _perms = ('repository.admin',)
2186 _perms = ('repository.admin',)
2187 validate_repo_permissions(apiuser, repoid, repo, _perms)
2187 validate_repo_permissions(apiuser, repoid, repo, _perms)
2188
2188
2189 user_group = get_user_group_or_error(usergroupid)
2189 user_group = get_user_group_or_error(usergroupid)
2190 if not has_superadmin_permission(apiuser):
2190 if not has_superadmin_permission(apiuser):
2191 # check if we have at least read permission for this user group !
2191 # check if we have at least read permission for this user group !
2192 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2192 _perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin',)
2193 if not HasUserGroupPermissionAnyApi(*_perms)(
2193 if not HasUserGroupPermissionAnyApi(*_perms)(
2194 user=apiuser, user_group_name=user_group.users_group_name):
2194 user=apiuser, user_group_name=user_group.users_group_name):
2195 raise JSONRPCError(
2195 raise JSONRPCError(
2196 f'user group `{usergroupid}` does not exist')
2196 f'user group `{usergroupid}` does not exist')
2197
2197
2198 perm_deletions = [[user_group.users_group_id, None, "user_group"]]
2198 perm_deletions = [[user_group.users_group_id, None, "user_group"]]
2199 try:
2199 try:
2200 changes = RepoModel().update_permissions(
2200 changes = RepoModel().update_permissions(
2201 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2201 repo=repo, perm_deletions=perm_deletions, cur_user=apiuser)
2202 action_data = {
2202 action_data = {
2203 'added': changes['added'],
2203 'added': changes['added'],
2204 'updated': changes['updated'],
2204 'updated': changes['updated'],
2205 'deleted': changes['deleted'],
2205 'deleted': changes['deleted'],
2206 }
2206 }
2207 audit_logger.store_api(
2207 audit_logger.store_api(
2208 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2208 'repo.edit.permissions', action_data=action_data, user=apiuser, repo=repo)
2209 Session().commit()
2209 Session().commit()
2210 PermissionModel().flush_user_permission_caches(changes)
2210 PermissionModel().flush_user_permission_caches(changes)
2211
2211
2212 return {
2212 return {
2213 'msg': 'Revoked perm for user group: `{}` in repo: `{}`'.format(
2213 'msg': 'Revoked perm for user group: `{}` in repo: `{}`'.format(
2214 user_group.users_group_name, repo.repo_name
2214 user_group.users_group_name, repo.repo_name
2215 ),
2215 ),
2216 'success': True
2216 'success': True
2217 }
2217 }
2218 except Exception:
2218 except Exception:
2219 log.exception("Exception occurred while trying to revoke "
2219 log.exception("Exception occurred while trying to revoke "
2220 "user group permission on repo")
2220 "user group permission on repo")
2221 raise JSONRPCError(
2221 raise JSONRPCError(
2222 'failed to edit permission for user group: `%s` in '
2222 'failed to edit permission for user group: `%s` in '
2223 'repo: `%s`' % (
2223 'repo: `%s`' % (
2224 user_group.users_group_name, repo.repo_name
2224 user_group.users_group_name, repo.repo_name
2225 )
2225 )
2226 )
2226 )
2227
2227
2228
2228
2229 @jsonrpc_method()
2229 @jsonrpc_method()
2230 def pull(request, apiuser, repoid, remote_uri=Optional(None)):
2230 def pull(request, apiuser, repoid, remote_uri=Optional(None), sync_large_objects=Optional(False)):
2231 """
2231 """
2232 Triggers a pull on the given repository from a remote location. You
2232 Triggers a pull on the given repository from a remote location. You
2233 can use this to keep a mirrored repository up-to-date.
2233 can use this to keep a mirrored repository up-to-date.
2234
2234
2235 This command can only be run using an |authtoken| with admin
2235 This command can only be run using an |authtoken| with admin
2236 rights to the specified repository. For more information,
2236 rights to the specified repository. For more information,
2237 see :ref:`config-token-ref`.
2237 see :ref:`config-token-ref`.
2238
2238
2239 This command takes the following options:
2239 This command takes the following options:
2240
2240
2241 :param apiuser: This is filled automatically from the |authtoken|.
2241 :param apiuser: This is filled automatically from the |authtoken|.
2242 :type apiuser: AuthUser
2242 :type apiuser: AuthUser
2243 :param repoid: The repository name or repository ID.
2243 :param repoid: The repository name or repository ID.
2244 :type repoid: str or int
2244 :type repoid: str or int
2245 :param remote_uri: Optional remote URI to pass in for pull
2245 :param remote_uri: Optional remote URI to pass in for pull
2246 :type remote_uri: str
2246 :type remote_uri: str
2247 :param sync_large_objects: Optional flag for pulling LFS objects.
2248 :type sync_large_objects: bool
2247
2249
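Example usage (a sketch of the JSON-RPC request body with the LFS sync flag enabled; the repository name, token, and id values are placeholders):

.. code-block:: bash

    {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "pull",
        "args": {
            "repoid": "my-repo",
            "sync_large_objects": true
        }
    }
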
2248 Example output:
2250 Example output:
2249
2251
2250 .. code-block:: bash
2252 .. code-block:: bash
2251
2253
2252 id : <id_given_in_input>
2254 id : <id_given_in_input>
2253 result : {
2255 result : {
2254 "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
2256 "msg": "Pulled from url `<remote_url>` on repo `<repository name>`"
2255 "repository": "<repository name>"
2257 "repository": "<repository name>"
2256 }
2258 }
2257 error : null
2259 error : null
2258
2260
2259 Example error output:
2261 Example error output:
2260
2262
2261 .. code-block:: bash
2263 .. code-block:: bash
2262
2264
2263 id : <id_given_in_input>
2265 id : <id_given_in_input>
2264 result : null
2266 result : null
2265 error : {
2267 error : {
2266 "Unable to push changes from `<remote_url>`"
2268 "Unable to push changes from `<remote_url>`"
2267 }
2269 }
2268
2270
2269 """
2271 """
2270
2272
2271 repo = get_repo_or_error(repoid)
2273 repo = get_repo_or_error(repoid)
2272 remote_uri = Optional.extract(remote_uri)
2274 remote_uri = Optional.extract(remote_uri)
2273 remote_uri_display = remote_uri or repo.clone_uri_hidden
2275 remote_uri_display = remote_uri or repo.clone_uri_hidden
2274 if not has_superadmin_permission(apiuser):
2276 if not has_superadmin_permission(apiuser):
2275 _perms = ('repository.admin',)
2277 _perms = ('repository.admin',)
2276 validate_repo_permissions(apiuser, repoid, repo, _perms)
2278 validate_repo_permissions(apiuser, repoid, repo, _perms)
2277
2279
2278 try:
2280 try:
2279 ScmModel().pull_changes(
2281 ScmModel().pull_changes(
2280 repo.repo_name, apiuser.username, remote_uri=remote_uri)
2282 repo.repo_name, apiuser.username, remote_uri=remote_uri, sync_large_objects=sync_large_objects)
2281 return {
2283 return {
2282 'msg': 'Pulled from url `{}` on repo `{}`'.format(
2284 'msg': 'Pulled from url `{}` on repo `{}`'.format(
2283 remote_uri_display, repo.repo_name),
2285 remote_uri_display, repo.repo_name),
2284 'repository': repo.repo_name
2286 'repository': repo.repo_name
2285 }
2287 }
2286 except Exception:
2288 except Exception:
2287 log.exception("Exception occurred while trying to "
2289 log.exception("Exception occurred while trying to "
2288 "pull changes from remote location")
2290 "pull changes from remote location")
2289 raise JSONRPCError(
2291 raise JSONRPCError(
2290 'Unable to pull changes from `%s`' % remote_uri_display
2292 'Unable to pull changes from `%s`' % remote_uri_display
2291 )
2293 )
2292
2294
2293
2295
2294 @jsonrpc_method()
2296 @jsonrpc_method()
2295 def strip(request, apiuser, repoid, revision, branch):
2297 def strip(request, apiuser, repoid, revision, branch):
2296 """
2298 """
2297 Strips the given revision from the specified repository.
2299 Strips the given revision from the specified repository.
2298
2300
2299 * This will remove the revision and all of its descendants.
2301 * This will remove the revision and all of its descendants.
2300
2302
2301 This command can only be run using an |authtoken| with admin rights to
2303 This command can only be run using an |authtoken| with admin rights to
2302 the specified repository.
2304 the specified repository.
2303
2305
2304 This command takes the following options:
2306 This command takes the following options:
2305
2307
2306 :param apiuser: This is filled automatically from the |authtoken|.
2308 :param apiuser: This is filled automatically from the |authtoken|.
2307 :type apiuser: AuthUser
2309 :type apiuser: AuthUser
2308 :param repoid: The repository name or repository ID.
2310 :param repoid: The repository name or repository ID.
2309 :type repoid: str or int
2311 :type repoid: str or int
2310 :param revision: The revision you wish to strip.
2312 :param revision: The revision you wish to strip.
2311 :type revision: str
2313 :type revision: str
2312 :param branch: The branch from which to strip the revision.
2314 :param branch: The branch from which to strip the revision.
2313 :type branch: str
2315 :type branch: str
2314
2316
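Example usage (a sketch of the JSON-RPC request payload; the repository name, commit hash, and branch name are placeholders):

.. code-block:: bash

    {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "strip",
        "args": {
            "repoid": "my-repo",
            "revision": "<commit_hash>",
            "branch": "default"
        }
    }
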
2315 Example output:
2317 Example output:
2316
2318
2317 .. code-block:: bash
2319 .. code-block:: bash
2318
2320
2319 id : <id_given_in_input>
2321 id : <id_given_in_input>
2320 result : {
2322 result : {
2321 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
2323 "msg": "'Stripped commit <commit_hash> from repo `<repository name>`'"
2322 "repository": "<repository name>"
2324 "repository": "<repository name>"
2323 }
2325 }
2324 error : null
2326 error : null
2325
2327
2326 Example error output:
2328 Example error output:
2327
2329
2328 .. code-block:: bash
2330 .. code-block:: bash
2329
2331
2330 id : <id_given_in_input>
2332 id : <id_given_in_input>
2331 result : null
2333 result : null
2332 error : {
2334 error : {
2333 "Unable to strip commit <commit_hash> from repo `<repository name>`"
2335 "Unable to strip commit <commit_hash> from repo `<repository name>`"
2334 }
2336 }
2335
2337
2336 """
2338 """
2337
2339
2338 repo = get_repo_or_error(repoid)
2340 repo = get_repo_or_error(repoid)
2339 if not has_superadmin_permission(apiuser):
2341 if not has_superadmin_permission(apiuser):
2340 _perms = ('repository.admin',)
2342 _perms = ('repository.admin',)
2341 validate_repo_permissions(apiuser, repoid, repo, _perms)
2343 validate_repo_permissions(apiuser, repoid, repo, _perms)
2342
2344
2343 try:
2345 try:
2344 ScmModel().strip(repo, revision, branch)
2346 ScmModel().strip(repo, revision, branch)
2345 audit_logger.store_api(
2347 audit_logger.store_api(
2346 'repo.commit.strip', action_data={'commit_id': revision},
2348 'repo.commit.strip', action_data={'commit_id': revision},
2347 repo=repo,
2349 repo=repo,
2348 user=apiuser, commit=True)
2350 user=apiuser, commit=True)
2349
2351
2350 return {
2352 return {
2351 'msg': 'Stripped commit {} from repo `{}`'.format(
2353 'msg': 'Stripped commit {} from repo `{}`'.format(
2352 revision, repo.repo_name),
2354 revision, repo.repo_name),
2353 'repository': repo.repo_name
2355 'repository': repo.repo_name
2354 }
2356 }
2355 except Exception:
2357 except Exception:
2356 log.exception("Exception while trying to strip")
2358 log.exception("Exception while trying to strip")
2357 raise JSONRPCError(
2359 raise JSONRPCError(
2358 'Unable to strip commit {} from repo `{}`'.format(
2360 'Unable to strip commit {} from repo `{}`'.format(
2359 revision, repo.repo_name)
2361 revision, repo.repo_name)
2360 )
2362 )
2361
2363
2362
2364
2363 @jsonrpc_method()
2365 @jsonrpc_method()
2364 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
2366 def get_repo_settings(request, apiuser, repoid, key=Optional(None)):
2365 """
2367 """
2366 Returns all settings for a repository. If key is given it only returns the
2368 Returns all settings for a repository. If key is given it only returns the
2367 setting identified by the key or null.
2369 setting identified by the key or null.
2368
2370
2369 :param apiuser: This is filled automatically from the |authtoken|.
2371 :param apiuser: This is filled automatically from the |authtoken|.
2370 :type apiuser: AuthUser
2372 :type apiuser: AuthUser
2371 :param repoid: The repository name or repository id.
2373 :param repoid: The repository name or repository id.
2372 :type repoid: str or int
2374 :type repoid: str or int
2373 :param key: Key of the setting to return.
2375 :param key: Key of the setting to return.
2374 :type key: Optional(str)
2376 :type key: Optional(str)
2375
2377
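Example usage (a sketch of the JSON-RPC request body; the repository name is a placeholder and the key is taken from the example output below):

.. code-block:: bash

    {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "get_repo_settings",
        "args": {
            "repoid": "my-repo",
            "key": "rhodecode_pr_merge_enabled"
        }
    }
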
2376 Example output:
2378 Example output:
2377
2379
2378 .. code-block:: bash
2380 .. code-block:: bash
2379
2381
2380 {
2382 {
2381 "error": null,
2383 "error": null,
2382 "id": 237,
2384 "id": 237,
2383 "result": {
2385 "result": {
2384 "extensions_largefiles": true,
2386 "extensions_largefiles": true,
2385 "extensions_evolve": true,
2387 "extensions_evolve": true,
2386 "hooks_changegroup_push_logger": true,
2388 "hooks_changegroup_push_logger": true,
2387 "hooks_changegroup_repo_size": false,
2389 "hooks_changegroup_repo_size": false,
2388 "hooks_outgoing_pull_logger": true,
2390 "hooks_outgoing_pull_logger": true,
2389 "phases_publish": "True",
2391 "phases_publish": "True",
2390 "rhodecode_hg_use_rebase_for_merging": true,
2392 "rhodecode_hg_use_rebase_for_merging": true,
2391 "rhodecode_pr_merge_enabled": true,
2393 "rhodecode_pr_merge_enabled": true,
2392 "rhodecode_use_outdated_comments": true
2394 "rhodecode_use_outdated_comments": true
2393 }
2395 }
2394 }
2396 }
2395 """
2397 """
2396
2398
2397 # Restrict access to this api method to super-admins, and repo admins only.
2399 # Restrict access to this api method to super-admins, and repo admins only.
2398 repo = get_repo_or_error(repoid)
2400 repo = get_repo_or_error(repoid)
2399 if not has_superadmin_permission(apiuser):
2401 if not has_superadmin_permission(apiuser):
2400 _perms = ('repository.admin',)
2402 _perms = ('repository.admin',)
2401 validate_repo_permissions(apiuser, repoid, repo, _perms)
2403 validate_repo_permissions(apiuser, repoid, repo, _perms)
2402
2404
2403 try:
2405 try:
2404 settings_model = VcsSettingsModel(repo=repo)
2406 settings_model = VcsSettingsModel(repo=repo)
2405 settings = settings_model.get_global_settings()
2407 settings = settings_model.get_global_settings()
2406 settings.update(settings_model.get_repo_settings())
2408 settings.update(settings_model.get_repo_settings())
2407
2409
2408 # If only a single setting is requested fetch it from all settings.
2410 # If only a single setting is requested fetch it from all settings.
2409 key = Optional.extract(key)
2411 key = Optional.extract(key)
2410 if key is not None:
2412 if key is not None:
2411 settings = settings.get(key, None)
2413 settings = settings.get(key, None)
2412 except Exception:
2414 except Exception:
2413 msg = f'Failed to fetch settings for repository `{repoid}`'
2415 msg = f'Failed to fetch settings for repository `{repoid}`'
2414 log.exception(msg)
2416 log.exception(msg)
2415 raise JSONRPCError(msg)
2417 raise JSONRPCError(msg)
2416
2418
2417 return settings
2419 return settings
2418
2420
2419
2421
2420 @jsonrpc_method()
2422 @jsonrpc_method()
2421 def set_repo_settings(request, apiuser, repoid, settings):
2423 def set_repo_settings(request, apiuser, repoid, settings):
2422 """
2424 """
2423 Update repository settings. Returns true on success.
2425 Update repository settings. Returns true on success.
2424
2426
2425 :param apiuser: This is filled automatically from the |authtoken|.
2427 :param apiuser: This is filled automatically from the |authtoken|.
2426 :type apiuser: AuthUser
2428 :type apiuser: AuthUser
2427 :param repoid: The repository name or repository id.
2429 :param repoid: The repository name or repository id.
2428 :type repoid: str or int
2430 :type repoid: str or int
2429 :param settings: The new settings for the repository.
2431 :param settings: The new settings for the repository.
2430 :type settings: dict
2432 :type settings: dict
2431
2433
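Example usage (a sketch of the JSON-RPC request payload; the repository name is a placeholder and the setting key mirrors the `get_repo_settings` example):

.. code-block:: bash

    {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "set_repo_settings",
        "args": {
            "repoid": "my-repo",
            "settings": {
                "hooks_outgoing_pull_logger": false
            }
        }
    }
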
2432 Example output:
2434 Example output:
2433
2435
2434 .. code-block:: bash
2436 .. code-block:: bash
2435
2437
2436 {
2438 {
2437 "error": null,
2439 "error": null,
2438 "id": 237,
2440 "id": 237,
2439 "result": true
2441 "result": true
2440 }
2442 }
2441 """
2443 """
2442 # Restrict access to this api method to super-admins, and repo admins only.
2444 # Restrict access to this api method to super-admins, and repo admins only.
2443 repo = get_repo_or_error(repoid)
2445 repo = get_repo_or_error(repoid)
2444 if not has_superadmin_permission(apiuser):
2446 if not has_superadmin_permission(apiuser):
2445 _perms = ('repository.admin',)
2447 _perms = ('repository.admin',)
2446 validate_repo_permissions(apiuser, repoid, repo, _perms)
2448 validate_repo_permissions(apiuser, repoid, repo, _perms)
2447
2449
2448 if type(settings) is not dict:
2450 if type(settings) is not dict:
2449 raise JSONRPCError('Settings have to be a JSON Object.')
2451 raise JSONRPCError('Settings have to be a JSON Object.')
2450
2452
2451 try:
2453 try:
2452 settings_model = VcsSettingsModel(repo=repoid)
2454 settings_model = VcsSettingsModel(repo=repoid)
2453
2455
2454 # Merge global, repo and incoming settings.
2456 # Merge global, repo and incoming settings.
2455 new_settings = settings_model.get_global_settings()
2457 new_settings = settings_model.get_global_settings()
2456 new_settings.update(settings_model.get_repo_settings())
2458 new_settings.update(settings_model.get_repo_settings())
2457 new_settings.update(settings)
2459 new_settings.update(settings)
2458
2460
2459 # Update the settings.
2461 # Update the settings.
2460 inherit_global_settings = new_settings.get(
2462 inherit_global_settings = new_settings.get(
2461 'inherit_global_settings', False)
2463 'inherit_global_settings', False)
2462 settings_model.create_or_update_repo_settings(
2464 settings_model.create_or_update_repo_settings(
2463 new_settings, inherit_global_settings=inherit_global_settings)
2465 new_settings, inherit_global_settings=inherit_global_settings)
2464 Session().commit()
2466 Session().commit()
2465 except Exception:
2467 except Exception:
2466 msg = f'Failed to update settings for repository `{repoid}`'
2468 msg = f'Failed to update settings for repository `{repoid}`'
2467 log.exception(msg)
2469 log.exception(msg)
2468 raise JSONRPCError(msg)
2470 raise JSONRPCError(msg)
2469
2471
2470 # Indicate success.
2472 # Indicate success.
2471 return True
2473 return True
2472
2474
2473
2475
2474 @jsonrpc_method()
2476 @jsonrpc_method()
2475 def maintenance(request, apiuser, repoid):
2477 def maintenance(request, apiuser, repoid):
2476 """
2478 """
2477 Triggers a maintenance on the given repository.
2479 Triggers a maintenance on the given repository.
2478
2480
2479 This command can only be run using an |authtoken| with admin
2481 This command can only be run using an |authtoken| with admin
2480 rights to the specified repository. For more information,
2482 rights to the specified repository. For more information,
2481 see :ref:`config-token-ref`.
2483 see :ref:`config-token-ref`.
2482
2484
2483 This command takes the following options:
2485 This command takes the following options:
2484
2486
2485 :param apiuser: This is filled automatically from the |authtoken|.
2487 :param apiuser: This is filled automatically from the |authtoken|.
2486 :type apiuser: AuthUser
2488 :type apiuser: AuthUser
2487 :param repoid: The repository name or repository ID.
2489 :param repoid: The repository name or repository ID.
2488 :type repoid: str or int
2490 :type repoid: str or int
2489
2491
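Example usage (a sketch of the JSON-RPC request body; the repository name, token, and id values are placeholders):

.. code-block:: bash

    {
        "id": 1,
        "auth_token": "<auth_token>",
        "method": "maintenance",
        "args": {
            "repoid": "my-repo"
        }
    }
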
2490 Example output:
2492 Example output:
2491
2493
2492 .. code-block:: bash
2494 .. code-block:: bash
2493
2495
2494 id : <id_given_in_input>
2496 id : <id_given_in_input>
2495 result : {
2497 result : {
2496 "msg": "executed maintenance command",
2498 "msg": "executed maintenance command",
2497 "executed_actions": [
2499 "executed_actions": [
2498 <action_message>, <action_message2>...
2500 <action_message>, <action_message2>...
2499 ],
2501 ],
2500 "repository": "<repository name>"
2502 "repository": "<repository name>"
2501 }
2503 }
2502 error : null
2504 error : null
2503
2505
2504 Example error output:
2506 Example error output:
2505
2507
2506 .. code-block:: bash
2508 .. code-block:: bash
2507
2509
2508 id : <id_given_in_input>
2510 id : <id_given_in_input>
2509 result : null
2511 result : null
2510 error : {
2512 error : {
2511 "Unable to execute maintenance on `<reponame>`"
2513 "Unable to execute maintenance on `<reponame>`"
2512 }
2514 }
2513
2515
2514 """
2516 """
2515
2517
2516 repo = get_repo_or_error(repoid)
2518 repo = get_repo_or_error(repoid)
2517 if not has_superadmin_permission(apiuser):
2519 if not has_superadmin_permission(apiuser):
2518 _perms = ('repository.admin',)
2520 _perms = ('repository.admin',)
2519 validate_repo_permissions(apiuser, repoid, repo, _perms)
2521 validate_repo_permissions(apiuser, repoid, repo, _perms)
2520
2522
2521 try:
2523 try:
2522 maintenance = repo_maintenance.RepoMaintenance()
2524 maintenance = repo_maintenance.RepoMaintenance()
2523 executed_actions = maintenance.execute(repo)
2525 executed_actions = maintenance.execute(repo)
2524
2526
2525 return {
2527 return {
2526 'msg': 'executed maintenance command',
2528 'msg': 'executed maintenance command',
2527 'executed_actions': executed_actions,
2529 'executed_actions': executed_actions,
2528 'repository': repo.repo_name
2530 'repository': repo.repo_name
2529 }
2531 }
2530 except Exception:
2532 except Exception:
2531 log.exception("Exception occurred while trying to run maintenance")
2533 log.exception("Exception occurred while trying to run maintenance")
2532 raise JSONRPCError(
2534 raise JSONRPCError(
2533 'Unable to execute maintenance on `%s`' % repo.repo_name)
2535 'Unable to execute maintenance on `%s`' % repo.repo_name)
@@ -1,62 +1,62 b''
1 # Copyright (C) 2017-2023 RhodeCode GmbH
1 # Copyright (C) 2017-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 import logging
19 import logging
20
20
21 from pyramid.httpexceptions import HTTPFound
21 from pyramid.httpexceptions import HTTPFound
22
22
23
23
24 from rhodecode.apps._base import RepoAppView
24 from rhodecode.apps._base import RepoAppView
25 from rhodecode.lib import helpers as h
25 from rhodecode.lib import helpers as h
26 from rhodecode.lib.auth import (
26 from rhodecode.lib.auth import (
27 LoginRequired, CSRFRequired, HasRepoPermissionAnyDecorator)
27 LoginRequired, CSRFRequired, HasRepoPermissionAnyDecorator)
28 from rhodecode.model.scm import ScmModel
28 from rhodecode.model.scm import ScmModel
29
29
30 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
31
31
32
32
33 class RepoSettingsRemoteView(RepoAppView):
33 class RepoSettingsRemoteView(RepoAppView):
34 def load_default_context(self):
34 def load_default_context(self):
35 c = self._get_local_tmpl_context()
35 c = self._get_local_tmpl_context()
36 return c
36 return c
37
37
38 @LoginRequired()
38 @LoginRequired()
39 @HasRepoPermissionAnyDecorator('repository.admin')
39 @HasRepoPermissionAnyDecorator('repository.admin')
40 def repo_remote_edit_form(self):
40 def repo_remote_edit_form(self):
41 c = self.load_default_context()
41 c = self.load_default_context()
42 c.active = 'remote'
42 c.active = 'remote'
43
43
44 return self._get_template_context(c)
44 return self._get_template_context(c)
45
45
46 @LoginRequired()
46 @LoginRequired()
47 @HasRepoPermissionAnyDecorator('repository.admin')
47 @HasRepoPermissionAnyDecorator('repository.admin')
48 @CSRFRequired()
48 @CSRFRequired()
49 def repo_remote_pull_changes(self):
49 def repo_remote_pull_changes(self):
50 _ = self.request.translate
50 _ = self.request.translate
51 self.load_default_context()
51 self.load_default_context()
52
52
53 try:
53 try:
54 ScmModel().pull_changes(
54 ScmModel().pull_changes(
55 self.db_repo_name, self._rhodecode_user.username)
55 self.db_repo_name, self._rhodecode_user.username, sync_large_objects=True)
56 h.flash(_('Pulled from remote location'), category='success')
56 h.flash(_('Pulled from remote location'), category='success')
57 except Exception:
57 except Exception:
58 log.exception("Exception during pull from remote")
58 log.exception("Exception during pull from remote")
59 h.flash(_('An error occurred during pull from remote location'),
59 h.flash(_('An error occurred during pull from remote location'),
60 category='error')
60 category='error')
61 raise HTTPFound(
61 raise HTTPFound(
62 h.route_path('edit_repo_remote', repo_name=self.db_repo_name))
62 h.route_path('edit_repo_remote', repo_name=self.db_repo_name))
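The view above now forwards sync_large_objects=True into the model layer. A minimal sketch of that call path, assuming a configured RhodeCode environment; the repository name and username are placeholders.

# Sketch only: mirrors the view's call, with placeholder values.
from rhodecode.model.scm import ScmModel

repo_name = 'my-org/my-repo'    # placeholder
username = 'admin'              # placeholder

# sync_large_objects=True asks the backend to also synchronize large
# objects (e.g. LFS blobs) while pulling from the configured remote.
ScmModel().pull_changes(repo_name, username, sync_large_objects=True)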
@@ -1,1053 +1,1053 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 GIT repository module
20 GIT repository module
21 """
21 """
22
22
23 import logging
23 import logging
24 import os
24 import os
25 import re
25 import re
26
26
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from collections import OrderedDict
29 from collections import OrderedDict
30 from rhodecode.lib.datelib import (
30 from rhodecode.lib.datelib import (
31 utcdate_fromtimestamp, makedate, date_astimestamp)
31 utcdate_fromtimestamp, makedate, date_astimestamp)
32 from rhodecode.lib.hash_utils import safe_str
32 from rhodecode.lib.hash_utils import safe_str
33 from rhodecode.lib.utils2 import CachedProperty
33 from rhodecode.lib.utils2 import CachedProperty
34 from rhodecode.lib.vcs import connection, path as vcspath
34 from rhodecode.lib.vcs import connection, path as vcspath
35 from rhodecode.lib.vcs.backends.base import (
35 from rhodecode.lib.vcs.backends.base import (
36 BaseRepository, CollectionGenerator, Config, MergeResponse,
36 BaseRepository, CollectionGenerator, Config, MergeResponse,
37 MergeFailureReason, Reference)
37 MergeFailureReason, Reference)
38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
38 from rhodecode.lib.vcs.backends.git.commit import GitCommit
39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
39 from rhodecode.lib.vcs.backends.git.diff import GitDiff
40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
40 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
41 from rhodecode.lib.vcs.exceptions import (
41 from rhodecode.lib.vcs.exceptions import (
42 CommitDoesNotExistError, EmptyRepositoryError,
42 CommitDoesNotExistError, EmptyRepositoryError,
43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
43 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
44
44
45
45
46 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
46 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')
47
47
48 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
49
49
50
50
51 class GitRepository(BaseRepository):
51 class GitRepository(BaseRepository):
52 """
52 """
53 Git repository backend.
53 Git repository backend.
54 """
54 """
55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
55 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
56 DEFAULT_REF = f'branch:{DEFAULT_BRANCH_NAME}'
57
57
58 contact = BaseRepository.DEFAULT_CONTACT
58 contact = BaseRepository.DEFAULT_CONTACT
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62
62
63 self.path = safe_str(os.path.abspath(repo_path))
63 self.path = safe_str(os.path.abspath(repo_path))
64 self.config = config if config else self.get_default_config()
64 self.config = config if config else self.get_default_config()
65 self.with_wire = with_wire or {"cache": False} # default should not use cache
65 self.with_wire = with_wire or {"cache": False} # default should not use cache
66
66
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
67 self._init_repo(create, src_url, do_workspace_checkout, bare)
68
68
69 # caches
69 # caches
70 self._commit_ids = {}
70 self._commit_ids = {}
71
71
72 @LazyProperty
72 @LazyProperty
73 def _remote(self):
73 def _remote(self):
74 repo_id = self.path
74 repo_id = self.path
75 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
75 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
76
76
77 @LazyProperty
77 @LazyProperty
78 def bare(self):
78 def bare(self):
79 return self._remote.bare()
79 return self._remote.bare()
80
80
81 @LazyProperty
81 @LazyProperty
82 def head(self):
82 def head(self):
83 return self._remote.head()
83 return self._remote.head()
84
84
85 @CachedProperty
85 @CachedProperty
86 def commit_ids(self):
86 def commit_ids(self):
87 """
87 """
88 Returns list of commit ids, in ascending order. Being a lazy
88 Returns list of commit ids, in ascending order. Being a lazy
89 attribute allows external tools to inject commit ids from cache.
89 attribute allows external tools to inject commit ids from cache.
90 """
90 """
91 commit_ids = self._get_all_commit_ids()
91 commit_ids = self._get_all_commit_ids()
92 self._rebuild_cache(commit_ids)
92 self._rebuild_cache(commit_ids)
93 return commit_ids
93 return commit_ids
94
94
95 def _rebuild_cache(self, commit_ids):
95 def _rebuild_cache(self, commit_ids):
96 self._commit_ids = {commit_id: index
96 self._commit_ids = {commit_id: index
97 for index, commit_id in enumerate(commit_ids)}
97 for index, commit_id in enumerate(commit_ids)}
98
98
99 def run_git_command(self, cmd, **opts):
99 def run_git_command(self, cmd, **opts):
100 """
100 """
101 Runs given ``cmd`` as git command and returns tuple
101 Runs given ``cmd`` as git command and returns tuple
102 (stdout, stderr).
102 (stdout, stderr).
103
103
104 :param cmd: git command to be executed
104 :param cmd: git command to be executed
105 :param opts: env options to pass into Subprocess command
105 :param opts: env options to pass into Subprocess command
106 """
106 """
107 if not isinstance(cmd, list):
107 if not isinstance(cmd, list):
108 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
108 raise ValueError(f'cmd must be a list, got {type(cmd)} instead')
109
109
110 skip_stderr_log = opts.pop('skip_stderr_log', False)
110 skip_stderr_log = opts.pop('skip_stderr_log', False)
111 out, err = self._remote.run_git_command(cmd, **opts)
111 out, err = self._remote.run_git_command(cmd, **opts)
112 if err and not skip_stderr_log:
112 if err and not skip_stderr_log:
113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
113 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
114 return out, err
114 return out, err
115
115
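A small usage sketch of run_git_command; the import path and repository path are assumptions/placeholders.

# Sketch only: list branch heads through the low-level command runner.
from rhodecode.lib.vcs.backends.git.repository import GitRepository  # assumed path

repo = GitRepository('/srv/repos/my-repo.git')  # placeholder bare repository

stdout, stderr = repo.run_git_command(
    ['for-each-ref', '--format=%(refname:short)', 'refs/heads'],
    skip_stderr_log=True,  # skip the debug log of any stderr output
)
print(stdout.splitlines())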
116 @staticmethod
116 @staticmethod
117 def check_url(url, config):
117 def check_url(url, config):
118 """
118 """
119 Function will check given url and try to verify if it's a valid
119 Function will check given url and try to verify if it's a valid
120 link. Sometimes it may happen that git will issue a basic
120 link. Sometimes it may happen that git will issue a basic
121 auth request that can cause the whole API to hang when used from python
121 auth request that can cause the whole API to hang when used from python
122 or other external calls.
122 or other external calls.
123
123
124 On failures it'll raise urllib2.HTTPError; the exception is also thrown
124 On failures it'll raise urllib2.HTTPError; the exception is also thrown
125 when the return code is not 200
125 when the return code is not 200
126 """
126 """
127 # check first if it's not an url
127 # check first if it's not an url
128 if os.path.isdir(url) or url.startswith('file:'):
128 if os.path.isdir(url) or url.startswith('file:'):
129 return True
129 return True
130
130
131 if '+' in url.split('://', 1)[0]:
131 if '+' in url.split('://', 1)[0]:
132 url = url.split('+', 1)[1]
132 url = url.split('+', 1)[1]
133
133
134 # Request the _remote to verify the url
134 # Request the _remote to verify the url
135 return connection.Git.check_url(url, config.serialize())
135 return connection.Git.check_url(url, config.serialize())
136
136
137 @staticmethod
137 @staticmethod
138 def is_valid_repository(path):
138 def is_valid_repository(path):
139 if os.path.isdir(os.path.join(path, '.git')):
139 if os.path.isdir(os.path.join(path, '.git')):
140 return True
140 return True
141 # check case of bare repository
141 # check case of bare repository
142 try:
142 try:
143 GitRepository(path)
143 GitRepository(path)
144 return True
144 return True
145 except VCSError:
145 except VCSError:
146 pass
146 pass
147 return False
147 return False
148
148
149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
149 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
150 bare=False):
150 bare=False):
151 if create and os.path.exists(self.path):
151 if create and os.path.exists(self.path):
152 raise RepositoryError(
152 raise RepositoryError(
153 f"Cannot create repository at {self.path}, location already exists")
153 f"Cannot create repository at {self.path}, location already exists")
154
154
155 if bare and do_workspace_checkout:
155 if bare and do_workspace_checkout:
156 raise RepositoryError("Cannot update a bare repository")
156 raise RepositoryError("Cannot update a bare repository")
157 try:
157 try:
158
158
159 if src_url:
159 if src_url:
160 # check URL before any actions
160 # check URL before any actions
161 GitRepository.check_url(src_url, self.config)
161 GitRepository.check_url(src_url, self.config)
162
162
163 if create:
163 if create:
164 if bare:
164 if bare:
165 self._remote.init_bare()
165 self._remote.init_bare()
166 else:
166 else:
167 self._remote.init()
167 self._remote.init()
168
168
169 if src_url and bare:
169 if src_url and bare:
170 # a bare repository only allows a fetch; checkout is not allowed
170 # a bare repository only allows a fetch; checkout is not allowed
171 self.fetch(src_url, commit_ids=None)
171 self.fetch(src_url, commit_ids=None)
172 elif src_url:
172 elif src_url:
173 self.pull(src_url, commit_ids=None,
173 self.pull(src_url, commit_ids=None,
174 update_after=do_workspace_checkout)
174 update_after=do_workspace_checkout)
175
175
176 else:
176 else:
177 if not self._remote.assert_correct_path():
177 if not self._remote.assert_correct_path():
178 raise RepositoryError(
178 raise RepositoryError(
179 f'Path "{self.path}" does not contain a Git repository')
179 f'Path "{self.path}" does not contain a Git repository')
180
180
181 # TODO: johbo: check if we have to translate the OSError here
181 # TODO: johbo: check if we have to translate the OSError here
182 except OSError as err:
182 except OSError as err:
183 raise RepositoryError(err)
183 raise RepositoryError(err)
184
184
185 def _get_all_commit_ids(self):
185 def _get_all_commit_ids(self):
186 return self._remote.get_all_commit_ids()
186 return self._remote.get_all_commit_ids()
187
187
188 def _get_commit_ids(self, filters=None):
188 def _get_commit_ids(self, filters=None):
189 # we must check if this repo is not empty, since later command
189 # we must check if this repo is not empty, since later command
190 # fails if it is. And it's cheaper to ask than to deal with the subprocess
190 # fails if it is. And it's cheaper to ask than to deal with the subprocess
191 # errors
191 # errors
192
192
193 head = self._remote.head(show_exc=False)
193 head = self._remote.head(show_exc=False)
194
194
195 if not head:
195 if not head:
196 return []
196 return []
197
197
198 rev_filter = ['--branches', '--tags']
198 rev_filter = ['--branches', '--tags']
199 extra_filter = []
199 extra_filter = []
200
200
201 if filters:
201 if filters:
202 if filters.get('since'):
202 if filters.get('since'):
203 extra_filter.append('--since=%s' % (filters['since']))
203 extra_filter.append('--since=%s' % (filters['since']))
204 if filters.get('until'):
204 if filters.get('until'):
205 extra_filter.append('--until=%s' % (filters['until']))
205 extra_filter.append('--until=%s' % (filters['until']))
206 if filters.get('branch_name'):
206 if filters.get('branch_name'):
207 rev_filter = []
207 rev_filter = []
208 extra_filter.append(filters['branch_name'])
208 extra_filter.append(filters['branch_name'])
209 rev_filter.extend(extra_filter)
209 rev_filter.extend(extra_filter)
210
210
211 # if filters.get('start') or filters.get('end'):
211 # if filters.get('start') or filters.get('end'):
212 # # skip is offset, max-count is limit
212 # # skip is offset, max-count is limit
213 # if filters.get('start'):
213 # if filters.get('start'):
214 # extra_filter += ' --skip=%s' % filters['start']
214 # extra_filter += ' --skip=%s' % filters['start']
215 # if filters.get('end'):
215 # if filters.get('end'):
216 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
216 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
217
217
218 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
218 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
219 try:
219 try:
220 output, __ = self.run_git_command(cmd)
220 output, __ = self.run_git_command(cmd)
221 except RepositoryError:
221 except RepositoryError:
222 # Can be raised for empty repositories
222 # Can be raised for empty repositories
223 return []
223 return []
224 return output.splitlines()
224 return output.splitlines()
225
225
226 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
226 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
227
227
228 def is_null(value):
228 def is_null(value):
229 return len(value) == commit_id_or_idx.count('0')
229 return len(value) == commit_id_or_idx.count('0')
230
230
231 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
231 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
232 return self.commit_ids[-1]
232 return self.commit_ids[-1]
233
233
234 commit_missing_err = "Commit {} does not exist for `{}`".format(
234 commit_missing_err = "Commit {} does not exist for `{}`".format(
235 *map(safe_str, [commit_id_or_idx, self.name]))
235 *map(safe_str, [commit_id_or_idx, self.name]))
236
236
237 is_bstr = isinstance(commit_id_or_idx, str)
237 is_bstr = isinstance(commit_id_or_idx, str)
238 is_branch = reference_obj and reference_obj.branch
238 is_branch = reference_obj and reference_obj.branch
239
239
240 lookup_ok = False
240 lookup_ok = False
241 if is_bstr:
241 if is_bstr:
242 # Need to call remote to translate id for tagging scenarios,
242 # Need to call remote to translate id for tagging scenarios,
243 # or branches that are numeric
243 # or branches that are numeric
244 try:
244 try:
245 remote_data = self._remote.get_object(commit_id_or_idx,
245 remote_data = self._remote.get_object(commit_id_or_idx,
246 maybe_unreachable=maybe_unreachable)
246 maybe_unreachable=maybe_unreachable)
247 commit_id_or_idx = remote_data["commit_id"]
247 commit_id_or_idx = remote_data["commit_id"]
248 lookup_ok = True
248 lookup_ok = True
249 except (CommitDoesNotExistError,):
249 except (CommitDoesNotExistError,):
250 lookup_ok = False
250 lookup_ok = False
251
251
252 if lookup_ok is False:
252 if lookup_ok is False:
253 is_numeric_idx = \
253 is_numeric_idx = \
254 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
254 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
255 or isinstance(commit_id_or_idx, int)
255 or isinstance(commit_id_or_idx, int)
256 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
256 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
257 try:
257 try:
258 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
258 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
259 lookup_ok = True
259 lookup_ok = True
260 except Exception:
260 except Exception:
261 raise CommitDoesNotExistError(commit_missing_err)
261 raise CommitDoesNotExistError(commit_missing_err)
262
262
263 # we failed regular lookup, and by integer number lookup
263 # we failed regular lookup, and by integer number lookup
264 if lookup_ok is False:
264 if lookup_ok is False:
265 raise CommitDoesNotExistError(commit_missing_err)
265 raise CommitDoesNotExistError(commit_missing_err)
266
266
267 # Ensure we return full id
267 # Ensure we return full id
268 if not SHA_PATTERN.match(str(commit_id_or_idx)):
268 if not SHA_PATTERN.match(str(commit_id_or_idx)):
269 raise CommitDoesNotExistError(
269 raise CommitDoesNotExistError(
270 "Given commit id %s not recognized" % commit_id_or_idx)
270 "Given commit id %s not recognized" % commit_id_or_idx)
271 return commit_id_or_idx
271 return commit_id_or_idx
272
272
273 def get_hook_location(self):
273 def get_hook_location(self):
274 """
274 """
275 returns absolute path to location where hooks are stored
275 returns absolute path to location where hooks are stored
276 """
276 """
277 loc = os.path.join(self.path, 'hooks')
277 loc = os.path.join(self.path, 'hooks')
278 if not self.bare:
278 if not self.bare:
279 loc = os.path.join(self.path, '.git', 'hooks')
279 loc = os.path.join(self.path, '.git', 'hooks')
280 return loc
280 return loc
281
281
282 @LazyProperty
282 @LazyProperty
283 def last_change(self):
283 def last_change(self):
284 """
284 """
285 Returns last change made on this repository as
285 Returns last change made on this repository as
286 `datetime.datetime` object.
286 `datetime.datetime` object.
287 """
287 """
288 try:
288 try:
289 return self.get_commit().date
289 return self.get_commit().date
290 except RepositoryError:
290 except RepositoryError:
291 tzoffset = makedate()[1]
291 tzoffset = makedate()[1]
292 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
292 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
293
293
294 def _get_fs_mtime(self):
294 def _get_fs_mtime(self):
295 idx_loc = '' if self.bare else '.git'
295 idx_loc = '' if self.bare else '.git'
296 # fallback to filesystem
296 # fallback to filesystem
297 in_path = os.path.join(self.path, idx_loc, "index")
297 in_path = os.path.join(self.path, idx_loc, "index")
298 he_path = os.path.join(self.path, idx_loc, "HEAD")
298 he_path = os.path.join(self.path, idx_loc, "HEAD")
299 if os.path.exists(in_path):
299 if os.path.exists(in_path):
300 return os.stat(in_path).st_mtime
300 return os.stat(in_path).st_mtime
301 else:
301 else:
302 return os.stat(he_path).st_mtime
302 return os.stat(he_path).st_mtime
303
303
304 @LazyProperty
304 @LazyProperty
305 def description(self):
305 def description(self):
306 description = self._remote.get_description()
306 description = self._remote.get_description()
307 return safe_str(description or self.DEFAULT_DESCRIPTION)
307 return safe_str(description or self.DEFAULT_DESCRIPTION)
308
308
309 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
309 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
310 if self.is_empty():
310 if self.is_empty():
311 return OrderedDict()
311 return OrderedDict()
312
312
313 result = []
313 result = []
314 for ref, sha in self._refs.items():
314 for ref, sha in self._refs.items():
315 if ref.startswith(prefix):
315 if ref.startswith(prefix):
316 ref_name = ref
316 ref_name = ref
317 if strip_prefix:
317 if strip_prefix:
318 ref_name = ref[len(prefix):]
318 ref_name = ref[len(prefix):]
319 result.append((safe_str(ref_name), sha))
319 result.append((safe_str(ref_name), sha))
320
320
321 def get_name(entry):
321 def get_name(entry):
322 return entry[0]
322 return entry[0]
323
323
324 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
324 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
325
325
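A self-contained illustration of the prefix filtering done by _get_refs_entries above; the refs mapping is fake and stands in for what the VCS server returns.

# Stand-alone sketch of the refs/heads/ prefix handling.
from collections import OrderedDict

refs = {
    'refs/heads/master': 'aaa111',
    'refs/heads/feature/lfs': 'bbb222',
    'refs/tags/v1.0': 'ccc333',
}

prefix = 'refs/heads/'
branches = OrderedDict(sorted(
    (ref[len(prefix):], sha) for ref, sha in refs.items() if ref.startswith(prefix)
))
print(branches)  # OrderedDict([('feature/lfs', 'bbb222'), ('master', 'aaa111')])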
326 def _get_branches(self):
326 def _get_branches(self):
327 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
327 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
328
328
329 @CachedProperty
329 @CachedProperty
330 def branches(self):
330 def branches(self):
331 return self._get_branches()
331 return self._get_branches()
332
332
333 @CachedProperty
333 @CachedProperty
334 def branches_closed(self):
334 def branches_closed(self):
335 return {}
335 return {}
336
336
337 @CachedProperty
337 @CachedProperty
338 def bookmarks(self):
338 def bookmarks(self):
339 return {}
339 return {}
340
340
341 @CachedProperty
341 @CachedProperty
342 def branches_all(self):
342 def branches_all(self):
343 all_branches = {}
343 all_branches = {}
344 all_branches.update(self.branches)
344 all_branches.update(self.branches)
345 all_branches.update(self.branches_closed)
345 all_branches.update(self.branches_closed)
346 return all_branches
346 return all_branches
347
347
348 @CachedProperty
348 @CachedProperty
349 def tags(self):
349 def tags(self):
350 return self._get_tags()
350 return self._get_tags()
351
351
352 def _get_tags(self):
352 def _get_tags(self):
353 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
353 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
354
354
355 def tag(self, name, user, commit_id=None, message=None, date=None,
355 def tag(self, name, user, commit_id=None, message=None, date=None,
356 **kwargs):
356 **kwargs):
357 # TODO: fix this method to apply annotated tags correctly with message
357 # TODO: fix this method to apply annotated tags correctly with message
358 """
358 """
359 Creates and returns a tag for the given ``commit_id``.
359 Creates and returns a tag for the given ``commit_id``.
360
360
361 :param name: name for new tag
361 :param name: name for new tag
362 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
362 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
363 :param commit_id: commit id for which new tag would be created
363 :param commit_id: commit id for which new tag would be created
364 :param message: message of the tag's commit
364 :param message: message of the tag's commit
365 :param date: date of tag's commit
365 :param date: date of tag's commit
366
366
367 :raises TagAlreadyExistError: if tag with same name already exists
367 :raises TagAlreadyExistError: if tag with same name already exists
368 """
368 """
369 if name in self.tags:
369 if name in self.tags:
370 raise TagAlreadyExistError("Tag %s already exists" % name)
370 raise TagAlreadyExistError("Tag %s already exists" % name)
371 commit = self.get_commit(commit_id=commit_id)
371 commit = self.get_commit(commit_id=commit_id)
372 message = message or f"Added tag {name} for commit {commit.raw_id}"
372 message = message or f"Added tag {name} for commit {commit.raw_id}"
373
373
374 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
374 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
375
375
376 self._invalidate_prop_cache('tags')
376 self._invalidate_prop_cache('tags')
377 self._invalidate_prop_cache('_refs')
377 self._invalidate_prop_cache('_refs')
378
378
379 return commit
379 return commit
380
380
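A usage sketch of the tag helpers, assuming the module path below; repository path and user string are placeholders.

# Sketch: create and then remove a lightweight tag on the current head.
from rhodecode.lib.vcs.backends.git.repository import GitRepository  # assumed path

repo = GitRepository('/srv/repos/my-repo.git')          # placeholder path
user = 'Joe Doe <joe.doe@example.com>'

tip = repo.get_commit()                                  # head commit
tagged = repo.tag('v1.0', user, commit_id=tip.raw_id)    # returns the tagged commit

assert 'v1.0' in repo.tags                               # prop caches were invalidated
repo.remove_tag('v1.0', user)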
381 def remove_tag(self, name, user, message=None, date=None):
381 def remove_tag(self, name, user, message=None, date=None):
382 """
382 """
383 Removes tag with the given ``name``.
383 Removes tag with the given ``name``.
384
384
385 :param name: name of the tag to be removed
385 :param name: name of the tag to be removed
386 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
386 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
387 :param message: message of the tag's removal commit
387 :param message: message of the tag's removal commit
388 :param date: date of tag's removal commit
388 :param date: date of tag's removal commit
389
389
390 :raises TagDoesNotExistError: if tag with given name does not exist
390 :raises TagDoesNotExistError: if tag with given name does not exist
391 """
391 """
392 if name not in self.tags:
392 if name not in self.tags:
393 raise TagDoesNotExistError("Tag %s does not exist" % name)
393 raise TagDoesNotExistError("Tag %s does not exist" % name)
394
394
395 self._remote.tag_remove(name)
395 self._remote.tag_remove(name)
396 self._invalidate_prop_cache('tags')
396 self._invalidate_prop_cache('tags')
397 self._invalidate_prop_cache('_refs')
397 self._invalidate_prop_cache('_refs')
398
398
399 def _get_refs(self):
399 def _get_refs(self):
400 return self._remote.get_refs()
400 return self._remote.get_refs()
401
401
402 @CachedProperty
402 @CachedProperty
403 def _refs(self):
403 def _refs(self):
404 return self._get_refs()
404 return self._get_refs()
405
405
406 @property
406 @property
407 def _ref_tree(self):
407 def _ref_tree(self):
408 node = tree = {}
408 node = tree = {}
409 for ref, sha in self._refs.items():
409 for ref, sha in self._refs.items():
410 path = ref.split('/')
410 path = ref.split('/')
411 for bit in path[:-1]:
411 for bit in path[:-1]:
412 node = node.setdefault(bit, {})
412 node = node.setdefault(bit, {})
413 node[path[-1]] = sha
413 node[path[-1]] = sha
414 node = tree
414 node = tree
415 return tree
415 return tree
416
416
417 def get_remote_ref(self, ref_name):
417 def get_remote_ref(self, ref_name):
418 ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
418 ref_key = f'refs/remotes/origin/{safe_str(ref_name)}'
419 try:
419 try:
420 return self._refs[ref_key]
420 return self._refs[ref_key]
421 except Exception:
421 except Exception:
422 return
422 return
423
423
424 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
424 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
425 translate_tag=True, maybe_unreachable=False, reference_obj=None):
425 translate_tag=True, maybe_unreachable=False, reference_obj=None):
426 """
426 """
427 Returns `GitCommit` object representing commit from git repository
427 Returns `GitCommit` object representing commit from git repository
428 at the given `commit_id` or head (most recent commit) if None given.
428 at the given `commit_id` or head (most recent commit) if None given.
429 """
429 """
430
430
431 if self.is_empty():
431 if self.is_empty():
432 raise EmptyRepositoryError("There are no commits yet")
432 raise EmptyRepositoryError("There are no commits yet")
433
433
434 if commit_id is not None:
434 if commit_id is not None:
435 self._validate_commit_id(commit_id)
435 self._validate_commit_id(commit_id)
436 try:
436 try:
437 # we have cached idx, use it without contacting the remote
437 # we have cached idx, use it without contacting the remote
438 idx = self._commit_ids[commit_id]
438 idx = self._commit_ids[commit_id]
439 return GitCommit(self, commit_id, idx, pre_load=pre_load)
439 return GitCommit(self, commit_id, idx, pre_load=pre_load)
440 except KeyError:
440 except KeyError:
441 pass
441 pass
442
442
443 elif commit_idx is not None:
443 elif commit_idx is not None:
444 self._validate_commit_idx(commit_idx)
444 self._validate_commit_idx(commit_idx)
445 try:
445 try:
446 _commit_id = self.commit_ids[commit_idx]
446 _commit_id = self.commit_ids[commit_idx]
447 if commit_idx < 0:
447 if commit_idx < 0:
448 commit_idx = self.commit_ids.index(_commit_id)
448 commit_idx = self.commit_ids.index(_commit_id)
449 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
449 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
450 except IndexError:
450 except IndexError:
451 commit_id = commit_idx
451 commit_id = commit_idx
452 else:
452 else:
453 commit_id = "tip"
453 commit_id = "tip"
454
454
455 if translate_tag:
455 if translate_tag:
456 commit_id = self._lookup_commit(
456 commit_id = self._lookup_commit(
457 commit_id, maybe_unreachable=maybe_unreachable,
457 commit_id, maybe_unreachable=maybe_unreachable,
458 reference_obj=reference_obj)
458 reference_obj=reference_obj)
459
459
460 try:
460 try:
461 idx = self._commit_ids[commit_id]
461 idx = self._commit_ids[commit_id]
462 except KeyError:
462 except KeyError:
463 idx = -1
463 idx = -1
464
464
465 return GitCommit(self, commit_id, idx, pre_load=pre_load)
465 return GitCommit(self, commit_id, idx, pre_load=pre_load)
466
466
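A small sketch of the two lookup modes of get_commit, assuming a non-empty repository; the path is a placeholder.

# Sketch: get_commit accepts either a commit id or a zero-based index.
from rhodecode.lib.vcs.backends.git.repository import GitRepository  # assumed path

repo = GitRepository('/srv/repos/my-repo.git')    # placeholder path

head = repo.get_commit()                          # defaults to "tip" (the head)
first = repo.get_commit(commit_idx=0)             # oldest commit, by index
same = repo.get_commit(commit_id=first.raw_id)    # id-based lookup of the same commit

assert first.raw_id == same.raw_id
print(head.raw_id, head.date)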
467 def get_commits(
467 def get_commits(
468 self, start_id=None, end_id=None, start_date=None, end_date=None,
468 self, start_id=None, end_id=None, start_date=None, end_date=None,
469 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
469 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
470 """
470 """
471 Returns generator of `GitCommit` objects from start to end (both
471 Returns generator of `GitCommit` objects from start to end (both
472 are inclusive), in ascending date order.
472 are inclusive), in ascending date order.
473
473
474 :param start_id: None, str(commit_id)
474 :param start_id: None, str(commit_id)
475 :param end_id: None, str(commit_id)
475 :param end_id: None, str(commit_id)
476 :param start_date: if specified, commits with commit date less than
476 :param start_date: if specified, commits with commit date less than
477 ``start_date`` would be filtered out from returned set
477 ``start_date`` would be filtered out from returned set
478 :param end_date: if specified, commits with commit date greater than
478 :param end_date: if specified, commits with commit date greater than
479 ``end_date`` would be filtered out from returned set
479 ``end_date`` would be filtered out from returned set
480 :param branch_name: if specified, commits not reachable from given
480 :param branch_name: if specified, commits not reachable from given
481 branch would be filtered out from returned set
481 branch would be filtered out from returned set
482 :param show_hidden: Show hidden commits such as obsolete or hidden from
482 :param show_hidden: Show hidden commits such as obsolete or hidden from
483 Mercurial evolve
483 Mercurial evolve
484 :raise BranchDoesNotExistError: If given `branch_name` does not
484 :raise BranchDoesNotExistError: If given `branch_name` does not
485 exist.
485 exist.
486 :raise CommitDoesNotExistError: If commits for given `start` or
486 :raise CommitDoesNotExistError: If commits for given `start` or
487 `end` could not be found.
487 `end` could not be found.
488
488
489 """
489 """
490 if self.is_empty():
490 if self.is_empty():
491 raise EmptyRepositoryError("There are no commits yet")
491 raise EmptyRepositoryError("There are no commits yet")
492
492
493 self._validate_branch_name(branch_name)
493 self._validate_branch_name(branch_name)
494
494
495 if start_id is not None:
495 if start_id is not None:
496 self._validate_commit_id(start_id)
496 self._validate_commit_id(start_id)
497 if end_id is not None:
497 if end_id is not None:
498 self._validate_commit_id(end_id)
498 self._validate_commit_id(end_id)
499
499
500 start_raw_id = self._lookup_commit(start_id)
500 start_raw_id = self._lookup_commit(start_id)
501 start_pos = self._commit_ids[start_raw_id] if start_id else None
501 start_pos = self._commit_ids[start_raw_id] if start_id else None
502 end_raw_id = self._lookup_commit(end_id)
502 end_raw_id = self._lookup_commit(end_id)
503 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
503 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
504
504
505 if None not in [start_id, end_id] and start_pos > end_pos:
505 if None not in [start_id, end_id] and start_pos > end_pos:
506 raise RepositoryError(
506 raise RepositoryError(
507 "Start commit '%s' cannot be after end commit '%s'" %
507 "Start commit '%s' cannot be after end commit '%s'" %
508 (start_id, end_id))
508 (start_id, end_id))
509
509
510 if end_pos is not None:
510 if end_pos is not None:
511 end_pos += 1
511 end_pos += 1
512
512
513 filter_ = []
513 filter_ = []
514 if branch_name:
514 if branch_name:
515 filter_.append({'branch_name': branch_name})
515 filter_.append({'branch_name': branch_name})
516 if start_date and not end_date:
516 if start_date and not end_date:
517 filter_.append({'since': start_date})
517 filter_.append({'since': start_date})
518 if end_date and not start_date:
518 if end_date and not start_date:
519 filter_.append({'until': end_date})
519 filter_.append({'until': end_date})
520 if start_date and end_date:
520 if start_date and end_date:
521 filter_.append({'since': start_date})
521 filter_.append({'since': start_date})
522 filter_.append({'until': end_date})
522 filter_.append({'until': end_date})
523
523
524 # if start_pos or end_pos:
524 # if start_pos or end_pos:
525 # filter_.append({'start': start_pos})
525 # filter_.append({'start': start_pos})
526 # filter_.append({'end': end_pos})
526 # filter_.append({'end': end_pos})
527
527
528 if filter_:
528 if filter_:
529 revfilters = {
529 revfilters = {
530 'branch_name': branch_name,
530 'branch_name': branch_name,
531 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
531 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
532 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
532 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
533 'start': start_pos,
533 'start': start_pos,
534 'end': end_pos,
534 'end': end_pos,
535 }
535 }
536 commit_ids = self._get_commit_ids(filters=revfilters)
536 commit_ids = self._get_commit_ids(filters=revfilters)
537
537
538 else:
538 else:
539 commit_ids = self.commit_ids
539 commit_ids = self.commit_ids
540
540
541 if start_pos or end_pos:
541 if start_pos or end_pos:
542 commit_ids = commit_ids[start_pos: end_pos]
542 commit_ids = commit_ids[start_pos: end_pos]
543
543
544 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
544 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
545 translate_tag=translate_tags)
545 translate_tag=translate_tags)
546
546
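A usage sketch of the commit generator above; branch name, dates and path are placeholders.

# Sketch: iterate commits on one branch within a date window.
import datetime

from rhodecode.lib.vcs.backends.git.repository import GitRepository  # assumed path

repo = GitRepository('/srv/repos/my-repo.git')    # placeholder path

commits = repo.get_commits(
    branch_name='master',
    start_date=datetime.datetime(2023, 1, 1),
    end_date=datetime.datetime(2023, 12, 31),
)
for commit in commits:
    print(commit.raw_id, commit.date)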
547 def get_diff(
547 def get_diff(
548 self, commit1, commit2, path='', ignore_whitespace=False,
548 self, commit1, commit2, path='', ignore_whitespace=False,
549 context=3, path1=None):
549 context=3, path1=None):
550 """
550 """
551 Returns (git like) *diff*, as plain text. Shows changes introduced by
551 Returns (git like) *diff*, as plain text. Shows changes introduced by
552 ``commit2`` since ``commit1``.
552 ``commit2`` since ``commit1``.
553
553
554 :param commit1: Entry point from which diff is shown. Can be
554 :param commit1: Entry point from which diff is shown. Can be
555 ``self.EMPTY_COMMIT`` - in this case, patch showing all
555 ``self.EMPTY_COMMIT`` - in this case, patch showing all
556 the changes since empty state of the repository until ``commit2``
556 the changes since empty state of the repository until ``commit2``
557 :param commit2: Until which commits changes should be shown.
557 :param commit2: Until which commits changes should be shown.
558 :param path:
558 :param path:
559 :param ignore_whitespace: If set to ``True``, would not show whitespace
559 :param ignore_whitespace: If set to ``True``, would not show whitespace
560 changes. Defaults to ``False``.
560 changes. Defaults to ``False``.
561 :param context: How many lines before/after changed lines should be
561 :param context: How many lines before/after changed lines should be
562 shown. Defaults to ``3``.
562 shown. Defaults to ``3``.
563 :param path1:
563 :param path1:
564 """
564 """
565 self._validate_diff_commits(commit1, commit2)
565 self._validate_diff_commits(commit1, commit2)
566 if path1 is not None and path1 != path:
566 if path1 is not None and path1 != path:
567 raise ValueError("Diff of two different paths not supported.")
567 raise ValueError("Diff of two different paths not supported.")
568
568
569 if path:
569 if path:
570 file_filter = path
570 file_filter = path
571 else:
571 else:
572 file_filter = None
572 file_filter = None
573
573
574 diff = self._remote.diff(
574 diff = self._remote.diff(
575 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
575 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
576 opt_ignorews=ignore_whitespace,
576 opt_ignorews=ignore_whitespace,
577 context=context)
577 context=context)
578
578
579 return GitDiff(diff)
579 return GitDiff(diff)
580
580
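A sketch of producing a diff between two commits; the path argument and repository location are placeholders, and the head commit is assumed to have a parent.

# Sketch: diff of a single file between the head commit and its parent.
from rhodecode.lib.vcs.backends.git.repository import GitRepository  # assumed path

repo = GitRepository('/srv/repos/my-repo.git')    # placeholder path

head = repo.get_commit()
parent = head.parents[0]                          # assumes head is not a root commit

diff = repo.get_diff(parent, head, path='README.rst',
                     ignore_whitespace=True, context=5)
# `diff` is a GitDiff object wrapping the git-style diff text from _remote.diff().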
581 def strip(self, commit_id, branch_name):
581 def strip(self, commit_id, branch_name):
582 commit = self.get_commit(commit_id=commit_id)
582 commit = self.get_commit(commit_id=commit_id)
583 if commit.merge:
583 if commit.merge:
584 raise Exception('Cannot reset to merge commit')
584 raise Exception('Cannot reset to merge commit')
585
585
586 if not branch_name:
586 if not branch_name:
587 raise ValueError(f'git strip requires a valid branch name, got {branch_name} instead')
587 raise ValueError(f'git strip requires a valid branch name, got {branch_name} instead')
588
588
589 # parent is going to be the new head now
589 # parent is going to be the new head now
590 commit = commit.parents[0]
590 commit = commit.parents[0]
591 self._remote.update_refs(f'refs/heads/{branch_name}', commit.raw_id)
591 self._remote.update_refs(f'refs/heads/{branch_name}', commit.raw_id)
592
592
593 # clear cached properties
593 # clear cached properties
594 self._invalidate_prop_cache('commit_ids')
594 self._invalidate_prop_cache('commit_ids')
595 self._invalidate_prop_cache('_refs')
595 self._invalidate_prop_cache('_refs')
596 self._invalidate_prop_cache('branches')
596 self._invalidate_prop_cache('branches')
597
597
598 return len(self.commit_ids)
598 return len(self.commit_ids)
599
599
600 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
600 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
601 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
601 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
602 self, commit_id1, repo2, commit_id2)
602 self, commit_id1, repo2, commit_id2)
603
603
604 if commit_id1 == commit_id2:
604 if commit_id1 == commit_id2:
605 return commit_id1
605 return commit_id1
606
606
607 if self != repo2:
607 if self != repo2:
608 commits = self._remote.get_missing_revs(
608 commits = self._remote.get_missing_revs(
609 commit_id1, commit_id2, repo2.path)
609 commit_id1, commit_id2, repo2.path)
610 if commits:
610 if commits:
611 commit = repo2.get_commit(commits[-1])
611 commit = repo2.get_commit(commits[-1])
612 if commit.parents:
612 if commit.parents:
613 ancestor_id = commit.parents[0].raw_id
613 ancestor_id = commit.parents[0].raw_id
614 else:
614 else:
615 ancestor_id = None
615 ancestor_id = None
616 else:
616 else:
617 # no commits from other repo, ancestor_id is the commit_id2
617 # no commits from other repo, ancestor_id is the commit_id2
618 ancestor_id = commit_id2
618 ancestor_id = commit_id2
619 else:
619 else:
620 output, __ = self.run_git_command(
620 output, __ = self.run_git_command(
621 ['merge-base', commit_id1, commit_id2])
621 ['merge-base', commit_id1, commit_id2])
622 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
622 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
623
623
624 log.debug('Found common ancestor with sha: %s', ancestor_id)
624 log.debug('Found common ancestor with sha: %s', ancestor_id)
625
625
626 return ancestor_id
626 return ancestor_id
627
627
628 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
628 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
629 repo1 = self
629 repo1 = self
630 ancestor_id = None
630 ancestor_id = None
631
631
632 if commit_id1 == commit_id2:
632 if commit_id1 == commit_id2:
633 commits = []
633 commits = []
634 elif repo1 != repo2:
634 elif repo1 != repo2:
635 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
635 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
636 repo2.path)
636 repo2.path)
637 commits = [
637 commits = [
638 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
638 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
639 for commit_id in reversed(missing_ids)]
639 for commit_id in reversed(missing_ids)]
640 else:
640 else:
641 output, __ = repo1.run_git_command(
641 output, __ = repo1.run_git_command(
642 ['log', '--reverse', '--pretty=format: %H', '-s',
642 ['log', '--reverse', '--pretty=format: %H', '-s',
643 f'{commit_id1}..{commit_id2}'])
643 f'{commit_id1}..{commit_id2}'])
644 commits = [
644 commits = [
645 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
645 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
646 for commit_id in self.COMMIT_ID_PAT.findall(output)]
646 for commit_id in self.COMMIT_ID_PAT.findall(output)]
647
647
648 return commits
648 return commits
649
649
650 @LazyProperty
650 @LazyProperty
651 def in_memory_commit(self):
651 def in_memory_commit(self):
652 """
652 """
653 Returns ``GitInMemoryCommit`` object for this repository.
653 Returns ``GitInMemoryCommit`` object for this repository.
654 """
654 """
655 return GitInMemoryCommit(self)
655 return GitInMemoryCommit(self)
656
656
657 def pull(self, url, commit_ids=None, update_after=False):
657 def pull(self, url, commit_ids=None, update_after=False):
658 """
658 """
659 Pull changes from external location. Pull is different in GIT
659 Pull changes from external location. Pull is different in GIT
660 than fetch since it also does a checkout
660 than fetch since it also does a checkout
661
661
662 :param commit_ids: Optional. Can be set to a list of commit ids
662 :param commit_ids: Optional. Can be set to a list of commit ids
663 which shall be pulled from the other repository.
663 which shall be pulled from the other repository.
664 """
664 """
665 refs = None
665 refs = None
666 if commit_ids is not None:
666 if commit_ids is not None:
667 remote_refs = self._remote.get_remote_refs(url)
667 remote_refs = self._remote.get_remote_refs(url)
668 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
668 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
669 self._remote.pull(url, refs=refs, update_after=update_after)
669 self._remote.pull(url, refs=refs, update_after=update_after)
670 self._remote.invalidate_vcs_cache()
670 self._remote.invalidate_vcs_cache()
671
671
672 def fetch(self, url, commit_ids=None):
672 def fetch(self, url, commit_ids=None, **kwargs):
673 """
673 """
674 Fetch all git objects from external location.
674 Fetch all git objects from external location.
675 """
675 """
676 self._remote.sync_fetch(url, refs=commit_ids)
676 self._remote.sync_fetch(url, refs=commit_ids, **kwargs)
677 self._remote.invalidate_vcs_cache()
677 self._remote.invalidate_vcs_cache()
678
678
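A minimal sketch of the sync entry points; the URL and paths are placeholders, and passing sync_large_objects through the new **kwargs is an assumption based on how the web view drives the higher-level pull_changes().

# Sketch: sync a bare mirror and forward the large-objects flag.
from rhodecode.lib.vcs.backends.git.repository import GitRepository  # assumed path

repo = GitRepository('/srv/repos/mirror.git')     # placeholder bare mirror

# fetch() now forwards arbitrary keyword arguments verbatim to
# _remote.sync_fetch(), e.g. a flag to also pull large (LFS) objects.
repo.fetch('https://code.example.com/my-org/my-repo', sync_large_objects=True)

# Non-bare clones would use pull(), which can also update the working copy:
# repo.pull('https://code.example.com/my-org/my-repo', update_after=True)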
679 def push(self, url):
679 def push(self, url):
680 refs = None
680 refs = None
681 self._remote.sync_push(url, refs=refs)
681 self._remote.sync_push(url, refs=refs)
682
682
683 def set_refs(self, ref_name, commit_id):
683 def set_refs(self, ref_name, commit_id):
684 self._remote.set_refs(ref_name, commit_id)
684 self._remote.set_refs(ref_name, commit_id)
685 self._invalidate_prop_cache('_refs')
685 self._invalidate_prop_cache('_refs')
686
686
687 def remove_ref(self, ref_name):
687 def remove_ref(self, ref_name):
688 self._remote.remove_ref(ref_name)
688 self._remote.remove_ref(ref_name)
689 self._invalidate_prop_cache('_refs')
689 self._invalidate_prop_cache('_refs')
690
690
691 def run_gc(self, prune=True):
691 def run_gc(self, prune=True):
692 cmd = ['gc', '--aggressive']
692 cmd = ['gc', '--aggressive']
693 if prune:
693 if prune:
694 cmd += ['--prune=now']
694 cmd += ['--prune=now']
695 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
695 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
696 return stderr
696 return stderr
697
697
698 def _update_server_info(self):
698 def _update_server_info(self):
699 """
699 """
700 runs gits update-server-info command in this repo instance
700 runs gits update-server-info command in this repo instance
701 """
701 """
702 self._remote.update_server_info()
702 self._remote.update_server_info()
703
703
704 def _current_branch(self):
704 def _current_branch(self):
705 """
705 """
706 Return the name of the current branch.
706 Return the name of the current branch.
707
707
708 It only works for non bare repositories (i.e. repositories with a
708 It only works for non bare repositories (i.e. repositories with a
709 working copy)
709 working copy)
710 """
710 """
711 if self.bare:
711 if self.bare:
712 raise RepositoryError('Bare git repos do not have active branches')
712 raise RepositoryError('Bare git repos do not have active branches')
713
713
714 if self.is_empty():
714 if self.is_empty():
715 return None
715 return None
716
716
717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
718 return stdout.strip()
718 return stdout.strip()
719
719
720 def _checkout(self, branch_name, create=False, force=False):
720 def _checkout(self, branch_name, create=False, force=False):
721 """
721 """
722 Checkout a branch in the working directory.
722 Checkout a branch in the working directory.
723
723
724 It tries to create the branch if create is True, failing if the branch
724 It tries to create the branch if create is True, failing if the branch
725 already exists.
725 already exists.
726
726
727 It only works for non bare repositories (i.e. repositories with a
727 It only works for non bare repositories (i.e. repositories with a
728 working copy)
728 working copy)
729 """
729 """
730 if self.bare:
730 if self.bare:
731 raise RepositoryError('Cannot checkout branches in a bare git repo')
731 raise RepositoryError('Cannot checkout branches in a bare git repo')
732
732
733 cmd = ['checkout']
733 cmd = ['checkout']
734 if force:
734 if force:
735 cmd.append('-f')
735 cmd.append('-f')
736 if create:
736 if create:
737 cmd.append('-b')
737 cmd.append('-b')
738 cmd.append(branch_name)
738 cmd.append(branch_name)
739 self.run_git_command(cmd, fail_on_stderr=False)
739 self.run_git_command(cmd, fail_on_stderr=False)
740
740
741 def _create_branch(self, branch_name, commit_id):
741 def _create_branch(self, branch_name, commit_id):
742 """
742 """
743 creates a branch in a GIT repo
743 creates a branch in a GIT repo
744 """
744 """
745 self._remote.create_branch(branch_name, commit_id)
745 self._remote.create_branch(branch_name, commit_id)
746
746
747 def _identify(self):
747 def _identify(self):
748 """
748 """
749 Return the current state of the working directory.
749 Return the current state of the working directory.
750 """
750 """
751 if self.bare:
751 if self.bare:
752 raise RepositoryError('Bare git repos do not have active branches')
752 raise RepositoryError('Bare git repos do not have active branches')
753
753
754 if self.is_empty():
754 if self.is_empty():
755 return None
755 return None
756
756
757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
758 return stdout.strip()
758 return stdout.strip()
759
759
760 def _local_clone(self, clone_path, branch_name, source_branch=None):
760 def _local_clone(self, clone_path, branch_name, source_branch=None):
761 """
761 """
762 Create a local clone of the current repo.
762 Create a local clone of the current repo.
763 """
763 """
764 # N.B.(skreft): the --branch option is required as otherwise the shallow
764 # N.B.(skreft): the --branch option is required as otherwise the shallow
765 # clone will only fetch the active branch.
765 # clone will only fetch the active branch.
766 cmd = ['clone', '--branch', branch_name,
766 cmd = ['clone', '--branch', branch_name,
767 self.path, os.path.abspath(clone_path)]
767 self.path, os.path.abspath(clone_path)]
768
768
769 self.run_git_command(cmd, fail_on_stderr=False)
769 self.run_git_command(cmd, fail_on_stderr=False)
770
770
771 # if we get the different source branch, make sure we also fetch it for
771 # if we get the different source branch, make sure we also fetch it for
772 # merge conditions
772 # merge conditions
773 if source_branch and source_branch != branch_name:
773 if source_branch and source_branch != branch_name:
774 # check if the ref exists.
774 # check if the ref exists.
775 shadow_repo = GitRepository(os.path.abspath(clone_path))
775 shadow_repo = GitRepository(os.path.abspath(clone_path))
776 if shadow_repo.get_remote_ref(source_branch):
776 if shadow_repo.get_remote_ref(source_branch):
777 cmd = ['fetch', self.path, source_branch]
777 cmd = ['fetch', self.path, source_branch]
778 self.run_git_command(cmd, fail_on_stderr=False)
778 self.run_git_command(cmd, fail_on_stderr=False)
779
779
780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
781 """
781 """
782 Fetch a branch from a local repository.
782 Fetch a branch from a local repository.
783 """
783 """
784 repository_path = os.path.abspath(repository_path)
784 repository_path = os.path.abspath(repository_path)
785 if repository_path == self.path:
785 if repository_path == self.path:
786 raise ValueError('Cannot fetch from the same repository')
786 raise ValueError('Cannot fetch from the same repository')
787
787
788 if use_origin:
788 if use_origin:
789 branch_name = '+{branch}:refs/heads/{branch}'.format(
789 branch_name = '+{branch}:refs/heads/{branch}'.format(
790 branch=branch_name)
790 branch=branch_name)
791
791
792 cmd = ['fetch', '--no-tags', '--update-head-ok',
792 cmd = ['fetch', '--no-tags', '--update-head-ok',
793 repository_path, branch_name]
793 repository_path, branch_name]
794 self.run_git_command(cmd, fail_on_stderr=False)
794 self.run_git_command(cmd, fail_on_stderr=False)
795
795
796 def _local_reset(self, branch_name):
796 def _local_reset(self, branch_name):
797 branch_name = f'{branch_name}'
797 branch_name = f'{branch_name}'
798 cmd = ['reset', '--hard', branch_name, '--']
798 cmd = ['reset', '--hard', branch_name, '--']
799 self.run_git_command(cmd, fail_on_stderr=False)
799 self.run_git_command(cmd, fail_on_stderr=False)
800
800
801 def _last_fetch_heads(self):
801 def _last_fetch_heads(self):
802 """
802 """
803 Return the last fetched heads that need merging.
803 Return the last fetched heads that need merging.
804
804
805 The algorithm is defined at
805 The algorithm is defined at
806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
807 """
807 """
808 if not self.bare:
808 if not self.bare:
809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
810 else:
810 else:
811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
812
812
813 heads = []
813 heads = []
814 with open(fetch_heads_path) as f:
814 with open(fetch_heads_path) as f:
815 for line in f:
815 for line in f:
816 if ' not-for-merge ' in line:
816 if ' not-for-merge ' in line:
817 continue
817 continue
818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
819 heads.append(line)
819 heads.append(line)
820
820
821 return heads
821 return heads
822
822
823 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
823 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
824 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
824 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
825
825
826 def _local_pull(self, repository_path, branch_name, ff_only=True):
826 def _local_pull(self, repository_path, branch_name, ff_only=True):
827 """
827 """
828 Pull a branch from a local repository.
828 Pull a branch from a local repository.
829 """
829 """
830 if self.bare:
830 if self.bare:
831 raise RepositoryError('Cannot pull into a bare git repository')
831 raise RepositoryError('Cannot pull into a bare git repository')
832 # N.B.(skreft): The --ff-only option is to make sure this is a
832 # N.B.(skreft): The --ff-only option is to make sure this is a
833 # fast-forward (i.e., we are only pulling new changes and there are no
833 # fast-forward (i.e., we are only pulling new changes and there are no
834 # conflicts with our current branch)
834 # conflicts with our current branch)
835 # Additionally, that option needs to go before --no-tags, otherwise git
835 # Additionally, that option needs to go before --no-tags, otherwise git
836 # pull complains about it being an unknown flag.
836 # pull complains about it being an unknown flag.
837 cmd = ['pull']
837 cmd = ['pull']
838 if ff_only:
838 if ff_only:
839 cmd.append('--ff-only')
839 cmd.append('--ff-only')
840 cmd.extend(['--no-tags', repository_path, branch_name])
840 cmd.extend(['--no-tags', repository_path, branch_name])
841 self.run_git_command(cmd, fail_on_stderr=False)
841 self.run_git_command(cmd, fail_on_stderr=False)
842
842
843 def _local_merge(self, merge_message, user_name, user_email, heads):
843 def _local_merge(self, merge_message, user_name, user_email, heads):
844 """
844 """
845 Merge the given head into the checked out branch.
845 Merge the given head into the checked out branch.
846
846
847 It will force a merge commit.
847 It will force a merge commit.
848
848
849 Currently it raises an error if the repo is empty, as it is not possible
849 Currently it raises an error if the repo is empty, as it is not possible
850 to create a merge commit in an empty repo.
850 to create a merge commit in an empty repo.
851
851
852 :param merge_message: The message to use for the merge commit.
852 :param merge_message: The message to use for the merge commit.
853 :param heads: the heads to merge.
853 :param heads: the heads to merge.
854 """
854 """
855 if self.bare:
855 if self.bare:
856 raise RepositoryError('Cannot merge into a bare git repository')
856 raise RepositoryError('Cannot merge into a bare git repository')
857
857
858 if not heads:
858 if not heads:
859 return
859 return
860
860
861 if self.is_empty():
861 if self.is_empty():
862 # TODO(skreft): do something more robust in this case.
862 # TODO(skreft): do something more robust in this case.
863 raise RepositoryError('Do not know how to merge into empty repositories yet')
863 raise RepositoryError('Do not know how to merge into empty repositories yet')
864 unresolved = None
864 unresolved = None
865
865
866 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
866 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
867 # commit message. We also specify the user who is doing the merge.
867 # commit message. We also specify the user who is doing the merge.
868 cmd = ['-c', f'user.name="{user_name}"',
868 cmd = ['-c', f'user.name="{user_name}"',
869 '-c', f'user.email={user_email}',
869 '-c', f'user.email={user_email}',
870 'merge', '--no-ff', '-m', safe_str(merge_message)]
870 'merge', '--no-ff', '-m', safe_str(merge_message)]
871
871
872 merge_cmd = cmd + heads
872 merge_cmd = cmd + heads
873
873
874 try:
874 try:
875 self.run_git_command(merge_cmd, fail_on_stderr=False)
875 self.run_git_command(merge_cmd, fail_on_stderr=False)
876 except RepositoryError:
876 except RepositoryError:
877 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
877 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
878 fail_on_stderr=False)[0].splitlines()
878 fail_on_stderr=False)[0].splitlines()
879 # NOTE(marcink): we add U notation for consistency with HG backend output
879 # NOTE(marcink): we add U notation for consistency with HG backend output
880 unresolved = [f'U {f}' for f in files]
880 unresolved = [f'U {f}' for f in files]
881
881
882 # Cleanup any merge leftovers
882 # Cleanup any merge leftovers
883 self._remote.invalidate_vcs_cache()
883 self._remote.invalidate_vcs_cache()
884 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
884 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
885
885
886 if unresolved:
886 if unresolved:
887 raise UnresolvedFilesInRepo(unresolved)
887 raise UnresolvedFilesInRepo(unresolved)
888 else:
888 else:
889 raise
889 raise
890
890
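# --- Illustrative sketch (editorial addition, not part of this diff) -------
# Shows the conflict-reporting behaviour of _local_merge() above: on a failed
# merge it aborts and raises UnresolvedFilesInRepo (imported from
# rhodecode.lib.vcs.exceptions) carrying 'U <path>' entries. `wc_repo` and
# the commit id placeholder are hypothetical.
try:
    wc_repo._local_merge('Merge feature into develop',
                         'Jane Doe', 'jane@example.com',
                         ['<source-commit-id>'])
except UnresolvedFilesInRepo as exc:
    conflicted = exc.args[0]   # entries look like 'U path/to/file'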
891 def _local_push(
891 def _local_push(
892 self, source_branch, repository_path, target_branch,
892 self, source_branch, repository_path, target_branch,
893 enable_hooks=False, rc_scm_data=None):
893 enable_hooks=False, rc_scm_data=None):
894 """
894 """
895 Push the source_branch to the given repository and target_branch.
895 Push the source_branch to the given repository and target_branch.
896
896
897 Currently, if the target_branch is not master and the target repo is
897 Currently, if the target_branch is not master and the target repo is
898 empty, the push will work, but then GitRepository won't be able to find
898 empty, the push will work, but then GitRepository won't be able to find
899 the pushed branch or the commits, as HEAD will be corrupted (i.e.,
899 the pushed branch or the commits, as HEAD will be corrupted (i.e.,
900 pointing to master, which does not exist).
900 pointing to master, which does not exist).
901
901
902 It does not run the hooks in the target repo.
902 It does not run the hooks in the target repo.
903 """
903 """
904 # TODO(skreft): deal with the case in which the target repo is empty,
904 # TODO(skreft): deal with the case in which the target repo is empty,
905 # and the target_branch is not master.
905 # and the target_branch is not master.
906 target_repo = GitRepository(repository_path)
906 target_repo = GitRepository(repository_path)
907 if (not target_repo.bare and
907 if (not target_repo.bare and
908 target_repo._current_branch() == target_branch):
908 target_repo._current_branch() == target_branch):
909 # Git prevents pushing to the checked out branch, so simulate it by
909 # Git prevents pushing to the checked out branch, so simulate it by
910 # pulling into the target repository.
910 # pulling into the target repository.
911 target_repo._local_pull(self.path, source_branch)
911 target_repo._local_pull(self.path, source_branch)
912 else:
912 else:
913 cmd = ['push', os.path.abspath(repository_path),
913 cmd = ['push', os.path.abspath(repository_path),
914 f'{source_branch}:{target_branch}']
914 f'{source_branch}:{target_branch}']
915 gitenv = {}
915 gitenv = {}
916 if rc_scm_data:
916 if rc_scm_data:
917 gitenv.update({'RC_SCM_DATA': rc_scm_data})
917 gitenv.update({'RC_SCM_DATA': rc_scm_data})
918
918
919 if not enable_hooks:
919 if not enable_hooks:
920 gitenv['RC_SKIP_HOOKS'] = '1'
920 gitenv['RC_SKIP_HOOKS'] = '1'
921 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
921 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
922
922
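# --- Illustrative sketch (editorial addition, not part of this diff) -------
# Shows a possible call to _local_push() above: pushing a prepared branch
# from the shadow repository into the target repository with hooks enabled.
# All names/paths are hypothetical; `config` is assumed to be the repo's
# Config object, queried the same way as in _merge_repo() further below.
shadow_repo._local_push(
    'pr_feature-develop_0', '/srv/repos/target.git', 'develop',
    enable_hooks=True,
    rc_scm_data=config.get('rhodecode', 'RC_SCM_DATA'))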
923 def _get_new_pr_branch(self, source_branch, target_branch):
923 def _get_new_pr_branch(self, source_branch, target_branch):
924 prefix = f'pr_{source_branch}-{target_branch}_'
924 prefix = f'pr_{source_branch}-{target_branch}_'
925 pr_branches = []
925 pr_branches = []
926 for branch in self.branches:
926 for branch in self.branches:
927 if branch.startswith(prefix):
927 if branch.startswith(prefix):
928 pr_branches.append(int(branch[len(prefix):]))
928 pr_branches.append(int(branch[len(prefix):]))
929
929
930 if not pr_branches:
930 if not pr_branches:
931 branch_id = 0
931 branch_id = 0
932 else:
932 else:
933 branch_id = max(pr_branches) + 1
933 branch_id = max(pr_branches) + 1
934
934
935 return '%s%d' % (prefix, branch_id)
935 return '%s%d' % (prefix, branch_id)
936
936
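# --- Illustrative sketch (editorial addition, not part of this diff) -------
# The name generated by _get_new_pr_branch() above follows the pattern
# 'pr_<source>-<target>_<n>', where <n> is one past the highest existing
# suffix, or 0 when no such branch exists. Branch names are hypothetical.
pr_branch = shadow_repo._get_new_pr_branch('feature', 'develop')
# -> 'pr_feature-develop_0' in a fresh shadow repo,
#    'pr_feature-develop_2' if ..._0 and ..._1 already exist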
937 def _maybe_prepare_merge_workspace(
937 def _maybe_prepare_merge_workspace(
938 self, repo_id, workspace_id, target_ref, source_ref):
938 self, repo_id, workspace_id, target_ref, source_ref):
939 shadow_repository_path = self._get_shadow_repository_path(
939 shadow_repository_path = self._get_shadow_repository_path(
940 self.path, repo_id, workspace_id)
940 self.path, repo_id, workspace_id)
941 if not os.path.exists(shadow_repository_path):
941 if not os.path.exists(shadow_repository_path):
942 self._local_clone(
942 self._local_clone(
943 shadow_repository_path, target_ref.name, source_ref.name)
943 shadow_repository_path, target_ref.name, source_ref.name)
944 log.debug('Prepared %s shadow repository in %s',
944 log.debug('Prepared %s shadow repository in %s',
945 self.alias, shadow_repository_path)
945 self.alias, shadow_repository_path)
946
946
947 return shadow_repository_path
947 return shadow_repository_path
948
948
949 def _merge_repo(self, repo_id, workspace_id, target_ref,
949 def _merge_repo(self, repo_id, workspace_id, target_ref,
950 source_repo, source_ref, merge_message,
950 source_repo, source_ref, merge_message,
951 merger_name, merger_email, dry_run=False,
951 merger_name, merger_email, dry_run=False,
952 use_rebase=False, close_branch=False):
952 use_rebase=False, close_branch=False):
953
953
954 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
954 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
955 'rebase' if use_rebase else 'merge', dry_run)
955 'rebase' if use_rebase else 'merge', dry_run)
956
956
957 if target_ref.commit_id != self.branches[target_ref.name]:
957 if target_ref.commit_id != self.branches[target_ref.name]:
958 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
958 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
959 target_ref.commit_id, self.branches[target_ref.name])
959 target_ref.commit_id, self.branches[target_ref.name])
960 return MergeResponse(
960 return MergeResponse(
961 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
961 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
962 metadata={'target_ref': target_ref})
962 metadata={'target_ref': target_ref})
963
963
964 shadow_repository_path = self._maybe_prepare_merge_workspace(
964 shadow_repository_path = self._maybe_prepare_merge_workspace(
965 repo_id, workspace_id, target_ref, source_ref)
965 repo_id, workspace_id, target_ref, source_ref)
966 shadow_repo = self.get_shadow_instance(shadow_repository_path)
966 shadow_repo = self.get_shadow_instance(shadow_repository_path)
967
967
968 # checkout source, if it's different. Otherwise we could not
968 # checkout source, if it's different. Otherwise we could not
969 # fetch proper commits for merge testing
969 # fetch proper commits for merge testing
970 if source_ref.name != target_ref.name:
970 if source_ref.name != target_ref.name:
971 if shadow_repo.get_remote_ref(source_ref.name):
971 if shadow_repo.get_remote_ref(source_ref.name):
972 shadow_repo._checkout(source_ref.name, force=True)
972 shadow_repo._checkout(source_ref.name, force=True)
973
973
974 # checkout target, and fetch changes
974 # checkout target, and fetch changes
975 shadow_repo._checkout(target_ref.name, force=True)
975 shadow_repo._checkout(target_ref.name, force=True)
976
976
977 # fetch/reset to pull the target, in case it has changed;
977 # fetch/reset to pull the target, in case it has changed;
978 # this handles even force-pushed changes
978 # this handles even force-pushed changes
979 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
979 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
980 shadow_repo._local_reset(target_ref.name)
980 shadow_repo._local_reset(target_ref.name)
981
981
982 # Need to reload the repo to invalidate the cache, otherwise we cannot
982 # Need to reload the repo to invalidate the cache, otherwise we cannot
983 # retrieve the last target commit.
983 # retrieve the last target commit.
984 shadow_repo = self.get_shadow_instance(shadow_repository_path)
984 shadow_repo = self.get_shadow_instance(shadow_repository_path)
985 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
985 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
986 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
986 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
987 target_ref, target_ref.commit_id,
987 target_ref, target_ref.commit_id,
988 shadow_repo.branches[target_ref.name])
988 shadow_repo.branches[target_ref.name])
989 return MergeResponse(
989 return MergeResponse(
990 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
990 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
991 metadata={'target_ref': target_ref})
991 metadata={'target_ref': target_ref})
992
992
993 # calculate new branch
993 # calculate new branch
994 pr_branch = shadow_repo._get_new_pr_branch(
994 pr_branch = shadow_repo._get_new_pr_branch(
995 source_ref.name, target_ref.name)
995 source_ref.name, target_ref.name)
996 log.debug('using pull-request merge branch: `%s`', pr_branch)
996 log.debug('using pull-request merge branch: `%s`', pr_branch)
997 # checkout to temp branch, and fetch changes
997 # checkout to temp branch, and fetch changes
998 shadow_repo._checkout(pr_branch, create=True)
998 shadow_repo._checkout(pr_branch, create=True)
999 try:
999 try:
1000 shadow_repo._local_fetch(source_repo.path, source_ref.name)
1000 shadow_repo._local_fetch(source_repo.path, source_ref.name)
1001 except RepositoryError:
1001 except RepositoryError:
1002 log.exception('Failure when doing local fetch on '
1002 log.exception('Failure when doing local fetch on '
1003 'shadow repo: %s', shadow_repo)
1003 'shadow repo: %s', shadow_repo)
1004 return MergeResponse(
1004 return MergeResponse(
1005 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1005 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1006 metadata={'source_ref': source_ref})
1006 metadata={'source_ref': source_ref})
1007
1007
1008 merge_ref = None
1008 merge_ref = None
1009 merge_failure_reason = MergeFailureReason.NONE
1009 merge_failure_reason = MergeFailureReason.NONE
1010 metadata = {}
1010 metadata = {}
1011 try:
1011 try:
1012 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1012 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1013 [source_ref.commit_id])
1013 [source_ref.commit_id])
1014 merge_possible = True
1014 merge_possible = True
1015
1015
1016 # Need to invalidate the cache, otherwise we
1016 # Need to invalidate the cache, otherwise we
1017 # cannot retrieve the merge commit.
1017 # cannot retrieve the merge commit.
1018 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1018 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1019 merge_commit_id = shadow_repo.branches[pr_branch]
1019 merge_commit_id = shadow_repo.branches[pr_branch]
1020
1020
1021 # Set a reference pointing to the merge commit. This reference may
1021 # Set a reference pointing to the merge commit. This reference may
1022 # be used to easily identify the last successful merge commit in
1022 # be used to easily identify the last successful merge commit in
1023 # the shadow repository.
1023 # the shadow repository.
1024 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1024 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1025 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1025 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1026 except RepositoryError as e:
1026 except RepositoryError as e:
1027 log.exception('Failure when doing local merge on git shadow repo')
1027 log.exception('Failure when doing local merge on git shadow repo')
1028 if isinstance(e, UnresolvedFilesInRepo):
1028 if isinstance(e, UnresolvedFilesInRepo):
1029 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1029 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1030
1030
1031 merge_possible = False
1031 merge_possible = False
1032 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1032 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1033
1033
1034 if merge_possible and not dry_run:
1034 if merge_possible and not dry_run:
1035 try:
1035 try:
1036 shadow_repo._local_push(
1036 shadow_repo._local_push(
1037 pr_branch, self.path, target_ref.name, enable_hooks=True,
1037 pr_branch, self.path, target_ref.name, enable_hooks=True,
1038 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1038 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1039 merge_succeeded = True
1039 merge_succeeded = True
1040 except RepositoryError:
1040 except RepositoryError:
1041 log.exception(
1041 log.exception(
1042 'Failure when doing local push from the shadow '
1042 'Failure when doing local push from the shadow '
1043 'repository to the target repository at %s.', self.path)
1043 'repository to the target repository at %s.', self.path)
1044 merge_succeeded = False
1044 merge_succeeded = False
1045 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1045 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1046 metadata['target'] = 'git shadow repo'
1046 metadata['target'] = 'git shadow repo'
1047 metadata['merge_commit'] = pr_branch
1047 metadata['merge_commit'] = pr_branch
1048 else:
1048 else:
1049 merge_succeeded = False
1049 merge_succeeded = False
1050
1050
1051 return MergeResponse(
1051 return MergeResponse(
1052 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1052 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1053 metadata=metadata)
1053 metadata=metadata)
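# --- Illustrative sketch (editorial addition, not part of this diff) -------
# Shows a possible dry-run merge-test call against the shadow repository via
# _merge_repo() above; every name below (refs, ids, message, user) is
# hypothetical. The returned MergeResponse is constructed positionally as
# (merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
#  metadata=...), as in the code above.
resp = repo._merge_repo(
    repo_id=42, workspace_id='pr-7',
    target_ref=target_ref, source_repo=source_repo, source_ref=source_ref,
    merge_message='Test merge', merger_name='Jane Doe',
    merger_email='jane@example.com', dry_run=True)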
@@ -1,1024 +1,1024 b''
1 # Copyright (C) 2014-2023 RhodeCode GmbH
1 # Copyright (C) 2014-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 HG repository module
20 HG repository module
21 """
21 """
22 import os
22 import os
23 import logging
23 import logging
24 import binascii
24 import binascii
25 import configparser
25 import configparser
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from collections import OrderedDict
32 from collections import OrderedDict
33 from rhodecode.lib.datelib import (
33 from rhodecode.lib.datelib import (
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
34 date_to_timestamp_plus_offset, utcdate_fromtimestamp, makedate)
35 from rhodecode.lib.str_utils import safe_str
35 from rhodecode.lib.str_utils import safe_str
36 from rhodecode.lib.utils2 import CachedProperty
36 from rhodecode.lib.utils2 import CachedProperty
37 from rhodecode.lib.vcs import connection, exceptions
37 from rhodecode.lib.vcs import connection, exceptions
38 from rhodecode.lib.vcs.backends.base import (
38 from rhodecode.lib.vcs.backends.base import (
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 BaseRepository, CollectionGenerator, Config, MergeResponse,
40 MergeFailureReason, Reference, BasePathPermissionChecker)
40 MergeFailureReason, Reference, BasePathPermissionChecker)
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
41 from rhodecode.lib.vcs.backends.hg.commit import MercurialCommit
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
42 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
43 from rhodecode.lib.vcs.backends.hg.inmemory import MercurialInMemoryCommit
44 from rhodecode.lib.vcs.exceptions import (
44 from rhodecode.lib.vcs.exceptions import (
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
45 EmptyRepositoryError, RepositoryError, TagAlreadyExistError,
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
46 TagDoesNotExistError, CommitDoesNotExistError, SubrepoMergeError, UnresolvedFilesInRepo)
47
47
48 hexlify = binascii.hexlify
48 hexlify = binascii.hexlify
49 nullid = "\0" * 20
49 nullid = "\0" * 20
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class MercurialRepository(BaseRepository):
54 class MercurialRepository(BaseRepository):
55 """
55 """
56 Mercurial repository backend
56 Mercurial repository backend
57 """
57 """
58 DEFAULT_BRANCH_NAME = 'default'
58 DEFAULT_BRANCH_NAME = 'default'
59
59
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
60 def __init__(self, repo_path, config=None, create=False, src_url=None,
61 do_workspace_checkout=False, with_wire=None, bare=False):
61 do_workspace_checkout=False, with_wire=None, bare=False):
62 """
62 """
63 Raises RepositoryError if repository could not be found at the given
63 Raises RepositoryError if repository could not be found at the given
64 ``repo_path``.
64 ``repo_path``.
65
65
66 :param repo_path: local path of the repository
66 :param repo_path: local path of the repository
67 :param config: config object containing the repo configuration
67 :param config: config object containing the repo configuration
68 :param create=False: if set to True, would try to create repository if
68 :param create=False: if set to True, would try to create repository if
69 it does not exist rather than raising exception
69 it does not exist rather than raising exception
70 :param src_url=None: would try to clone repository from given location
70 :param src_url=None: would try to clone repository from given location
71 :param do_workspace_checkout=False: sets update of working copy after
71 :param do_workspace_checkout=False: sets update of working copy after
72 making a clone
72 making a clone
73 :param bare: not used, kept for compatibility with other VCS backends
73 :param bare: not used, kept for compatibility with other VCS backends
74 """
74 """
75
75
76 self.path = safe_str(os.path.abspath(repo_path))
76 self.path = safe_str(os.path.abspath(repo_path))
77 # mercurial since 4.4.X requires certain configuration to be present
77 # mercurial since 4.4.X requires certain configuration to be present
78 # because we sometimes init the repos with a config, we need to meet
78 # because we sometimes init the repos with a config, we need to meet
79 # special requirements
79 # special requirements
80 self.config = config if config else self.get_default_config(
80 self.config = config if config else self.get_default_config(
81 default=[('extensions', 'largefiles', '')])
81 default=[('extensions', 'largefiles', '')])
82
82
83 # NOTE(marcink): since python3 hgsubversion is deprecated.
83 # NOTE(marcink): since python3 hgsubversion is deprecated.
84 # From old installations we might still have this set enabled
84 # From old installations we might still have this set enabled
85 # we explicitly remove it here to make sure it won't propagate further
85 # we explicitly remove it here to make sure it won't propagate further
86 if config and config.get('extensions', 'hgsubversion') is not None:
86 if config and config.get('extensions', 'hgsubversion') is not None:
87 config.drop_option('extensions', 'hgsubversion')
87 config.drop_option('extensions', 'hgsubversion')
88
88
89 self.with_wire = with_wire or {"cache": False} # default should not use cache
89 self.with_wire = with_wire or {"cache": False} # default should not use cache
90
90
91 self._init_repo(create, src_url, do_workspace_checkout)
91 self._init_repo(create, src_url, do_workspace_checkout)
92
92
93 # caches
93 # caches
94 self._commit_ids = {}
94 self._commit_ids = {}
95
95
96 @LazyProperty
96 @LazyProperty
97 def _remote(self):
97 def _remote(self):
98 repo_id = self.path
98 repo_id = self.path
99 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
99 return connection.Hg(self.path, repo_id, self.config, with_wire=self.with_wire)
100
100
101 @CachedProperty
101 @CachedProperty
102 def commit_ids(self):
102 def commit_ids(self):
103 """
103 """
104 Returns list of commit ids, in ascending order. Being a lazy
104 Returns list of commit ids, in ascending order. Being a lazy
105 attribute allows external tools to inject SHAs from the cache.
105 attribute allows external tools to inject SHAs from the cache.
106 """
106 """
107 commit_ids = self._get_all_commit_ids()
107 commit_ids = self._get_all_commit_ids()
108 self._rebuild_cache(commit_ids)
108 self._rebuild_cache(commit_ids)
109 return commit_ids
109 return commit_ids
110
110
111 def _rebuild_cache(self, commit_ids):
111 def _rebuild_cache(self, commit_ids):
112 self._commit_ids = {commit_id: index
112 self._commit_ids = {commit_id: index
113 for index, commit_id in enumerate(commit_ids)}
113 for index, commit_id in enumerate(commit_ids)}
114
114
115 @CachedProperty
115 @CachedProperty
116 def branches(self):
116 def branches(self):
117 return self._get_branches()
117 return self._get_branches()
118
118
119 @CachedProperty
119 @CachedProperty
120 def branches_closed(self):
120 def branches_closed(self):
121 return self._get_branches(active=False, closed=True)
121 return self._get_branches(active=False, closed=True)
122
122
123 @CachedProperty
123 @CachedProperty
124 def branches_all(self):
124 def branches_all(self):
125 all_branches = {}
125 all_branches = {}
126 all_branches.update(self.branches)
126 all_branches.update(self.branches)
127 all_branches.update(self.branches_closed)
127 all_branches.update(self.branches_closed)
128 return all_branches
128 return all_branches
129
129
130 def _get_branches(self, active=True, closed=False):
130 def _get_branches(self, active=True, closed=False):
131 """
131 """
132 Gets branches for this repository
132 Gets branches for this repository
133 Returns only open (not closed), active branches by default
133 Returns only open (not closed), active branches by default
134
134
135 :param active: include active branches
135 :param active: include active branches
136 :param closed: also include closed branches
136 :param closed: also include closed branches
137
137
138 """
138 """
139 if self.is_empty():
139 if self.is_empty():
140 return {}
140 return {}
141
141
142 def get_name(ctx):
142 def get_name(ctx):
143 return ctx[0]
143 return ctx[0]
144
144
145 _branches = [(n, h,) for n, h in
145 _branches = [(n, h,) for n, h in
146 self._remote.branches(active, closed).items()]
146 self._remote.branches(active, closed).items()]
147
147
148 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
148 return OrderedDict(sorted(_branches, key=get_name, reverse=False))
149
149
150 @CachedProperty
150 @CachedProperty
151 def tags(self):
151 def tags(self):
152 """
152 """
153 Gets tags for this repository
153 Gets tags for this repository
154 """
154 """
155 return self._get_tags()
155 return self._get_tags()
156
156
157 def _get_tags(self):
157 def _get_tags(self):
158 if self.is_empty():
158 if self.is_empty():
159 return {}
159 return {}
160
160
161 def get_name(ctx):
161 def get_name(ctx):
162 return ctx[0]
162 return ctx[0]
163
163
164 _tags = [(n, h,) for n, h in
164 _tags = [(n, h,) for n, h in
165 self._remote.tags().items()]
165 self._remote.tags().items()]
166
166
167 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
167 return OrderedDict(sorted(_tags, key=get_name, reverse=True))
168
168
169 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
169 def tag(self, name, user, commit_id=None, message=None, date=None, **kwargs):
170 """
170 """
171 Creates and returns a tag for the given ``commit_id``.
171 Creates and returns a tag for the given ``commit_id``.
172
172
173 :param name: name for new tag
173 :param name: name for new tag
174 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
174 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
175 :param commit_id: commit id for which new tag would be created
175 :param commit_id: commit id for which new tag would be created
176 :param message: message of the tag's commit
176 :param message: message of the tag's commit
177 :param date: date of tag's commit
177 :param date: date of tag's commit
178
178
179 :raises TagAlreadyExistError: if tag with same name already exists
179 :raises TagAlreadyExistError: if tag with same name already exists
180 """
180 """
181 if name in self.tags:
181 if name in self.tags:
182 raise TagAlreadyExistError("Tag %s already exists" % name)
182 raise TagAlreadyExistError("Tag %s already exists" % name)
183
183
184 commit = self.get_commit(commit_id=commit_id)
184 commit = self.get_commit(commit_id=commit_id)
185 local = kwargs.setdefault('local', False)
185 local = kwargs.setdefault('local', False)
186
186
187 if message is None:
187 if message is None:
188 message = f"Added tag {name} for commit {commit.short_id}"
188 message = f"Added tag {name} for commit {commit.short_id}"
189
189
190 date, tz = date_to_timestamp_plus_offset(date)
190 date, tz = date_to_timestamp_plus_offset(date)
191
191
192 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
192 self._remote.tag(name, commit.raw_id, message, local, user, date, tz)
193 self._remote.invalidate_vcs_cache()
193 self._remote.invalidate_vcs_cache()
194
194
195 # Reinitialize tags
195 # Reinitialize tags
196 self._invalidate_prop_cache('tags')
196 self._invalidate_prop_cache('tags')
197 tag_id = self.tags[name]
197 tag_id = self.tags[name]
198
198
199 return self.get_commit(commit_id=tag_id)
199 return self.get_commit(commit_id=tag_id)
200
200
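# --- Illustrative sketch (editorial addition, not part of this diff) -------
# Shows a possible use of tag() above: tagging the current tip commit.
# `hg_repo`, the tag name and the user string are hypothetical; a
# TagAlreadyExistError is raised if the tag already exists.
tip = hg_repo.get_commit()   # defaults to "tip"
tag_commit = hg_repo.tag('v1.2.0', 'Jane Doe <jane.doe@example.com>',
                         commit_id=tip.raw_id)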
201 def remove_tag(self, name, user, message=None, date=None):
201 def remove_tag(self, name, user, message=None, date=None):
202 """
202 """
203 Removes tag with the given `name`.
203 Removes tag with the given `name`.
204
204
205 :param name: name of the tag to be removed
205 :param name: name of the tag to be removed
206 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
206 :param user: full username, e.g.: "Joe Doe <joe.doe@example.com>"
207 :param message: message of the tag's removal commit
207 :param message: message of the tag's removal commit
208 :param date: date of tag's removal commit
208 :param date: date of tag's removal commit
209
209
210 :raises TagDoesNotExistError: if tag with given name does not exist
210 :raises TagDoesNotExistError: if tag with given name does not exist
211 """
211 """
212 if name not in self.tags:
212 if name not in self.tags:
213 raise TagDoesNotExistError("Tag %s does not exist" % name)
213 raise TagDoesNotExistError("Tag %s does not exist" % name)
214
214
215 if message is None:
215 if message is None:
216 message = "Removed tag %s" % name
216 message = "Removed tag %s" % name
217 local = False
217 local = False
218
218
219 date, tz = date_to_timestamp_plus_offset(date)
219 date, tz = date_to_timestamp_plus_offset(date)
220
220
221 self._remote.tag(name, nullid, message, local, user, date, tz)
221 self._remote.tag(name, nullid, message, local, user, date, tz)
222 self._remote.invalidate_vcs_cache()
222 self._remote.invalidate_vcs_cache()
223 self._invalidate_prop_cache('tags')
223 self._invalidate_prop_cache('tags')
224
224
225 @LazyProperty
225 @LazyProperty
226 def bookmarks(self):
226 def bookmarks(self):
227 """
227 """
228 Gets bookmarks for this repository
228 Gets bookmarks for this repository
229 """
229 """
230 return self._get_bookmarks()
230 return self._get_bookmarks()
231
231
232 def _get_bookmarks(self):
232 def _get_bookmarks(self):
233 if self.is_empty():
233 if self.is_empty():
234 return {}
234 return {}
235
235
236 def get_name(ctx):
236 def get_name(ctx):
237 return ctx[0]
237 return ctx[0]
238
238
239 _bookmarks = [
239 _bookmarks = [
240 (n, h) for n, h in
240 (n, h) for n, h in
241 self._remote.bookmarks().items()]
241 self._remote.bookmarks().items()]
242
242
243 return OrderedDict(sorted(_bookmarks, key=get_name))
243 return OrderedDict(sorted(_bookmarks, key=get_name))
244
244
245 def _get_all_commit_ids(self):
245 def _get_all_commit_ids(self):
246 return self._remote.get_all_commit_ids('visible')
246 return self._remote.get_all_commit_ids('visible')
247
247
248 def get_diff(
248 def get_diff(
249 self, commit1, commit2, path='', ignore_whitespace=False,
249 self, commit1, commit2, path='', ignore_whitespace=False,
250 context=3, path1=None):
250 context=3, path1=None):
251 """
251 """
252 Returns (git like) *diff*, as plain text. Shows changes introduced by
252 Returns (git like) *diff*, as plain text. Shows changes introduced by
253 `commit2` since `commit1`.
253 `commit2` since `commit1`.
254
254
255 :param commit1: Entry point from which diff is shown. Can be
255 :param commit1: Entry point from which diff is shown. Can be
256 ``self.EMPTY_COMMIT`` - in this case, patch showing all
256 ``self.EMPTY_COMMIT`` - in this case, patch showing all
257 the changes since empty state of the repository until `commit2`
257 the changes since empty state of the repository until `commit2`
258 :param commit2: Until which commit changes should be shown.
258 :param commit2: Until which commit changes should be shown.
259 :param ignore_whitespace: If set to ``True``, would not show whitespace
259 :param ignore_whitespace: If set to ``True``, would not show whitespace
260 changes. Defaults to ``False``.
260 changes. Defaults to ``False``.
261 :param context: How many lines before/after changed lines should be
261 :param context: How many lines before/after changed lines should be
262 shown. Defaults to ``3``.
262 shown. Defaults to ``3``.
263 """
263 """
264 self._validate_diff_commits(commit1, commit2)
264 self._validate_diff_commits(commit1, commit2)
265 if path1 is not None and path1 != path:
265 if path1 is not None and path1 != path:
266 raise ValueError("Diff of two different paths not supported.")
266 raise ValueError("Diff of two different paths not supported.")
267
267
268 if path:
268 if path:
269 file_filter = [self.path, path]
269 file_filter = [self.path, path]
270 else:
270 else:
271 file_filter = None
271 file_filter = None
272
272
273 diff = self._remote.diff(
273 diff = self._remote.diff(
274 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
274 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
275 opt_git=True, opt_ignorews=ignore_whitespace,
275 opt_git=True, opt_ignorews=ignore_whitespace,
276 context=context)
276 context=context)
277 return MercurialDiff(diff)
277 return MercurialDiff(diff)
278
278
279 def strip(self, commit_id, branch=None):
279 def strip(self, commit_id, branch=None):
280 self._remote.strip(commit_id, update=False, backup=False)
280 self._remote.strip(commit_id, update=False, backup=False)
281
281
282 self._remote.invalidate_vcs_cache()
282 self._remote.invalidate_vcs_cache()
283 # clear cache
283 # clear cache
284 self._invalidate_prop_cache('commit_ids')
284 self._invalidate_prop_cache('commit_ids')
285
285
286 return len(self.commit_ids)
286 return len(self.commit_ids)
287
287
288 def verify(self):
288 def verify(self):
289 verify = self._remote.verify()
289 verify = self._remote.verify()
290
290
291 self._remote.invalidate_vcs_cache()
291 self._remote.invalidate_vcs_cache()
292 return verify
292 return verify
293
293
294 def hg_update_cache(self):
294 def hg_update_cache(self):
295 update_cache = self._remote.hg_update_cache()
295 update_cache = self._remote.hg_update_cache()
296
296
297 self._remote.invalidate_vcs_cache()
297 self._remote.invalidate_vcs_cache()
298 return update_cache
298 return update_cache
299
299
300 def hg_rebuild_fn_cache(self):
300 def hg_rebuild_fn_cache(self):
301 update_cache = self._remote.hg_rebuild_fn_cache()
301 update_cache = self._remote.hg_rebuild_fn_cache()
302
302
303 self._remote.invalidate_vcs_cache()
303 self._remote.invalidate_vcs_cache()
304 return update_cache
304 return update_cache
305
305
306 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
306 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
307 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
307 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
308 self, commit_id1, repo2, commit_id2)
308 self, commit_id1, repo2, commit_id2)
309
309
310 if commit_id1 == commit_id2:
310 if commit_id1 == commit_id2:
311 return commit_id1
311 return commit_id1
312
312
313 ancestors = self._remote.revs_from_revspec(
313 ancestors = self._remote.revs_from_revspec(
314 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
314 "ancestor(id(%s), id(%s))", commit_id1, commit_id2,
315 other_path=repo2.path)
315 other_path=repo2.path)
316
316
317 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
317 ancestor_id = repo2[ancestors[0]].raw_id if ancestors else None
318
318
319 log.debug('Found common ancestor with sha: %s', ancestor_id)
319 log.debug('Found common ancestor with sha: %s', ancestor_id)
320 return ancestor_id
320 return ancestor_id
321
321
322 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
322 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
323 if commit_id1 == commit_id2:
323 if commit_id1 == commit_id2:
324 commits = []
324 commits = []
325 else:
325 else:
326 if merge:
326 if merge:
327 indexes = self._remote.revs_from_revspec(
327 indexes = self._remote.revs_from_revspec(
328 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
328 "ancestors(id(%s)) - ancestors(id(%s)) - id(%s)",
329 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
329 commit_id2, commit_id1, commit_id1, other_path=repo2.path)
330 else:
330 else:
331 indexes = self._remote.revs_from_revspec(
331 indexes = self._remote.revs_from_revspec(
332 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
332 "id(%s)..id(%s) - id(%s)", commit_id1, commit_id2,
333 commit_id1, other_path=repo2.path)
333 commit_id1, other_path=repo2.path)
334
334
335 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
335 commits = [repo2.get_commit(commit_idx=idx, pre_load=pre_load)
336 for idx in indexes]
336 for idx in indexes]
337
337
338 return commits
338 return commits
339
339
340 @staticmethod
340 @staticmethod
341 def check_url(url, config):
341 def check_url(url, config):
342 """
342 """
343 Function will check the given url and try to verify that it's a valid
343 Function will check the given url and try to verify that it's a valid
344 link. Sometimes it may happen that mercurial will issue a basic
344 link. Sometimes it may happen that mercurial will issue a basic
345 auth request that can cause the whole API to hang when used from python
345 auth request that can cause the whole API to hang when used from python
346 or other external calls.
346 or other external calls.
347
347
348 On failure it'll raise urllib.error.HTTPError; the exception is also raised
348 On failure it'll raise urllib.error.HTTPError; the exception is also raised
349 when the return code is not 200
349 when the return code is not 200
350 """
350 """
351 # check first if it's not a local url
351 # check first if it's not a local url
352 if os.path.isdir(url) or url.startswith('file:'):
352 if os.path.isdir(url) or url.startswith('file:'):
353 return True
353 return True
354
354
355 # Request the _remote to verify the url
355 # Request the _remote to verify the url
356 return connection.Hg.check_url(url, config.serialize())
356 return connection.Hg.check_url(url, config.serialize())
357
357
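# --- Illustrative sketch (editorial addition, not part of this diff) -------
# Shows how check_url() above might be called: local directories and file:
# urls return True directly, remote urls are verified via the remote side.
# The paths/urls are hypothetical; `repo.config` is assumed to be the repo's
# Config object (only its serialize() method is used here).
MercurialRepository.check_url('/srv/repos/local-repo', repo.config)
MercurialRepository.check_url('https://code.example.com/repo', repo.config)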
358 @staticmethod
358 @staticmethod
359 def is_valid_repository(path):
359 def is_valid_repository(path):
360 return os.path.isdir(os.path.join(path, '.hg'))
360 return os.path.isdir(os.path.join(path, '.hg'))
361
361
362 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
362 def _init_repo(self, create, src_url=None, do_workspace_checkout=False):
363 """
363 """
364 Function will check for a mercurial repository in the given path. If there
364 Function will check for a mercurial repository in the given path. If there
365 is no repository in that path it will raise an exception unless
365 is no repository in that path it will raise an exception unless
366 `create` parameter is set to True - in that case repository would
366 `create` parameter is set to True - in that case repository would
367 be created.
367 be created.
368
368
369 If `src_url` is given, it will try to clone the repository from the
369 If `src_url` is given, it will try to clone the repository from the
370 given location. Additionally, it will update the
370 given location. Additionally, it will update the
371 working copy according to the `do_workspace_checkout` flag.
371 working copy according to the `do_workspace_checkout` flag.
372 """
372 """
373 if create and os.path.exists(self.path):
373 if create and os.path.exists(self.path):
374 raise RepositoryError(
374 raise RepositoryError(
375 f"Cannot create repository at {self.path}, location already exists")
375 f"Cannot create repository at {self.path}, location already exists")
376
376
377 if src_url:
377 if src_url:
378 url = str(self._get_url(src_url))
378 url = str(self._get_url(src_url))
379 MercurialRepository.check_url(url, self.config)
379 MercurialRepository.check_url(url, self.config)
380
380
381 self._remote.clone(url, self.path, do_workspace_checkout)
381 self._remote.clone(url, self.path, do_workspace_checkout)
382
382
383 # Don't try to create if we've already cloned repo
383 # Don't try to create if we've already cloned repo
384 create = False
384 create = False
385
385
386 if create:
386 if create:
387 os.makedirs(self.path, mode=0o755)
387 os.makedirs(self.path, mode=0o755)
388
388
389 self._remote.localrepository(create)
389 self._remote.localrepository(create)
390
390
391 @LazyProperty
391 @LazyProperty
392 def in_memory_commit(self):
392 def in_memory_commit(self):
393 return MercurialInMemoryCommit(self)
393 return MercurialInMemoryCommit(self)
394
394
395 @LazyProperty
395 @LazyProperty
396 def description(self):
396 def description(self):
397 description = self._remote.get_config_value(
397 description = self._remote.get_config_value(
398 'web', 'description', untrusted=True)
398 'web', 'description', untrusted=True)
399 return safe_str(description or self.DEFAULT_DESCRIPTION)
399 return safe_str(description or self.DEFAULT_DESCRIPTION)
400
400
401 @LazyProperty
401 @LazyProperty
402 def contact(self):
402 def contact(self):
403 contact = (
403 contact = (
404 self._remote.get_config_value("web", "contact") or
404 self._remote.get_config_value("web", "contact") or
405 self._remote.get_config_value("ui", "username"))
405 self._remote.get_config_value("ui", "username"))
406 return safe_str(contact or self.DEFAULT_CONTACT)
406 return safe_str(contact or self.DEFAULT_CONTACT)
407
407
408 @LazyProperty
408 @LazyProperty
409 def last_change(self):
409 def last_change(self):
410 """
410 """
411 Returns last change made on this repository as
411 Returns last change made on this repository as
412 `datetime.datetime` object.
412 `datetime.datetime` object.
413 """
413 """
414 try:
414 try:
415 return self.get_commit().date
415 return self.get_commit().date
416 except RepositoryError:
416 except RepositoryError:
417 tzoffset = makedate()[1]
417 tzoffset = makedate()[1]
418 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
418 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
419
419
420 def _get_fs_mtime(self):
420 def _get_fs_mtime(self):
421 # fallback to filesystem
421 # fallback to filesystem
422 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
422 cl_path = os.path.join(self.path, '.hg', "00changelog.i")
423 st_path = os.path.join(self.path, '.hg', "store")
423 st_path = os.path.join(self.path, '.hg', "store")
424 if os.path.exists(cl_path):
424 if os.path.exists(cl_path):
425 return os.stat(cl_path).st_mtime
425 return os.stat(cl_path).st_mtime
426 else:
426 else:
427 return os.stat(st_path).st_mtime
427 return os.stat(st_path).st_mtime
428
428
429 def _get_url(self, url):
429 def _get_url(self, url):
430 """
430 """
431 Returns the normalized url. If a scheme is not given, it falls back
431 Returns the normalized url. If a scheme is not given, it falls back
432 to the filesystem
432 to the filesystem
433 (``file:///``) scheme.
433 (``file:///``) scheme.
434 """
434 """
435 if url != 'default' and '://' not in url:
435 if url != 'default' and '://' not in url:
436 url = "file:" + urllib.request.pathname2url(url)
436 url = "file:" + urllib.request.pathname2url(url)
437 return url
437 return url
438
438
439 def get_hook_location(self):
439 def get_hook_location(self):
440 """
440 """
441 returns absolute path to location where hooks are stored
441 returns absolute path to location where hooks are stored
442 """
442 """
443 return os.path.join(self.path, '.hg', '.hgrc')
443 return os.path.join(self.path, '.hg', '.hgrc')
444
444
445 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
445 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
446 translate_tag=None, maybe_unreachable=False, reference_obj=None):
446 translate_tag=None, maybe_unreachable=False, reference_obj=None):
447 """
447 """
448 Returns ``MercurialCommit`` object representing repository's
448 Returns ``MercurialCommit`` object representing repository's
449 commit at the given `commit_id` or `commit_idx`.
449 commit at the given `commit_id` or `commit_idx`.
450 """
450 """
451 if self.is_empty():
451 if self.is_empty():
452 raise EmptyRepositoryError("There are no commits yet")
452 raise EmptyRepositoryError("There are no commits yet")
453
453
454 if commit_id is not None:
454 if commit_id is not None:
455 self._validate_commit_id(commit_id)
455 self._validate_commit_id(commit_id)
456 try:
456 try:
457 # we have cached idx, use it without contacting the remote
457 # we have cached idx, use it without contacting the remote
458 idx = self._commit_ids[commit_id]
458 idx = self._commit_ids[commit_id]
459 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
459 return MercurialCommit(self, commit_id, idx, pre_load=pre_load)
460 except KeyError:
460 except KeyError:
461 pass
461 pass
462
462
463 elif commit_idx is not None:
463 elif commit_idx is not None:
464 self._validate_commit_idx(commit_idx)
464 self._validate_commit_idx(commit_idx)
465 try:
465 try:
466 _commit_id = self.commit_ids[commit_idx]
466 _commit_id = self.commit_ids[commit_idx]
467 if commit_idx < 0:
467 if commit_idx < 0:
468 commit_idx = self.commit_ids.index(_commit_id)
468 commit_idx = self.commit_ids.index(_commit_id)
469
469
470 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
470 return MercurialCommit(self, _commit_id, commit_idx, pre_load=pre_load)
471 except IndexError:
471 except IndexError:
472 commit_id = commit_idx
472 commit_id = commit_idx
473 else:
473 else:
474 commit_id = "tip"
474 commit_id = "tip"
475
475
476 # case here is no cached version, do an actual lookup instead
476 # case here is no cached version, do an actual lookup instead
477 try:
477 try:
478 raw_id, idx = self._remote.lookup(commit_id, both=True)
478 raw_id, idx = self._remote.lookup(commit_id, both=True)
479 except CommitDoesNotExistError:
479 except CommitDoesNotExistError:
480 msg = "Commit {} does not exist for `{}`".format(
480 msg = "Commit {} does not exist for `{}`".format(
481 *map(safe_str, [commit_id, self.name]))
481 *map(safe_str, [commit_id, self.name]))
482 raise CommitDoesNotExistError(msg)
482 raise CommitDoesNotExistError(msg)
483
483
484 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
484 return MercurialCommit(self, raw_id, idx, pre_load=pre_load)
485
485
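# --- Illustrative sketch (editorial addition, not part of this diff) -------
# Shows the three lookup modes of get_commit() above; `hg_repo` is assumed
# to be a MercurialRepository obtained elsewhere, and the full sha is a
# placeholder.
tip = hg_repo.get_commit()                          # defaults to "tip"
first = hg_repo.get_commit(commit_idx=0)            # by index
by_id = hg_repo.get_commit(commit_id='<40-char-sha>')  # by full commit id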
486 def get_commits(
486 def get_commits(
487 self, start_id=None, end_id=None, start_date=None, end_date=None,
487 self, start_id=None, end_id=None, start_date=None, end_date=None,
488 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
488 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
489 """
489 """
490 Returns generator of ``MercurialCommit`` objects from start to end
490 Returns generator of ``MercurialCommit`` objects from start to end
491 (both are inclusive)
491 (both are inclusive)
492
492
493 :param start_id: None, str(commit_id)
493 :param start_id: None, str(commit_id)
494 :param end_id: None, str(commit_id)
494 :param end_id: None, str(commit_id)
495 :param start_date: if specified, commits with commit date less than
495 :param start_date: if specified, commits with commit date less than
496 ``start_date`` would be filtered out from returned set
496 ``start_date`` would be filtered out from returned set
497 :param end_date: if specified, commits with commit date greater than
497 :param end_date: if specified, commits with commit date greater than
498 ``end_date`` would be filtered out from returned set
498 ``end_date`` would be filtered out from returned set
499 :param branch_name: if specified, commits not reachable from given
499 :param branch_name: if specified, commits not reachable from given
500 branch would be filtered out from returned set
500 branch would be filtered out from returned set
501 :param show_hidden: Show hidden commits such as obsolete or hidden from
501 :param show_hidden: Show hidden commits such as obsolete or hidden from
502 Mercurial evolve
502 Mercurial evolve
503 :raise BranchDoesNotExistError: If given ``branch_name`` does not
503 :raise BranchDoesNotExistError: If given ``branch_name`` does not
504 exist.
504 exist.
505 :raise CommitDoesNotExistError: If commit for given ``start`` or
505 :raise CommitDoesNotExistError: If commit for given ``start`` or
506 ``end`` could not be found.
506 ``end`` could not be found.
507 """
507 """
508 # actually we should check now if it's not an empty repo
508 # actually we should check now if it's not an empty repo
509 if self.is_empty():
509 if self.is_empty():
510 raise EmptyRepositoryError("There are no commits yet")
510 raise EmptyRepositoryError("There are no commits yet")
511 self._validate_branch_name(branch_name)
511 self._validate_branch_name(branch_name)
512
512
513 branch_ancestors = False
513 branch_ancestors = False
514 if start_id is not None:
514 if start_id is not None:
515 self._validate_commit_id(start_id)
515 self._validate_commit_id(start_id)
516 c_start = self.get_commit(commit_id=start_id)
516 c_start = self.get_commit(commit_id=start_id)
517 start_pos = self._commit_ids[c_start.raw_id]
517 start_pos = self._commit_ids[c_start.raw_id]
518 else:
518 else:
519 start_pos = None
519 start_pos = None
520
520
521 if end_id is not None:
521 if end_id is not None:
522 self._validate_commit_id(end_id)
522 self._validate_commit_id(end_id)
523 c_end = self.get_commit(commit_id=end_id)
523 c_end = self.get_commit(commit_id=end_id)
524 end_pos = max(0, self._commit_ids[c_end.raw_id])
524 end_pos = max(0, self._commit_ids[c_end.raw_id])
525 else:
525 else:
526 end_pos = None
526 end_pos = None
527
527
528 if None not in [start_id, end_id] and start_pos > end_pos:
528 if None not in [start_id, end_id] and start_pos > end_pos:
529 raise RepositoryError(
529 raise RepositoryError(
530 "Start commit '%s' cannot be after end commit '%s'" %
530 "Start commit '%s' cannot be after end commit '%s'" %
531 (start_id, end_id))
531 (start_id, end_id))
532
532
533 if end_pos is not None:
533 if end_pos is not None:
534 end_pos += 1
534 end_pos += 1
535
535
536 commit_filter = []
536 commit_filter = []
537
537
538 if branch_name and not branch_ancestors:
538 if branch_name and not branch_ancestors:
539 commit_filter.append(f'branch("{branch_name}")')
539 commit_filter.append(f'branch("{branch_name}")')
540 elif branch_name and branch_ancestors:
540 elif branch_name and branch_ancestors:
541 commit_filter.append(f'ancestors(branch("{branch_name}"))')
541 commit_filter.append(f'ancestors(branch("{branch_name}"))')
542
542
543 if start_date and not end_date:
543 if start_date and not end_date:
544 commit_filter.append(f'date(">{start_date}")')
544 commit_filter.append(f'date(">{start_date}")')
545 if end_date and not start_date:
545 if end_date and not start_date:
546 commit_filter.append(f'date("<{end_date}")')
546 commit_filter.append(f'date("<{end_date}")')
547 if start_date and end_date:
547 if start_date and end_date:
548 commit_filter.append(
548 commit_filter.append(
549 f'date(">{start_date}") and date("<{end_date}")')
549 f'date(">{start_date}") and date("<{end_date}")')
550
550
551 if not show_hidden:
551 if not show_hidden:
552 commit_filter.append('not obsolete()')
552 commit_filter.append('not obsolete()')
553 commit_filter.append('not hidden()')
553 commit_filter.append('not hidden()')
554
554
555 # TODO: johbo: Figure out a simpler way for this solution
555 # TODO: johbo: Figure out a simpler way for this solution
556 collection_generator = CollectionGenerator
556 collection_generator = CollectionGenerator
557 if commit_filter:
557 if commit_filter:
558 commit_filter = ' and '.join(map(safe_str, commit_filter))
558 commit_filter = ' and '.join(map(safe_str, commit_filter))
559 revisions = self._remote.rev_range([commit_filter])
559 revisions = self._remote.rev_range([commit_filter])
560 collection_generator = MercurialIndexBasedCollectionGenerator
560 collection_generator = MercurialIndexBasedCollectionGenerator
561 else:
561 else:
562 revisions = self.commit_ids
562 revisions = self.commit_ids
563
563
564 if start_pos or end_pos:
564 if start_pos or end_pos:
565 revisions = revisions[start_pos:end_pos]
565 revisions = revisions[start_pos:end_pos]
566
566
567 return collection_generator(self, revisions, pre_load=pre_load)
567 return collection_generator(self, revisions, pre_load=pre_load)
568
568
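# --- Illustrative sketch (editorial addition, not part of this diff) -------
# Shows iterating a filtered commit range via get_commits() above; `hg_repo`
# is assumed, and the branch name is hypothetical. Date filters, if passed,
# are interpolated into a Mercurial date() revset predicate as shown above.
short_ids = []
for commit in hg_repo.get_commits(branch_name='default', show_hidden=False):
    short_ids.append(commit.short_id)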
569 def pull(self, url, commit_ids=None):
569 def pull(self, url, commit_ids=None):
570 """
570 """
571 Pull changes from external location.
571 Pull changes from external location.
572
572
573 :param commit_ids: Optional. Can be set to a list of commit ids
573 :param commit_ids: Optional. Can be set to a list of commit ids
574 which shall be pulled from the other repository.
574 which shall be pulled from the other repository.
575 """
575 """
576 url = self._get_url(url)
576 url = self._get_url(url)
577 self._remote.pull(url, commit_ids=commit_ids)
577 self._remote.pull(url, commit_ids=commit_ids)
578 self._remote.invalidate_vcs_cache()
578 self._remote.invalidate_vcs_cache()
579
579
580 def fetch(self, url, commit_ids=None):
580 def fetch(self, url, commit_ids=None, **kwargs):
581 """
581 """
582 Backward compatibility with GIT fetch==pull
582 Backward compatibility with GIT fetch==pull
583 """
583 """
584 return self.pull(url, commit_ids=commit_ids)
584 return self.pull(url, commit_ids=commit_ids)
585
585
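# --- Illustrative sketch (editorial addition, not part of this diff) -------
# The **kwargs accepted by fetch() above lets callers pass extra,
# backend-specific keyword flags (for example an LFS-related sync flag)
# through the common Git/Hg interface; the Mercurial backend simply ignores
# them, since fetch() delegates to pull(). `hg_repo` and the url are assumed.
hg_repo.fetch('https://code.example.com/repo', some_backend_flag=True)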
586 def push(self, url):
586 def push(self, url):
587 url = self._get_url(url)
587 url = self._get_url(url)
588 self._remote.sync_push(url)
588 self._remote.sync_push(url)
589
589
590 def _local_clone(self, clone_path):
590 def _local_clone(self, clone_path):
591 """
591 """
592 Create a local clone of the current repo.
592 Create a local clone of the current repo.
593 """
593 """
594 self._remote.clone(self.path, clone_path, update_after_clone=True,
594 self._remote.clone(self.path, clone_path, update_after_clone=True,
595 hooks=False)
595 hooks=False)
596
596
597 def _update(self, revision, clean=False):
597 def _update(self, revision, clean=False):
598 """
598 """
599 Update the working copy to the specified revision.
599 Update the working copy to the specified revision.
600 """
600 """
601 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
601 log.debug('Doing checkout to commit: `%s` for %s', revision, self)
602 self._remote.update(revision, clean=clean)
602 self._remote.update(revision, clean=clean)
603
603
604 def _identify(self):
604 def _identify(self):
605 """
605 """
606 Return the current state of the working directory.
606 Return the current state of the working directory.
607 """
607 """
608 return self._remote.identify().strip().rstrip('+')
608 return self._remote.identify().strip().rstrip('+')
609
609
610 def _heads(self, branch=None):
610 def _heads(self, branch=None):
611 """
611 """
612 Return the commit ids of the repository heads.
612 Return the commit ids of the repository heads.
613 """
613 """
614 return self._remote.heads(branch=branch).strip().split(' ')
614 return self._remote.heads(branch=branch).strip().split(' ')
615
615
616 def _ancestor(self, revision1, revision2):
616 def _ancestor(self, revision1, revision2):
617 """
617 """
618 Return the common ancestor of the two revisions.
618 Return the common ancestor of the two revisions.
619 """
619 """
620 return self._remote.ancestor(revision1, revision2)
620 return self._remote.ancestor(revision1, revision2)
621
621
622 def _local_push(
622 def _local_push(
623 self, revision, repository_path, push_branches=False,
623 self, revision, repository_path, push_branches=False,
624 enable_hooks=False):
624 enable_hooks=False):
625 """
625 """
626 Push the given revision to the specified repository.
626 Push the given revision to the specified repository.
627
627
628 :param push_branches: allow to create branches in the target repo.
628 :param push_branches: allow to create branches in the target repo.
629 """
629 """
630 self._remote.push(
630 self._remote.push(
631 [revision], repository_path, hooks=enable_hooks,
631 [revision], repository_path, hooks=enable_hooks,
632 push_branches=push_branches)
632 push_branches=push_branches)
633
633
634 def _local_merge(self, target_ref, merge_message, user_name, user_email,
634 def _local_merge(self, target_ref, merge_message, user_name, user_email,
635 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
635 source_ref, use_rebase=False, close_commit_id=None, dry_run=False):
636 """
636 """
637 Merge the given source_revision into the checked out revision.
637 Merge the given source_revision into the checked out revision.
638
638
639 Returns the commit id of the merge and a boolean indicating if the
639 Returns the commit id of the merge and a boolean indicating if the
640 commit needs to be pushed.
640 commit needs to be pushed.
641 """
641 """
642
642
643 source_ref_commit_id = source_ref.commit_id
643 source_ref_commit_id = source_ref.commit_id
644 target_ref_commit_id = target_ref.commit_id
644 target_ref_commit_id = target_ref.commit_id
645
645
646 # update our workdir to target ref, for proper merge
646 # update our workdir to target ref, for proper merge
647 self._update(target_ref_commit_id, clean=True)
647 self._update(target_ref_commit_id, clean=True)
648
648
649 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
649 ancestor = self._ancestor(target_ref_commit_id, source_ref_commit_id)
650 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
650 is_the_same_branch = self._is_the_same_branch(target_ref, source_ref)
651
651
652 if close_commit_id:
652 if close_commit_id:
653 # NOTE(marcink): if we get the close commit, this is our new source
653 # NOTE(marcink): if we get the close commit, this is our new source
654 # which will include the close commit itself.
654 # which will include the close commit itself.
655 source_ref_commit_id = close_commit_id
655 source_ref_commit_id = close_commit_id
656
656
657 if ancestor == source_ref_commit_id:
657 if ancestor == source_ref_commit_id:
658 # Nothing to do, the changes were already integrated
658 # Nothing to do, the changes were already integrated
659 return target_ref_commit_id, False
659 return target_ref_commit_id, False
660
660
661 elif ancestor == target_ref_commit_id and is_the_same_branch:
661 elif ancestor == target_ref_commit_id and is_the_same_branch:
662 # In this case we should force a commit message
662 # In this case we should force a commit message
663 return source_ref_commit_id, True
663 return source_ref_commit_id, True
664
664
665 unresolved = None
665 unresolved = None
666 if use_rebase:
666 if use_rebase:
667 try:
667 try:
668 bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
668 bookmark_name = f'rcbook{source_ref_commit_id}{target_ref_commit_id}'
669 self.bookmark(bookmark_name, revision=source_ref.commit_id)
669 self.bookmark(bookmark_name, revision=source_ref.commit_id)
670 self._remote.rebase(
670 self._remote.rebase(
671 source=source_ref_commit_id, dest=target_ref_commit_id)
671 source=source_ref_commit_id, dest=target_ref_commit_id)
672 self._remote.invalidate_vcs_cache()
672 self._remote.invalidate_vcs_cache()
673 self._update(bookmark_name, clean=True)
673 self._update(bookmark_name, clean=True)
674 return self._identify(), True
674 return self._identify(), True
675 except RepositoryError as e:
675 except RepositoryError as e:
676 # The rebase-abort may raise another exception which 'hides'
676 # The rebase-abort may raise another exception which 'hides'
677 # the original one, therefore we log it here.
677 # the original one, therefore we log it here.
678 log.exception('Error while rebasing shadow repo during merge.')
678 log.exception('Error while rebasing shadow repo during merge.')
679 if 'unresolved conflicts' in safe_str(e):
679 if 'unresolved conflicts' in safe_str(e):
680 unresolved = self._remote.get_unresolved_files()
680 unresolved = self._remote.get_unresolved_files()
681 log.debug('unresolved files: %s', unresolved)
681 log.debug('unresolved files: %s', unresolved)
682
682
683 # Cleanup any rebase leftovers
683 # Cleanup any rebase leftovers
684 self._remote.invalidate_vcs_cache()
684 self._remote.invalidate_vcs_cache()
685 self._remote.rebase(abort=True)
685 self._remote.rebase(abort=True)
686 self._remote.invalidate_vcs_cache()
686 self._remote.invalidate_vcs_cache()
687 self._remote.update(clean=True)
687 self._remote.update(clean=True)
688 if unresolved:
688 if unresolved:
689 raise UnresolvedFilesInRepo(unresolved)
689 raise UnresolvedFilesInRepo(unresolved)
690 else:
690 else:
691 raise
691 raise
692 else:
692 else:
693 try:
693 try:
694 self._remote.merge(source_ref_commit_id)
694 self._remote.merge(source_ref_commit_id)
695 self._remote.invalidate_vcs_cache()
695 self._remote.invalidate_vcs_cache()
696 self._remote.commit(
696 self._remote.commit(
697 message=safe_str(merge_message),
697 message=safe_str(merge_message),
698 username=safe_str(f'{user_name} <{user_email}>'))
698 username=safe_str(f'{user_name} <{user_email}>'))
699 self._remote.invalidate_vcs_cache()
699 self._remote.invalidate_vcs_cache()
700 return self._identify(), True
700 return self._identify(), True
701 except RepositoryError as e:
701 except RepositoryError as e:
702 # The merge-abort may raise another exception which 'hides'
702 # The merge-abort may raise another exception which 'hides'
703 # the original one, therefore we log it here.
703 # the original one, therefore we log it here.
704 log.exception('Error while merging shadow repo during merge.')
704 log.exception('Error while merging shadow repo during merge.')
705 if 'unresolved merge conflicts' in safe_str(e):
705 if 'unresolved merge conflicts' in safe_str(e):
706 unresolved = self._remote.get_unresolved_files()
706 unresolved = self._remote.get_unresolved_files()
707 log.debug('unresolved files: %s', unresolved)
707 log.debug('unresolved files: %s', unresolved)
708
708
709 # Cleanup any merge leftovers
709 # Cleanup any merge leftovers
710 self._remote.update(clean=True)
710 self._remote.update(clean=True)
711 if unresolved:
711 if unresolved:
712 raise UnresolvedFilesInRepo(unresolved)
712 raise UnresolvedFilesInRepo(unresolved)
713 else:
713 else:
714 raise
714 raise
715
715
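# Outcome summary for _local_merge above (derived from its branches):
#   ancestor == source commit                -> (target_ref_commit_id, False): changes already integrated
#   ancestor == target commit, same branch   -> (source_ref_commit_id, True): fast-forward, push needed
#   use_rebase=True                          -> rebase source onto target, return (new tip, True)
#   otherwise                                -> merge + commit, return (new tip, True)
#   unresolved conflicts                     -> UnresolvedFilesInRepo raised after cleanup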
716 def _local_close(self, target_ref, user_name, user_email,
716 def _local_close(self, target_ref, user_name, user_email,
717 source_ref, close_message=''):
717 source_ref, close_message=''):
718 """
718 """
719 Close the branch of the given source_revision
719 Close the branch of the given source_revision
720
720
721 Returns the commit id of the close and a boolean indicating if the
721 Returns the commit id of the close and a boolean indicating if the
722 commit needs to be pushed.
722 commit needs to be pushed.
723 """
723 """
724 self._update(source_ref.commit_id)
724 self._update(source_ref.commit_id)
725 message = close_message or f"Closing branch: `{source_ref.name}`"
725 message = close_message or f"Closing branch: `{source_ref.name}`"
726 try:
726 try:
727 self._remote.commit(
727 self._remote.commit(
728 message=safe_str(message),
728 message=safe_str(message),
729 username=safe_str(f'{user_name} <{user_email}>'),
729 username=safe_str(f'{user_name} <{user_email}>'),
730 close_branch=True)
730 close_branch=True)
731 self._remote.invalidate_vcs_cache()
731 self._remote.invalidate_vcs_cache()
732 return self._identify(), True
732 return self._identify(), True
733 except RepositoryError:
733 except RepositoryError:
734 # Cleanup any commit leftovers
734 # Cleanup any commit leftovers
735 self._remote.update(clean=True)
735 self._remote.update(clean=True)
736 raise
736 raise
737
737
738 def _is_the_same_branch(self, target_ref, source_ref):
738 def _is_the_same_branch(self, target_ref, source_ref):
739 return (
739 return (
740 self._get_branch_name(target_ref) ==
740 self._get_branch_name(target_ref) ==
741 self._get_branch_name(source_ref))
741 self._get_branch_name(source_ref))
742
742
743 def _get_branch_name(self, ref):
743 def _get_branch_name(self, ref):
744 if ref.type == 'branch':
744 if ref.type == 'branch':
745 return ref.name
745 return ref.name
746 return self._remote.ctx_branch(ref.commit_id)
746 return self._remote.ctx_branch(ref.commit_id)
747
747
748 def _maybe_prepare_merge_workspace(
748 def _maybe_prepare_merge_workspace(
749 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
749 self, repo_id, workspace_id, unused_target_ref, unused_source_ref):
750 shadow_repository_path = self._get_shadow_repository_path(
750 shadow_repository_path = self._get_shadow_repository_path(
751 self.path, repo_id, workspace_id)
751 self.path, repo_id, workspace_id)
752 if not os.path.exists(shadow_repository_path):
752 if not os.path.exists(shadow_repository_path):
753 self._local_clone(shadow_repository_path)
753 self._local_clone(shadow_repository_path)
754 log.debug(
754 log.debug(
755 'Prepared shadow repository in %s', shadow_repository_path)
755 'Prepared shadow repository in %s', shadow_repository_path)
756
756
757 return shadow_repository_path
757 return shadow_repository_path
758
758
759 def _merge_repo(self, repo_id, workspace_id, target_ref,
759 def _merge_repo(self, repo_id, workspace_id, target_ref,
760 source_repo, source_ref, merge_message,
760 source_repo, source_ref, merge_message,
761 merger_name, merger_email, dry_run=False,
761 merger_name, merger_email, dry_run=False,
762 use_rebase=False, close_branch=False):
762 use_rebase=False, close_branch=False):
763
763
764 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
764 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
765 'rebase' if use_rebase else 'merge', dry_run)
765 'rebase' if use_rebase else 'merge', dry_run)
766
766
767 if target_ref.commit_id not in self._heads():
767 if target_ref.commit_id not in self._heads():
768 return MergeResponse(
768 return MergeResponse(
769 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
769 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
770 metadata={'target_ref': target_ref})
770 metadata={'target_ref': target_ref})
771
771
772 try:
772 try:
773 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
773 if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
774 heads_all = self._heads(target_ref.name)
774 heads_all = self._heads(target_ref.name)
775 max_heads = 10
775 max_heads = 10
776 if len(heads_all) > max_heads:
776 if len(heads_all) > max_heads:
777 heads = '\n,'.join(
777 heads = '\n,'.join(
778 heads_all[:max_heads] +
778 heads_all[:max_heads] +
779 [f'and {len(heads_all)-max_heads} more.'])
779 [f'and {len(heads_all)-max_heads} more.'])
780 else:
780 else:
781 heads = '\n,'.join(heads_all)
781 heads = '\n,'.join(heads_all)
782 metadata = {
782 metadata = {
783 'target_ref': target_ref,
783 'target_ref': target_ref,
784 'source_ref': source_ref,
784 'source_ref': source_ref,
785 'heads': heads
785 'heads': heads
786 }
786 }
787 return MergeResponse(
787 return MergeResponse(
788 False, False, None,
788 False, False, None,
789 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
789 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
790 metadata=metadata)
790 metadata=metadata)
791 except CommitDoesNotExistError:
791 except CommitDoesNotExistError:
792 log.exception('Failure when looking up branch heads on hg target')
792 log.exception('Failure when looking up branch heads on hg target')
793 return MergeResponse(
793 return MergeResponse(
794 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
794 False, False, None, MergeFailureReason.MISSING_TARGET_REF,
795 metadata={'target_ref': target_ref})
795 metadata={'target_ref': target_ref})
796
796
797 shadow_repository_path = self._maybe_prepare_merge_workspace(
797 shadow_repository_path = self._maybe_prepare_merge_workspace(
798 repo_id, workspace_id, target_ref, source_ref)
798 repo_id, workspace_id, target_ref, source_ref)
799 shadow_repo = self.get_shadow_instance(shadow_repository_path)
799 shadow_repo = self.get_shadow_instance(shadow_repository_path)
800
800
801 log.debug('Pulling in target reference %s', target_ref)
801 log.debug('Pulling in target reference %s', target_ref)
802 self._validate_pull_reference(target_ref)
802 self._validate_pull_reference(target_ref)
803 shadow_repo._local_pull(self.path, target_ref)
803 shadow_repo._local_pull(self.path, target_ref)
804
804
805 try:
805 try:
806 log.debug('Pulling in source reference %s', source_ref)
806 log.debug('Pulling in source reference %s', source_ref)
807 source_repo._validate_pull_reference(source_ref)
807 source_repo._validate_pull_reference(source_ref)
808 shadow_repo._local_pull(source_repo.path, source_ref)
808 shadow_repo._local_pull(source_repo.path, source_ref)
809 except CommitDoesNotExistError:
809 except CommitDoesNotExistError:
810 log.exception('Failure when doing local pull on hg shadow repo')
810 log.exception('Failure when doing local pull on hg shadow repo')
811 return MergeResponse(
811 return MergeResponse(
812 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
812 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
813 metadata={'source_ref': source_ref})
813 metadata={'source_ref': source_ref})
814
814
815 merge_ref = None
815 merge_ref = None
816 merge_commit_id = None
816 merge_commit_id = None
817 close_commit_id = None
817 close_commit_id = None
818 merge_failure_reason = MergeFailureReason.NONE
818 merge_failure_reason = MergeFailureReason.NONE
819 metadata = {}
819 metadata = {}
820
820
821 # enforce that close branch is only used when the source is
821 # enforce that close branch is only used when the source is
822 # an actual branch
822 # an actual branch
823 close_branch = close_branch and source_ref.type == 'branch'
823 close_branch = close_branch and source_ref.type == 'branch'
824
824
825 # don't allow closing the branch if source and target are the same
825 # don't allow closing the branch if source and target are the same
826 close_branch = close_branch and source_ref.name != target_ref.name
826 close_branch = close_branch and source_ref.name != target_ref.name
827
827
828 needs_push_on_close = False
828 needs_push_on_close = False
829 if close_branch and not use_rebase and not dry_run:
829 if close_branch and not use_rebase and not dry_run:
830 try:
830 try:
831 close_commit_id, needs_push_on_close = shadow_repo._local_close(
831 close_commit_id, needs_push_on_close = shadow_repo._local_close(
832 target_ref, merger_name, merger_email, source_ref)
832 target_ref, merger_name, merger_email, source_ref)
833 merge_possible = True
833 merge_possible = True
834 except RepositoryError:
834 except RepositoryError:
835 log.exception('Failure when doing close branch on '
835 log.exception('Failure when doing close branch on '
836 'shadow repo: %s', shadow_repo)
836 'shadow repo: %s', shadow_repo)
837 merge_possible = False
837 merge_possible = False
838 merge_failure_reason = MergeFailureReason.MERGE_FAILED
838 merge_failure_reason = MergeFailureReason.MERGE_FAILED
839 else:
839 else:
840 merge_possible = True
840 merge_possible = True
841
841
842 needs_push = False
842 needs_push = False
843 if merge_possible:
843 if merge_possible:
844
844
845 try:
845 try:
846 merge_commit_id, needs_push = shadow_repo._local_merge(
846 merge_commit_id, needs_push = shadow_repo._local_merge(
847 target_ref, merge_message, merger_name, merger_email,
847 target_ref, merge_message, merger_name, merger_email,
848 source_ref, use_rebase=use_rebase,
848 source_ref, use_rebase=use_rebase,
849 close_commit_id=close_commit_id, dry_run=dry_run)
849 close_commit_id=close_commit_id, dry_run=dry_run)
850 merge_possible = True
850 merge_possible = True
851
851
852 # read the state of the close action; it may have
852 # read the state of the close action; it may have
853 # required a push
853 # required a push
854 needs_push = needs_push or needs_push_on_close
854 needs_push = needs_push or needs_push_on_close
855
855
856 # Set a bookmark pointing to the merge commit. This bookmark
856 # Set a bookmark pointing to the merge commit. This bookmark
857 # may be used to easily identify the last successful merge
857 # may be used to easily identify the last successful merge
858 # commit in the shadow repository.
858 # commit in the shadow repository.
859 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
859 shadow_repo.bookmark('pr-merge', revision=merge_commit_id)
860 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
860 merge_ref = Reference('book', 'pr-merge', merge_commit_id)
861 except SubrepoMergeError:
861 except SubrepoMergeError:
862 log.exception(
862 log.exception(
863 'Subrepo merge error during local merge on hg shadow repo.')
863 'Subrepo merge error during local merge on hg shadow repo.')
864 merge_possible = False
864 merge_possible = False
865 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
865 merge_failure_reason = MergeFailureReason.SUBREPO_MERGE_FAILED
866 needs_push = False
866 needs_push = False
867 except RepositoryError as e:
867 except RepositoryError as e:
868 log.exception('Failure when doing local merge on hg shadow repo')
868 log.exception('Failure when doing local merge on hg shadow repo')
869 metadata['unresolved_files'] = 'no unresolved files found'
869 metadata['unresolved_files'] = 'no unresolved files found'
870
870
871 if isinstance(e, UnresolvedFilesInRepo):
871 if isinstance(e, UnresolvedFilesInRepo):
872 all_conflicts = list(e.args[0])
872 all_conflicts = list(e.args[0])
873 max_conflicts = 20
873 max_conflicts = 20
874 if len(all_conflicts) > max_conflicts:
874 if len(all_conflicts) > max_conflicts:
875 conflicts = all_conflicts[:max_conflicts] \
875 conflicts = all_conflicts[:max_conflicts] \
876 + [f'and {len(all_conflicts)-max_conflicts} more.']
876 + [f'and {len(all_conflicts)-max_conflicts} more.']
877 else:
877 else:
878 conflicts = all_conflicts
878 conflicts = all_conflicts
879 metadata['unresolved_files'] = \
879 metadata['unresolved_files'] = \
880 '\n* conflict: ' + \
880 '\n* conflict: ' + \
881 ('\n * conflict: '.join(conflicts))
881 ('\n * conflict: '.join(conflicts))
882
882
883 merge_possible = False
883 merge_possible = False
884 merge_failure_reason = MergeFailureReason.MERGE_FAILED
884 merge_failure_reason = MergeFailureReason.MERGE_FAILED
885 needs_push = False
885 needs_push = False
886
886
887 if merge_possible and not dry_run:
887 if merge_possible and not dry_run:
888 if needs_push:
888 if needs_push:
889 # In case the target is a bookmark, update it, so after pushing
889 # In case the target is a bookmark, update it, so after pushing
890 # the bookmark is also updated in the target.
890 # the bookmark is also updated in the target.
891 if target_ref.type == 'book':
891 if target_ref.type == 'book':
892 shadow_repo.bookmark(
892 shadow_repo.bookmark(
893 target_ref.name, revision=merge_commit_id)
893 target_ref.name, revision=merge_commit_id)
894 try:
894 try:
895 shadow_repo_with_hooks = self.get_shadow_instance(
895 shadow_repo_with_hooks = self.get_shadow_instance(
896 shadow_repository_path,
896 shadow_repository_path,
897 enable_hooks=True)
897 enable_hooks=True)
898 # This is the actual merge action, we push from shadow
898 # This is the actual merge action, we push from shadow
899 # into origin.
899 # into origin.
900 # Note: the push_branches option will push any new branch
900 # Note: the push_branches option will push any new branch
901 # defined in the source repository to the target. This may
901 # defined in the source repository to the target. This may
902 # be dangerous as branches are permanent in Mercurial.
902 # be dangerous as branches are permanent in Mercurial.
903 # This feature was requested in issue #441.
903 # This feature was requested in issue #441.
904 shadow_repo_with_hooks._local_push(
904 shadow_repo_with_hooks._local_push(
905 merge_commit_id, self.path, push_branches=True,
905 merge_commit_id, self.path, push_branches=True,
906 enable_hooks=True)
906 enable_hooks=True)
907
907
908 # maybe we also need to push the close_commit_id
908 # maybe we also need to push the close_commit_id
909 if close_commit_id:
909 if close_commit_id:
910 shadow_repo_with_hooks._local_push(
910 shadow_repo_with_hooks._local_push(
911 close_commit_id, self.path, push_branches=True,
911 close_commit_id, self.path, push_branches=True,
912 enable_hooks=True)
912 enable_hooks=True)
913 merge_succeeded = True
913 merge_succeeded = True
914 except RepositoryError:
914 except RepositoryError:
915 log.exception(
915 log.exception(
916 'Failure when doing local push from the shadow '
916 'Failure when doing local push from the shadow '
917 'repository to the target repository at %s.', self.path)
917 'repository to the target repository at %s.', self.path)
918 merge_succeeded = False
918 merge_succeeded = False
919 merge_failure_reason = MergeFailureReason.PUSH_FAILED
919 merge_failure_reason = MergeFailureReason.PUSH_FAILED
920 metadata['target'] = 'hg shadow repo'
920 metadata['target'] = 'hg shadow repo'
921 metadata['merge_commit'] = merge_commit_id
921 metadata['merge_commit'] = merge_commit_id
922 else:
922 else:
923 merge_succeeded = True
923 merge_succeeded = True
924 else:
924 else:
925 merge_succeeded = False
925 merge_succeeded = False
926
926
927 return MergeResponse(
927 return MergeResponse(
928 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
928 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
929 metadata=metadata)
929 metadata=metadata)
930
930
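# High-level flow of _merge_repo above (summary of the steps implemented in the method):
#   1. bail out when target_ref is not a head, or the target branch has multiple heads
#   2. prepare (or reuse) the shadow repository and pull the target, then the source, into it
#   3. optionally close the source branch in the shadow repo (close_branch, non-rebase, non-dry-run)
#   4. perform the actual merge or rebase via _local_merge and bookmark the result as 'pr-merge'
#   5. when possible and not a dry run, push the merge (and close) commits back to self.path
#      with hooks enabled, reporting the result through MergeResponse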
931 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
931 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
932 config = self.config.copy()
932 config = self.config.copy()
933 if not enable_hooks:
933 if not enable_hooks:
934 config.clear_section('hooks')
934 config.clear_section('hooks')
935 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
935 return MercurialRepository(shadow_repository_path, config, with_wire={"cache": cache})
936
936
937 def _validate_pull_reference(self, reference):
937 def _validate_pull_reference(self, reference):
938 if not (reference.name in self.bookmarks or
938 if not (reference.name in self.bookmarks or
939 reference.name in self.branches or
939 reference.name in self.branches or
940 self.get_commit(reference.commit_id)):
940 self.get_commit(reference.commit_id)):
941 raise CommitDoesNotExistError(
941 raise CommitDoesNotExistError(
942 'Unknown branch, bookmark or commit id')
942 'Unknown branch, bookmark or commit id')
943
943
944 def _local_pull(self, repository_path, reference):
944 def _local_pull(self, repository_path, reference):
945 """
945 """
946 Fetch a branch, bookmark or commit from a local repository.
946 Fetch a branch, bookmark or commit from a local repository.
947 """
947 """
948 repository_path = os.path.abspath(repository_path)
948 repository_path = os.path.abspath(repository_path)
949 if repository_path == self.path:
949 if repository_path == self.path:
950 raise ValueError('Cannot pull from the same repository')
950 raise ValueError('Cannot pull from the same repository')
951
951
952 reference_type_to_option_name = {
952 reference_type_to_option_name = {
953 'book': 'bookmark',
953 'book': 'bookmark',
954 'branch': 'branch',
954 'branch': 'branch',
955 }
955 }
956 option_name = reference_type_to_option_name.get(
956 option_name = reference_type_to_option_name.get(
957 reference.type, 'revision')
957 reference.type, 'revision')
958
958
959 if option_name == 'revision':
959 if option_name == 'revision':
960 ref = reference.commit_id
960 ref = reference.commit_id
961 else:
961 else:
962 ref = reference.name
962 ref = reference.name
963
963
964 options = {option_name: [ref]}
964 options = {option_name: [ref]}
965 self._remote.pull_cmd(repository_path, hooks=False, **options)
965 self._remote.pull_cmd(repository_path, hooks=False, **options)
966 self._remote.invalidate_vcs_cache()
966 self._remote.invalidate_vcs_cache()
967
967
968 def bookmark(self, bookmark, revision=None):
968 def bookmark(self, bookmark, revision=None):
969 if isinstance(bookmark, str):
969 if isinstance(bookmark, str):
970 bookmark = safe_str(bookmark)
970 bookmark = safe_str(bookmark)
971 self._remote.bookmark(bookmark, revision=revision)
971 self._remote.bookmark(bookmark, revision=revision)
972 self._remote.invalidate_vcs_cache()
972 self._remote.invalidate_vcs_cache()
973
973
974 def get_path_permissions(self, username):
974 def get_path_permissions(self, username):
975 hgacl_file = os.path.join(self.path, '.hg/hgacl')
975 hgacl_file = os.path.join(self.path, '.hg/hgacl')
976
976
977 def read_patterns(suffix):
977 def read_patterns(suffix):
978 svalue = None
978 svalue = None
979 for section, option in [
979 for section, option in [
980 ('narrowacl', username + suffix),
980 ('narrowacl', username + suffix),
981 ('narrowacl', 'default' + suffix),
981 ('narrowacl', 'default' + suffix),
982 ('narrowhgacl', username + suffix),
982 ('narrowhgacl', username + suffix),
983 ('narrowhgacl', 'default' + suffix)
983 ('narrowhgacl', 'default' + suffix)
984 ]:
984 ]:
985 try:
985 try:
986 svalue = hgacl.get(section, option)
986 svalue = hgacl.get(section, option)
987 break # stop at the first value we find
987 break # stop at the first value we find
988 except configparser.NoOptionError:
988 except configparser.NoOptionError:
989 pass
989 pass
990 if not svalue:
990 if not svalue:
991 return None
991 return None
992 result = ['/']
992 result = ['/']
993 for pattern in svalue.split():
993 for pattern in svalue.split():
994 result.append(pattern)
994 result.append(pattern)
995 if '*' not in pattern and '?' not in pattern:
995 if '*' not in pattern and '?' not in pattern:
996 result.append(pattern + '/*')
996 result.append(pattern + '/*')
997 return result
997 return result
998
998
999 if os.path.exists(hgacl_file):
999 if os.path.exists(hgacl_file):
1000 try:
1000 try:
1001 hgacl = configparser.RawConfigParser()
1001 hgacl = configparser.RawConfigParser()
1002 hgacl.read(hgacl_file)
1002 hgacl.read(hgacl_file)
1003
1003
1004 includes = read_patterns('.includes')
1004 includes = read_patterns('.includes')
1005 excludes = read_patterns('.excludes')
1005 excludes = read_patterns('.excludes')
1006 return BasePathPermissionChecker.create_from_patterns(
1006 return BasePathPermissionChecker.create_from_patterns(
1007 includes, excludes)
1007 includes, excludes)
1008 except BaseException as e:
1008 except BaseException as e:
1009 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1009 msg = 'Cannot read ACL settings from {} on {}: {}'.format(
1010 hgacl_file, self.name, e)
1010 hgacl_file, self.name, e)
1011 raise exceptions.RepositoryRequirementError(msg)
1011 raise exceptions.RepositoryRequirementError(msg)
1012 else:
1012 else:
1013 return None
1013 return None
1014
1014
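# Illustrative expansion performed by read_patterns() above (hypothetical
# .hg/hgacl content, names are examples only):
#
#   [narrowacl]
#   john.includes = docs src/setup.py lib/*
#
# read_patterns('.includes') for username 'john' then returns:
#   ['/', 'docs', 'docs/*', 'src/setup.py', 'src/setup.py/*', 'lib/*']
# every pattern without '*' or '?' also gets a '<pattern>/*' entry, and '/' is always first.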
1015
1015
1016 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1016 class MercurialIndexBasedCollectionGenerator(CollectionGenerator):
1017
1017
1018 def _commit_factory(self, commit_id):
1018 def _commit_factory(self, commit_id):
1019 if isinstance(commit_id, int):
1019 if isinstance(commit_id, int):
1020 return self.repo.get_commit(
1020 return self.repo.get_commit(
1021 commit_idx=commit_id, pre_load=self.pre_load)
1021 commit_idx=commit_id, pre_load=self.pre_load)
1022 else:
1022 else:
1023 return self.repo.get_commit(
1023 return self.repo.get_commit(
1024 commit_id=commit_id, pre_load=self.pre_load)
1024 commit_id=commit_id, pre_load=self.pre_load)
@@ -1,1044 +1,1044 b''
1 # Copyright (C) 2010-2023 RhodeCode GmbH
1 # Copyright (C) 2010-2023 RhodeCode GmbH
2 #
2 #
3 # This program is free software: you can redistribute it and/or modify
3 # This program is free software: you can redistribute it and/or modify
4 # it under the terms of the GNU Affero General Public License, version 3
4 # it under the terms of the GNU Affero General Public License, version 3
5 # (only), as published by the Free Software Foundation.
5 # (only), as published by the Free Software Foundation.
6 #
6 #
7 # This program is distributed in the hope that it will be useful,
7 # This program is distributed in the hope that it will be useful,
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 # GNU General Public License for more details.
10 # GNU General Public License for more details.
11 #
11 #
12 # You should have received a copy of the GNU Affero General Public License
12 # You should have received a copy of the GNU Affero General Public License
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
13 # along with this program. If not, see <http://www.gnu.org/licenses/>.
14 #
14 #
15 # This program is dual-licensed. If you wish to learn more about the
15 # This program is dual-licensed. If you wish to learn more about the
16 # RhodeCode Enterprise Edition, including its added features, Support services,
16 # RhodeCode Enterprise Edition, including its added features, Support services,
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
17 # and proprietary license terms, please see https://rhodecode.com/licenses/
18
18
19 """
19 """
20 Scm model for RhodeCode
20 Scm model for RhodeCode
21 """
21 """
22
22
23 import os.path
23 import os.path
24 import traceback
24 import traceback
25 import logging
25 import logging
26 import io
26 import io
27
27
28 from sqlalchemy import func
28 from sqlalchemy import func
29 from zope.cachedescriptors.property import Lazy as LazyProperty
29 from zope.cachedescriptors.property import Lazy as LazyProperty
30
30
31 import rhodecode
31 import rhodecode
32 from rhodecode.lib.str_utils import safe_bytes
32 from rhodecode.lib.str_utils import safe_bytes
33 from rhodecode.lib.vcs import get_backend
33 from rhodecode.lib.vcs import get_backend
34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
34 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
35 from rhodecode.lib.vcs.nodes import FileNode
35 from rhodecode.lib.vcs.nodes import FileNode
36 from rhodecode.lib.vcs.backends.base import EmptyCommit
36 from rhodecode.lib.vcs.backends.base import EmptyCommit
37 from rhodecode.lib import helpers as h, rc_cache
37 from rhodecode.lib import helpers as h, rc_cache
38 from rhodecode.lib.auth import (
38 from rhodecode.lib.auth import (
39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
39 HasRepoPermissionAny, HasRepoGroupPermissionAny,
40 HasUserGroupPermissionAny)
40 HasUserGroupPermissionAny)
41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
41 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
42 from rhodecode.lib import hooks_utils
42 from rhodecode.lib import hooks_utils
43 from rhodecode.lib.utils import (
43 from rhodecode.lib.utils import (
44 get_filesystem_repos, make_db_config)
44 get_filesystem_repos, make_db_config)
45 from rhodecode.lib.str_utils import safe_str
45 from rhodecode.lib.str_utils import safe_str
46 from rhodecode.lib.system_info import get_system_info
46 from rhodecode.lib.system_info import get_system_info
47 from rhodecode.model import BaseModel
47 from rhodecode.model import BaseModel
48 from rhodecode.model.db import (
48 from rhodecode.model.db import (
49 or_, false, null,
49 or_, false, null,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 PullRequest, FileStore)
51 PullRequest, FileStore)
52 from rhodecode.model.settings import VcsSettingsModel
52 from rhodecode.model.settings import VcsSettingsModel
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 class UserTemp(object):
58 class UserTemp(object):
59 def __init__(self, user_id):
59 def __init__(self, user_id):
60 self.user_id = user_id
60 self.user_id = user_id
61
61
62 def __repr__(self):
62 def __repr__(self):
63 return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
63 return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
64
64
65
65
66 class RepoTemp(object):
66 class RepoTemp(object):
67 def __init__(self, repo_id):
67 def __init__(self, repo_id):
68 self.repo_id = repo_id
68 self.repo_id = repo_id
69
69
70 def __repr__(self):
70 def __repr__(self):
71 return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
71 return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
72
72
73
73
74 class SimpleCachedRepoList(object):
74 class SimpleCachedRepoList(object):
75 """
75 """
76 Lighter version of of iteration of repos without the scm initialisation,
76 Lighter version of of iteration of repos without the scm initialisation,
77 and with cache usage
77 and with cache usage
78 """
78 """
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 self.db_repo_list = db_repo_list
80 self.db_repo_list = db_repo_list
81 self.repos_path = repos_path
81 self.repos_path = repos_path
82 self.order_by = order_by
82 self.order_by = order_by
83 self.reversed = (order_by or '').startswith('-')
83 self.reversed = (order_by or '').startswith('-')
84 if not perm_set:
84 if not perm_set:
85 perm_set = ['repository.read', 'repository.write',
85 perm_set = ['repository.read', 'repository.write',
86 'repository.admin']
86 'repository.admin']
87 self.perm_set = perm_set
87 self.perm_set = perm_set
88
88
89 def __len__(self):
89 def __len__(self):
90 return len(self.db_repo_list)
90 return len(self.db_repo_list)
91
91
92 def __repr__(self):
92 def __repr__(self):
93 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
93 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
94
94
95 def __iter__(self):
95 def __iter__(self):
96 for dbr in self.db_repo_list:
96 for dbr in self.db_repo_list:
97 # check permission at this level
97 # check permission at this level
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
98 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 dbr.repo_name, 'SimpleCachedRepoList check')
99 dbr.repo_name, 'SimpleCachedRepoList check')
100 if not has_perm:
100 if not has_perm:
101 continue
101 continue
102
102
103 tmp_d = {
103 tmp_d = {
104 'name': dbr.repo_name,
104 'name': dbr.repo_name,
105 'dbrepo': dbr.get_dict(),
105 'dbrepo': dbr.get_dict(),
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 }
107 }
108 yield tmp_d
108 yield tmp_d
109
109
110
110
111 class _PermCheckIterator(object):
111 class _PermCheckIterator(object):
112
112
113 def __init__(
113 def __init__(
114 self, obj_list, obj_attr, perm_set, perm_checker,
114 self, obj_list, obj_attr, perm_set, perm_checker,
115 extra_kwargs=None):
115 extra_kwargs=None):
116 """
116 """
117 Creates iterator from given list of objects, additionally
117 Creates iterator from given list of objects, additionally
118 checking permission for them from perm_set var
118 checking permission for them from perm_set var
119
119
120 :param obj_list: list of db objects
120 :param obj_list: list of db objects
121 :param obj_attr: attribute of object to pass into perm_checker
121 :param obj_attr: attribute of object to pass into perm_checker
122 :param perm_set: list of permissions to check
122 :param perm_set: list of permissions to check
123 :param perm_checker: callable to check permissions against
123 :param perm_checker: callable to check permissions against
124 """
124 """
125 self.obj_list = obj_list
125 self.obj_list = obj_list
126 self.obj_attr = obj_attr
126 self.obj_attr = obj_attr
127 self.perm_set = perm_set
127 self.perm_set = perm_set
128 self.perm_checker = perm_checker(*self.perm_set)
128 self.perm_checker = perm_checker(*self.perm_set)
129 self.extra_kwargs = extra_kwargs or {}
129 self.extra_kwargs = extra_kwargs or {}
130
130
131 def __len__(self):
131 def __len__(self):
132 return len(self.obj_list)
132 return len(self.obj_list)
133
133
134 def __repr__(self):
134 def __repr__(self):
135 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
135 return '<{} ({})>'.format(self.__class__.__name__, self.__len__())
136
136
137 def __iter__(self):
137 def __iter__(self):
138 for db_obj in self.obj_list:
138 for db_obj in self.obj_list:
139 # check permission at this level
139 # check permission at this level
140 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
140 # NOTE(marcink): the __dict__.get() is ~4x faster than getattr()
141 name = db_obj.__dict__.get(self.obj_attr, None)
141 name = db_obj.__dict__.get(self.obj_attr, None)
142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
142 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
143 continue
143 continue
144
144
145 yield db_obj
145 yield db_obj
146
146
147
147
148 class RepoList(_PermCheckIterator):
148 class RepoList(_PermCheckIterator):
149
149
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 if not perm_set:
151 if not perm_set:
152 perm_set = ['repository.read', 'repository.write', 'repository.admin']
152 perm_set = ['repository.read', 'repository.write', 'repository.admin']
153
153
154 super().__init__(
154 super().__init__(
155 obj_list=db_repo_list,
155 obj_list=db_repo_list,
156 obj_attr='_repo_name', perm_set=perm_set,
156 obj_attr='_repo_name', perm_set=perm_set,
157 perm_checker=HasRepoPermissionAny,
157 perm_checker=HasRepoPermissionAny,
158 extra_kwargs=extra_kwargs)
158 extra_kwargs=extra_kwargs)
159
159
160
160
161 class RepoGroupList(_PermCheckIterator):
161 class RepoGroupList(_PermCheckIterator):
162
162
163 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
163 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
164 if not perm_set:
164 if not perm_set:
165 perm_set = ['group.read', 'group.write', 'group.admin']
165 perm_set = ['group.read', 'group.write', 'group.admin']
166
166
167 super().__init__(
167 super().__init__(
168 obj_list=db_repo_group_list,
168 obj_list=db_repo_group_list,
169 obj_attr='_group_name', perm_set=perm_set,
169 obj_attr='_group_name', perm_set=perm_set,
170 perm_checker=HasRepoGroupPermissionAny,
170 perm_checker=HasRepoGroupPermissionAny,
171 extra_kwargs=extra_kwargs)
171 extra_kwargs=extra_kwargs)
172
172
173
173
174 class UserGroupList(_PermCheckIterator):
174 class UserGroupList(_PermCheckIterator):
175
175
176 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
176 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
177 if not perm_set:
177 if not perm_set:
178 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
178 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
179
179
180 super().__init__(
180 super().__init__(
181 obj_list=db_user_group_list,
181 obj_list=db_user_group_list,
182 obj_attr='users_group_name', perm_set=perm_set,
182 obj_attr='users_group_name', perm_set=perm_set,
183 perm_checker=HasUserGroupPermissionAny,
183 perm_checker=HasUserGroupPermissionAny,
184 extra_kwargs=extra_kwargs)
184 extra_kwargs=extra_kwargs)
185
185
186
186
187 class ScmModel(BaseModel):
187 class ScmModel(BaseModel):
188 """
188 """
189 Generic Scm Model
189 Generic Scm Model
190 """
190 """
191
191
192 @LazyProperty
192 @LazyProperty
193 def repos_path(self):
193 def repos_path(self):
194 """
194 """
195 Gets the repositories root path from database
195 Gets the repositories root path from database
196 """
196 """
197
197
198 settings_model = VcsSettingsModel(sa=self.sa)
198 settings_model = VcsSettingsModel(sa=self.sa)
199 return settings_model.get_repos_location()
199 return settings_model.get_repos_location()
200
200
201 def repo_scan(self, repos_path=None):
201 def repo_scan(self, repos_path=None):
202 """
202 """
203 Listing of repositories in given path. This path should not be a
203 Listing of repositories in given path. This path should not be a
204 repository itself. Return a dictionary of repository objects
204 repository itself. Return a dictionary of repository objects
205
205
206 :param repos_path: path to directory containing repositories
206 :param repos_path: path to directory containing repositories
207 """
207 """
208
208
209 if repos_path is None:
209 if repos_path is None:
210 repos_path = self.repos_path
210 repos_path = self.repos_path
211
211
212 log.info('scanning for repositories in %s', repos_path)
212 log.info('scanning for repositories in %s', repos_path)
213
213
214 config = make_db_config()
214 config = make_db_config()
215 config.set('extensions', 'largefiles', '')
215 config.set('extensions', 'largefiles', '')
216 repos = {}
216 repos = {}
217
217
218 for name, path in get_filesystem_repos(repos_path, recursive=True):
218 for name, path in get_filesystem_repos(repos_path, recursive=True):
219 # name needs to be decomposed and put back together using the /
219 # name needs to be decomposed and put back together using the /
220 # since this is internal storage separator for rhodecode
220 # since this is internal storage separator for rhodecode
221 name = Repository.normalize_repo_name(name)
221 name = Repository.normalize_repo_name(name)
222
222
223 try:
223 try:
224 if name in repos:
224 if name in repos:
225 raise RepositoryError('Duplicate repository name %s '
225 raise RepositoryError('Duplicate repository name %s '
226 'found in %s' % (name, path))
226 'found in %s' % (name, path))
227 elif path[0] in rhodecode.BACKENDS:
227 elif path[0] in rhodecode.BACKENDS:
228 backend = get_backend(path[0])
228 backend = get_backend(path[0])
229 repos[name] = backend(path[1], config=config,
229 repos[name] = backend(path[1], config=config,
230 with_wire={"cache": False})
230 with_wire={"cache": False})
231 except OSError:
231 except OSError:
232 continue
232 continue
233 except RepositoryError:
233 except RepositoryError:
234 log.exception('Failed to create a repo')
234 log.exception('Failed to create a repo')
235 continue
235 continue
236
236
237 log.debug('found %s paths with repositories', len(repos))
237 log.debug('found %s paths with repositories', len(repos))
238 return repos
238 return repos
239
239
240 def get_repos(self, all_repos=None, sort_key=None):
240 def get_repos(self, all_repos=None, sort_key=None):
241 """
241 """
242 Get all repositories from db and for each repo create its
242 Get all repositories from db and for each repo create its
243 backend instance and fill that backend with information from database
243 backend instance and fill that backend with information from database
244
244
245 :param all_repos: list of repository names as strings
245 :param all_repos: list of repository names as strings
246 give specific repositories list, good for filtering
246 give specific repositories list, good for filtering
247
247
248 :param sort_key: initial sorting of repositories
248 :param sort_key: initial sorting of repositories
249 """
249 """
250 if all_repos is None:
250 if all_repos is None:
251 all_repos = self.sa.query(Repository)\
251 all_repos = self.sa.query(Repository)\
252 .filter(Repository.group_id == null())\
252 .filter(Repository.group_id == null())\
253 .order_by(func.lower(Repository.repo_name)).all()
253 .order_by(func.lower(Repository.repo_name)).all()
254 repo_iter = SimpleCachedRepoList(
254 repo_iter = SimpleCachedRepoList(
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 return repo_iter
256 return repo_iter
257
257
258 def get_repo_groups(self, all_groups=None):
258 def get_repo_groups(self, all_groups=None):
259 if all_groups is None:
259 if all_groups is None:
260 all_groups = RepoGroup.query()\
260 all_groups = RepoGroup.query()\
261 .filter(RepoGroup.group_parent_id == null()).all()
261 .filter(RepoGroup.group_parent_id == null()).all()
262 return [x for x in RepoGroupList(all_groups)]
262 return [x for x in RepoGroupList(all_groups)]
263
263
264 def mark_for_invalidation(self, repo_name, delete=False):
264 def mark_for_invalidation(self, repo_name, delete=False):
265 """
265 """
266 Mark caches of this repo invalid in the database. `delete` flag
266 Mark caches of this repo invalid in the database. `delete` flag
267 removes the cache entries
267 removes the cache entries
268
268
269 :param repo_name: the repo_name for which caches should be marked
269 :param repo_name: the repo_name for which caches should be marked
270 invalid, or deleted
270 invalid, or deleted
271 :param delete: delete the entry keys instead of setting bool
271 :param delete: delete the entry keys instead of setting bool
272 flag on them, and also purge caches used by the dogpile
272 flag on them, and also purge caches used by the dogpile
273 """
273 """
274 repo = Repository.get_by_repo_name(repo_name)
274 repo = Repository.get_by_repo_name(repo_name)
275
275
276 if repo:
276 if repo:
277 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
277 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
278 repo_id=repo.repo_id)
278 repo_id=repo.repo_id)
279 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
279 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
280
280
281 repo_id = repo.repo_id
281 repo_id = repo.repo_id
282 config = repo._config
282 config = repo._config
283 config.set('extensions', 'largefiles', '')
283 config.set('extensions', 'largefiles', '')
284 repo.update_commit_cache(config=config, cs_cache=None)
284 repo.update_commit_cache(config=config, cs_cache=None)
285 if delete:
285 if delete:
286 cache_namespace_uid = f'cache_repo.{repo_id}'
286 cache_namespace_uid = f'cache_repo.{repo_id}'
287 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
287 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid, method=rc_cache.CLEAR_INVALIDATE)
288
288
289 def toggle_following_repo(self, follow_repo_id, user_id):
289 def toggle_following_repo(self, follow_repo_id, user_id):
290
290
291 f = self.sa.query(UserFollowing)\
291 f = self.sa.query(UserFollowing)\
292 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
292 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
293 .filter(UserFollowing.user_id == user_id).scalar()
293 .filter(UserFollowing.user_id == user_id).scalar()
294
294
295 if f is not None:
295 if f is not None:
296 try:
296 try:
297 self.sa.delete(f)
297 self.sa.delete(f)
298 return
298 return
299 except Exception:
299 except Exception:
300 log.error(traceback.format_exc())
300 log.error(traceback.format_exc())
301 raise
301 raise
302
302
303 try:
303 try:
304 f = UserFollowing()
304 f = UserFollowing()
305 f.user_id = user_id
305 f.user_id = user_id
306 f.follows_repo_id = follow_repo_id
306 f.follows_repo_id = follow_repo_id
307 self.sa.add(f)
307 self.sa.add(f)
308 except Exception:
308 except Exception:
309 log.error(traceback.format_exc())
309 log.error(traceback.format_exc())
310 raise
310 raise
311
311
312 def toggle_following_user(self, follow_user_id, user_id):
312 def toggle_following_user(self, follow_user_id, user_id):
313 f = self.sa.query(UserFollowing)\
313 f = self.sa.query(UserFollowing)\
314 .filter(UserFollowing.follows_user_id == follow_user_id)\
314 .filter(UserFollowing.follows_user_id == follow_user_id)\
315 .filter(UserFollowing.user_id == user_id).scalar()
315 .filter(UserFollowing.user_id == user_id).scalar()
316
316
317 if f is not None:
317 if f is not None:
318 try:
318 try:
319 self.sa.delete(f)
319 self.sa.delete(f)
320 return
320 return
321 except Exception:
321 except Exception:
322 log.error(traceback.format_exc())
322 log.error(traceback.format_exc())
323 raise
323 raise
324
324
325 try:
325 try:
326 f = UserFollowing()
326 f = UserFollowing()
327 f.user_id = user_id
327 f.user_id = user_id
328 f.follows_user_id = follow_user_id
328 f.follows_user_id = follow_user_id
329 self.sa.add(f)
329 self.sa.add(f)
330 except Exception:
330 except Exception:
331 log.error(traceback.format_exc())
331 log.error(traceback.format_exc())
332 raise
332 raise
333
333
334 def is_following_repo(self, repo_name, user_id, cache=False):
334 def is_following_repo(self, repo_name, user_id, cache=False):
335 r = self.sa.query(Repository)\
335 r = self.sa.query(Repository)\
336 .filter(Repository.repo_name == repo_name).scalar()
336 .filter(Repository.repo_name == repo_name).scalar()
337
337
338 f = self.sa.query(UserFollowing)\
338 f = self.sa.query(UserFollowing)\
339 .filter(UserFollowing.follows_repository == r)\
339 .filter(UserFollowing.follows_repository == r)\
340 .filter(UserFollowing.user_id == user_id).scalar()
340 .filter(UserFollowing.user_id == user_id).scalar()
341
341
342 return f is not None
342 return f is not None
343
343
344 def is_following_user(self, username, user_id, cache=False):
344 def is_following_user(self, username, user_id, cache=False):
345 u = User.get_by_username(username)
345 u = User.get_by_username(username)
346
346
347 f = self.sa.query(UserFollowing)\
347 f = self.sa.query(UserFollowing)\
348 .filter(UserFollowing.follows_user == u)\
348 .filter(UserFollowing.follows_user == u)\
349 .filter(UserFollowing.user_id == user_id).scalar()
349 .filter(UserFollowing.user_id == user_id).scalar()
350
350
351 return f is not None
351 return f is not None
352
352
353 def get_followers(self, repo):
353 def get_followers(self, repo):
354 repo = self._get_repo(repo)
354 repo = self._get_repo(repo)
355
355
356 return self.sa.query(UserFollowing)\
356 return self.sa.query(UserFollowing)\
357 .filter(UserFollowing.follows_repository == repo).count()
357 .filter(UserFollowing.follows_repository == repo).count()
358
358
359 def get_forks(self, repo):
359 def get_forks(self, repo):
360 repo = self._get_repo(repo)
360 repo = self._get_repo(repo)
361 return self.sa.query(Repository)\
361 return self.sa.query(Repository)\
362 .filter(Repository.fork == repo).count()
362 .filter(Repository.fork == repo).count()
363
363
364 def get_pull_requests(self, repo):
364 def get_pull_requests(self, repo):
365 repo = self._get_repo(repo)
365 repo = self._get_repo(repo)
366 return self.sa.query(PullRequest)\
366 return self.sa.query(PullRequest)\
367 .filter(PullRequest.target_repo == repo)\
367 .filter(PullRequest.target_repo == repo)\
368 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
368 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
369
369
370 def get_artifacts(self, repo):
370 def get_artifacts(self, repo):
371 repo = self._get_repo(repo)
371 repo = self._get_repo(repo)
372 return self.sa.query(FileStore)\
372 return self.sa.query(FileStore)\
373 .filter(FileStore.repo == repo)\
373 .filter(FileStore.repo == repo)\
374 .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()
374 .filter(or_(FileStore.hidden == null(), FileStore.hidden == false())).count()
375
375
376 def mark_as_fork(self, repo, fork, user):
376 def mark_as_fork(self, repo, fork, user):
377 repo = self._get_repo(repo)
377 repo = self._get_repo(repo)
378 fork = self._get_repo(fork)
378 fork = self._get_repo(fork)
379 if fork and repo.repo_id == fork.repo_id:
379 if fork and repo.repo_id == fork.repo_id:
380 raise Exception("Cannot set repository as fork of itself")
380 raise Exception("Cannot set repository as fork of itself")
381
381
382 if fork and repo.repo_type != fork.repo_type:
382 if fork and repo.repo_type != fork.repo_type:
383 raise RepositoryError(
383 raise RepositoryError(
384 "Cannot set repository as fork of repository with other type")
384 "Cannot set repository as fork of repository with other type")
385
385
386 repo.fork = fork
386 repo.fork = fork
387 self.sa.add(repo)
387 self.sa.add(repo)
388 return repo
388 return repo
389
389
390 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
390 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True, **kwargs):
391 dbrepo = self._get_repo(repo)
391 dbrepo = self._get_repo(repo)
392 remote_uri = remote_uri or dbrepo.clone_uri
392 remote_uri = remote_uri or dbrepo.clone_uri
393 if not remote_uri:
393 if not remote_uri:
394 raise Exception("This repository doesn't have a clone uri")
394 raise Exception("This repository doesn't have a clone uri")
395
395
396 repo = dbrepo.scm_instance(cache=False)
396 repo = dbrepo.scm_instance(cache=False)
397 repo.config.clear_section('hooks')
397 repo.config.clear_section('hooks')
398
398
399 try:
399 try:
400 # NOTE(marcink): add extra validation so we skip invalid urls
400 # NOTE(marcink): add extra validation so we skip invalid urls
401 # because these tasks can be executed via the scheduler without
401 # because these tasks can be executed via the scheduler without
402 # proper validation of remote_uri
402 # proper validation of remote_uri
403 if validate_uri:
403 if validate_uri:
404 config = make_db_config(clear_session=False)
404 config = make_db_config(clear_session=False)
405 url_validator(remote_uri, dbrepo.repo_type, config)
405 url_validator(remote_uri, dbrepo.repo_type, config)
406 except InvalidCloneUrl:
406 except InvalidCloneUrl:
407 raise
407 raise
408
408
409 repo_name = dbrepo.repo_name
409 repo_name = dbrepo.repo_name
410 try:
410 try:
411 # TODO: we need to make sure those operations call proper hooks !
411 # TODO: we need to make sure those operations call proper hooks !
412 repo.fetch(remote_uri)
412 repo.fetch(remote_uri, **kwargs)
413
413
414 self.mark_for_invalidation(repo_name)
414 self.mark_for_invalidation(repo_name)
415 except Exception:
415 except Exception:
416 log.error(traceback.format_exc())
416 log.error(traceback.format_exc())
417 raise
417 raise
418
418
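# Usage sketch for the keyword pass-through added above (illustrative values;
# the extra keyword name is an assumption based on this changeset's intent):
#
#     ScmModel().pull_changes(
#         'some/repo',               # repo name or db object, resolved via _get_repo()
#         username='admin',
#         remote_uri=None,           # falls back to the repository clone_uri
#         sync_large_objects=True,   # forwarded unchanged through **kwargs to repo.fetch()
#     )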
419 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
419 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
420 dbrepo = self._get_repo(repo)
420 dbrepo = self._get_repo(repo)
421 remote_uri = remote_uri or dbrepo.push_uri
421 remote_uri = remote_uri or dbrepo.push_uri
422 if not remote_uri:
422 if not remote_uri:
423 raise Exception("This repository doesn't have a push uri")
423 raise Exception("This repository doesn't have a push uri")
424
424
425 repo = dbrepo.scm_instance(cache=False)
425 repo = dbrepo.scm_instance(cache=False)
426 repo.config.clear_section('hooks')
426 repo.config.clear_section('hooks')
427
427
428 try:
428 try:
429 # NOTE(marcink): add extra validation so we skip invalid urls
429 # NOTE(marcink): add extra validation so we skip invalid urls
430 # because these tasks can be executed via the scheduler without
430 # because these tasks can be executed via the scheduler without
431 # proper validation of remote_uri
431 # proper validation of remote_uri
432 if validate_uri:
432 if validate_uri:
433 config = make_db_config(clear_session=False)
433 config = make_db_config(clear_session=False)
434 url_validator(remote_uri, dbrepo.repo_type, config)
434 url_validator(remote_uri, dbrepo.repo_type, config)
435 except InvalidCloneUrl:
435 except InvalidCloneUrl:
436 raise
436 raise
437
437
438 try:
438 try:
439 repo.push(remote_uri)
439 repo.push(remote_uri)
440 except Exception:
440 except Exception:
441 log.error(traceback.format_exc())
441 log.error(traceback.format_exc())
442 raise
442 raise
443
443
444 def commit_change(self, repo, repo_name, commit, user, author, message,
444 def commit_change(self, repo, repo_name, commit, user, author, message,
445 content: bytes, f_path: bytes, branch: str = None):
445 content: bytes, f_path: bytes, branch: str = None):
446 """
446 """
447 Commits changes
447 Commits changes
448 """
448 """
449 user = self._get_user(user)
449 user = self._get_user(user)
450
450
451 # message and author need to be unicode
451 # message and author need to be unicode
452 # proper backend should then translate that into required type
452 # proper backend should then translate that into required type
453 message = safe_str(message)
453 message = safe_str(message)
454 author = safe_str(author)
454 author = safe_str(author)
455 imc = repo.in_memory_commit
455 imc = repo.in_memory_commit
456 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
456 imc.change(FileNode(f_path, content, mode=commit.get_file_mode(f_path)))
457 try:
457 try:
458 # TODO: handle pre-push action !
458 # TODO: handle pre-push action !
459 tip = imc.commit(
459 tip = imc.commit(
460 message=message, author=author, parents=[commit],
460 message=message, author=author, parents=[commit],
461 branch=branch or commit.branch)
461 branch=branch or commit.branch)
462 except Exception as e:
462 except Exception as e:
463 log.error(traceback.format_exc())
463 log.error(traceback.format_exc())
464 raise IMCCommitError(str(e))
464 raise IMCCommitError(str(e))
465 finally:
465 finally:
466 # always clear caches; if the commit fails we still want a fresh object
467 self.mark_for_invalidation(repo_name)
467 self.mark_for_invalidation(repo_name)
468
468
469 # We trigger the post-push action
469 # We trigger the post-push action
470 hooks_utils.trigger_post_push_hook(
470 hooks_utils.trigger_post_push_hook(
471 username=user.username, action='push_local', hook_type='post_push',
471 username=user.username, action='push_local', hook_type='post_push',
472 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
472 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
473 return tip
473 return tip
474
474
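
# --- Editor's note: illustrative usage sketch, not part of this file -------
# commit_change rewrites a single file on top of an existing commit through
# the in-memory-commit API and then fires the post_push hook. A hypothetical
# call; `db_repo` and the 'admin' user are assumed to exist:

from rhodecode.model.db import Repository
from rhodecode.model.scm import ScmModel

db_repo = Repository.get_by_repo_name('example-repo')
scm_repo = db_repo.scm_instance(cache=False)
parent = scm_repo.get_commit()            # tip commit to build on
new_tip = ScmModel().commit_change(
    repo=scm_repo, repo_name=db_repo.repo_name, commit=parent,
    user='admin', author='Admin <admin@example.com>',
    message='update README', content=b'new content\n', f_path=b'README.rst',
    branch=None)                          # defaults to the parent's branch
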
475 def _sanitize_path(self, f_path: bytes):
475 def _sanitize_path(self, f_path: bytes):
476 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
476 if f_path.startswith(b'/') or f_path.startswith(b'./') or b'../' in f_path:
477 raise NonRelativePathError(b'%b is not a relative path' % f_path)
478 if f_path:
478 if f_path:
479 f_path = os.path.normpath(f_path)
479 f_path = os.path.normpath(f_path)
480 return f_path
480 return f_path
481
481
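
# --- Editor's note: illustrative sketch, not part of this file -------------
# _sanitize_path rejects absolute and parent-relative paths and normalizes
# everything else; a tiny demonstration of the behaviour implemented above:

from rhodecode.model.scm import ScmModel

model = ScmModel()
assert model._sanitize_path(b'docs/./index.rst') == b'docs/index.rst'
try:
    model._sanitize_path(b'../etc/passwd')    # parent-relative paths are refused
except Exception:                             # NonRelativePathError in the code above
    pass
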
482 def get_dirnode_metadata(self, request, commit, dir_node):
482 def get_dirnode_metadata(self, request, commit, dir_node):
483 if not dir_node.is_dir():
483 if not dir_node.is_dir():
484 return []
484 return []
485
485
486 data = []
486 data = []
487 for node in dir_node:
487 for node in dir_node:
488 if not node.is_file():
488 if not node.is_file():
489 # we only want file nodes here, skip anything else
490 continue
490 continue
491
491
492 last_commit = node.last_commit
492 last_commit = node.last_commit
493 last_commit_date = last_commit.date
493 last_commit_date = last_commit.date
494 data.append({
494 data.append({
495 'name': node.name,
495 'name': node.name,
496 'size': h.format_byte_size_binary(node.size),
496 'size': h.format_byte_size_binary(node.size),
497 'modified_at': h.format_date(last_commit_date),
497 'modified_at': h.format_date(last_commit_date),
498 'modified_ts': last_commit_date.isoformat(),
498 'modified_ts': last_commit_date.isoformat(),
499 'revision': last_commit.revision,
499 'revision': last_commit.revision,
500 'short_id': last_commit.short_id,
500 'short_id': last_commit.short_id,
501 'message': h.escape(last_commit.message),
501 'message': h.escape(last_commit.message),
502 'author': h.escape(last_commit.author),
502 'author': h.escape(last_commit.author),
503 'user_profile': h.gravatar_with_user(
503 'user_profile': h.gravatar_with_user(
504 request, last_commit.author),
504 request, last_commit.author),
505 })
505 })
506
506
507 return data
507 return data
508
508
509 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
509 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
510 extended_info=False, content=False, max_file_bytes=None):
510 extended_info=False, content=False, max_file_bytes=None):
511 """
511 """
512 recursively walk the root dir and return all paths found in that dir,
513 based on the repository walk function
514
514
515 :param repo_name: name of repository
515 :param repo_name: name of repository
516 :param commit_id: commit id for which to list nodes
516 :param commit_id: commit id for which to list nodes
517 :param root_path: root path to list
517 :param root_path: root path to list
518 :param flat: return as a list, if False returns a dict with description
518 :param flat: return as a list, if False returns a dict with description
519 :param extended_info: show additional info such as md5, binary, size etc
519 :param extended_info: show additional info such as md5, binary, size etc
520 :param content: add nodes content to the return data
520 :param content: add nodes content to the return data
521 :param max_file_bytes: will not return file contents over this limit
521 :param max_file_bytes: will not return file contents over this limit
522
522
523 """
523 """
524 _files = list()
524 _files = list()
525 _dirs = list()
525 _dirs = list()
526
526
527 try:
527 try:
528 _repo = self._get_repo(repo_name)
528 _repo = self._get_repo(repo_name)
529 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
529 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
530 root_path = root_path.lstrip('/')
530 root_path = root_path.lstrip('/')
531
531
532 # get RootNode, inject pre-load options before walking
532 # get RootNode, inject pre-load options before walking
533 top_node = commit.get_node(root_path)
533 top_node = commit.get_node(root_path)
534 extended_info_pre_load = []
534 extended_info_pre_load = []
535 if extended_info:
535 if extended_info:
536 extended_info_pre_load += ['md5']
536 extended_info_pre_load += ['md5']
537 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
537 top_node.default_pre_load = ['is_binary', 'size'] + extended_info_pre_load
538
538
539 for __, dirs, files in commit.walk(top_node):
539 for __, dirs, files in commit.walk(top_node):
540
540
541 for f in files:
541 for f in files:
542 _content = None
542 _content = None
543 _data = f_name = f.str_path
543 _data = f_name = f.str_path
544
544
545 if not flat:
545 if not flat:
546 _data = {
546 _data = {
547 "name": h.escape(f_name),
547 "name": h.escape(f_name),
548 "type": "file",
548 "type": "file",
549 }
549 }
550 if extended_info:
550 if extended_info:
551 _data.update({
551 _data.update({
552 "md5": f.md5,
552 "md5": f.md5,
553 "binary": f.is_binary,
553 "binary": f.is_binary,
554 "size": f.size,
554 "size": f.size,
555 "extension": f.extension,
555 "extension": f.extension,
556 "mimetype": f.mimetype,
556 "mimetype": f.mimetype,
557 "lines": f.lines()[0]
557 "lines": f.lines()[0]
558 })
558 })
559
559
560 if content:
560 if content:
561 over_size_limit = (max_file_bytes is not None
561 over_size_limit = (max_file_bytes is not None
562 and f.size > max_file_bytes)
562 and f.size > max_file_bytes)
563 full_content = None
563 full_content = None
564 if not f.is_binary and not over_size_limit:
564 if not f.is_binary and not over_size_limit:
565 full_content = f.str_content
565 full_content = f.str_content
566
566
567 _data.update({
567 _data.update({
568 "content": full_content,
568 "content": full_content,
569 })
569 })
570 _files.append(_data)
570 _files.append(_data)
571
571
572 for d in dirs:
572 for d in dirs:
573 _data = d_name = d.str_path
573 _data = d_name = d.str_path
574 if not flat:
574 if not flat:
575 _data = {
575 _data = {
576 "name": h.escape(d_name),
576 "name": h.escape(d_name),
577 "type": "dir",
577 "type": "dir",
578 }
578 }
579 if extended_info:
579 if extended_info:
580 _data.update({
580 _data.update({
581 "md5": "",
581 "md5": "",
582 "binary": False,
582 "binary": False,
583 "size": 0,
583 "size": 0,
584 "extension": "",
584 "extension": "",
585 })
585 })
586 if content:
586 if content:
587 _data.update({
587 _data.update({
588 "content": None
588 "content": None
589 })
589 })
590 _dirs.append(_data)
590 _dirs.append(_data)
591 except RepositoryError:
591 except RepositoryError:
592 log.exception("Exception in get_nodes")
592 log.exception("Exception in get_nodes")
593 raise
593 raise
594
594
595 return _dirs, _files
595 return _dirs, _files
596
596
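
# --- Editor's note: illustrative usage sketch, not part of this file -------
# get_nodes walks a commit's tree and returns (dirs, files); with flat=True
# entries are plain paths, otherwise dicts, optionally enriched with metadata
# and size-capped content. A hypothetical call:

from rhodecode.model.scm import ScmModel

dirs, files = ScmModel().get_nodes(
    'example-repo',
    commit_id='tip',              # or a full commit hash
    root_path='docs',
    flat=False, extended_info=True, content=True,
    max_file_bytes=1024 * 1024)   # skip content of files larger than 1 MB
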
597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
598 """
598 """
599 Generate files for quick filter in files view
599 Generate files for quick filter in files view
600 """
600 """
601
601
602 _files = list()
602 _files = list()
603 _dirs = list()
603 _dirs = list()
604 try:
604 try:
605 _repo = self._get_repo(repo_name)
605 _repo = self._get_repo(repo_name)
606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
607 root_path = root_path.lstrip('/')
607 root_path = root_path.lstrip('/')
608
608
609 top_node = commit.get_node(root_path)
609 top_node = commit.get_node(root_path)
610 top_node.default_pre_load = []
610 top_node.default_pre_load = []
611
611
612 for __, dirs, files in commit.walk(top_node):
612 for __, dirs, files in commit.walk(top_node):
613 for f in files:
613 for f in files:
614
614
615 _data = {
615 _data = {
616 "name": h.escape(f.str_path),
616 "name": h.escape(f.str_path),
617 "type": "file",
617 "type": "file",
618 }
618 }
619
619
620 _files.append(_data)
620 _files.append(_data)
621
621
622 for d in dirs:
622 for d in dirs:
623
623
624 _data = {
624 _data = {
625 "name": h.escape(d.str_path),
625 "name": h.escape(d.str_path),
626 "type": "dir",
626 "type": "dir",
627 }
627 }
628
628
629 _dirs.append(_data)
629 _dirs.append(_data)
630 except RepositoryError:
630 except RepositoryError:
631 log.exception("Exception in get_quick_filter_nodes")
631 log.exception("Exception in get_quick_filter_nodes")
632 raise
632 raise
633
633
634 return _dirs, _files
634 return _dirs, _files
635
635
636 def get_node(self, repo_name, commit_id, file_path,
636 def get_node(self, repo_name, commit_id, file_path,
637 extended_info=False, content=False, max_file_bytes=None, cache=True):
637 extended_info=False, content=False, max_file_bytes=None, cache=True):
638 """
638 """
639 retrieve single node from commit
639 retrieve single node from commit
640 """
640 """
641
641
642 try:
642 try:
643
643
644 _repo = self._get_repo(repo_name)
644 _repo = self._get_repo(repo_name)
645 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
645 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
646
646
647 file_node = commit.get_node(file_path)
647 file_node = commit.get_node(file_path)
648 if file_node.is_dir():
648 if file_node.is_dir():
649 raise RepositoryError('The given path is a directory')
649 raise RepositoryError('The given path is a directory')
650
650
651 _content = None
651 _content = None
652 f_name = file_node.str_path
652 f_name = file_node.str_path
653
653
654 file_data = {
654 file_data = {
655 "name": h.escape(f_name),
655 "name": h.escape(f_name),
656 "type": "file",
656 "type": "file",
657 }
657 }
658
658
659 if extended_info:
659 if extended_info:
660 file_data.update({
660 file_data.update({
661 "extension": file_node.extension,
661 "extension": file_node.extension,
662 "mimetype": file_node.mimetype,
662 "mimetype": file_node.mimetype,
663 })
663 })
664
664
665 if cache:
665 if cache:
666 md5 = file_node.md5
666 md5 = file_node.md5
667 is_binary = file_node.is_binary
667 is_binary = file_node.is_binary
668 size = file_node.size
668 size = file_node.size
669 else:
669 else:
670 is_binary, md5, size, _content = file_node.metadata_uncached()
670 is_binary, md5, size, _content = file_node.metadata_uncached()
671
671
672 file_data.update({
672 file_data.update({
673 "md5": md5,
673 "md5": md5,
674 "binary": is_binary,
674 "binary": is_binary,
675 "size": size,
675 "size": size,
676 })
676 })
677
677
678 if content and cache:
678 if content and cache:
679 # get content + cache
679 # get content + cache
680 size = file_node.size
680 size = file_node.size
681 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
681 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
682 full_content = None
682 full_content = None
683 all_lines = 0
683 all_lines = 0
684 if not file_node.is_binary and not over_size_limit:
684 if not file_node.is_binary and not over_size_limit:
685 full_content = safe_str(file_node.content)
685 full_content = safe_str(file_node.content)
686 all_lines, empty_lines = file_node.count_lines(full_content)
686 all_lines, empty_lines = file_node.count_lines(full_content)
687
687
688 file_data.update({
688 file_data.update({
689 "content": full_content,
689 "content": full_content,
690 "lines": all_lines
690 "lines": all_lines
691 })
691 })
692 elif content:
692 elif content:
693 # get content *without* cache
693 # get content *without* cache
694 if _content is None:
694 if _content is None:
695 is_binary, md5, size, _content = file_node.metadata_uncached()
695 is_binary, md5, size, _content = file_node.metadata_uncached()
696
696
697 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
697 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
698 full_content = None
698 full_content = None
699 all_lines = 0
699 all_lines = 0
700 if not is_binary and not over_size_limit:
700 if not is_binary and not over_size_limit:
701 full_content = safe_str(_content)
701 full_content = safe_str(_content)
702 all_lines, empty_lines = file_node.count_lines(full_content)
702 all_lines, empty_lines = file_node.count_lines(full_content)
703
703
704 file_data.update({
704 file_data.update({
705 "content": full_content,
705 "content": full_content,
706 "lines": all_lines
706 "lines": all_lines
707 })
707 })
708
708
709 except RepositoryError:
709 except RepositoryError:
710 log.exception("Exception in get_node")
710 log.exception("Exception in get_node")
711 raise
711 raise
712
712
713 return file_data
713 return file_data
714
714
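
# --- Editor's note: illustrative usage sketch, not part of this file -------
# get_node returns metadata (and optionally content) for a single file;
# cache=False bypasses the cached md5/size/binary detection. A hypothetical
# call:

from rhodecode.model.scm import ScmModel

file_data = ScmModel().get_node(
    'example-repo', commit_id='tip', file_path='README.rst',
    extended_info=True, content=True, max_file_bytes=64 * 1024, cache=True)
print(file_data['name'], file_data['size'], file_data['lines'])
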
715 def get_fts_data(self, repo_name, commit_id, root_path='/'):
715 def get_fts_data(self, repo_name, commit_id, root_path='/'):
716 """
716 """
717 Fetch node tree for usage in full text search
717 Fetch node tree for usage in full text search
718 """
718 """
719
719
720 tree_info = list()
720 tree_info = list()
721
721
722 try:
722 try:
723 _repo = self._get_repo(repo_name)
723 _repo = self._get_repo(repo_name)
724 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
724 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
725 root_path = root_path.lstrip('/')
725 root_path = root_path.lstrip('/')
726 top_node = commit.get_node(root_path)
726 top_node = commit.get_node(root_path)
727 top_node.default_pre_load = []
727 top_node.default_pre_load = []
728
728
729 for __, dirs, files in commit.walk(top_node):
729 for __, dirs, files in commit.walk(top_node):
730
730
731 for f in files:
731 for f in files:
732 is_binary, md5, size, _content = f.metadata_uncached()
732 is_binary, md5, size, _content = f.metadata_uncached()
733 _data = {
733 _data = {
734 "name": f.str_path,
734 "name": f.str_path,
735 "md5": md5,
735 "md5": md5,
736 "extension": f.extension,
736 "extension": f.extension,
737 "binary": is_binary,
737 "binary": is_binary,
738 "size": size
738 "size": size
739 }
739 }
740
740
741 tree_info.append(_data)
741 tree_info.append(_data)
742
742
743 except RepositoryError:
743 except RepositoryError:
744 log.exception("Exception in get_fts_data")
745 raise
745 raise
746
746
747 return tree_info
747 return tree_info
748
748
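
# --- Editor's note: illustrative usage sketch, not part of this file -------
# get_fts_data emits one uncached metadata record per file, which the full
# text search indexer can consume. A hypothetical call:

from rhodecode.model.scm import ScmModel

for entry in ScmModel().get_fts_data('example-repo', commit_id='tip'):
    print(entry['name'], entry['md5'], entry['size'], entry['binary'])
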
749 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
749 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
750 author=None, trigger_push_hook=True):
750 author=None, trigger_push_hook=True):
751 """
751 """
752 Commits given multiple nodes into repo
752 Commits given multiple nodes into repo
753
753
754 :param user: RhodeCode User object or user_id, the committer
755 :param repo: RhodeCode Repository object
755 :param repo: RhodeCode Repository object
756 :param message: commit message
756 :param message: commit message
757 :param nodes: mapping {filename:{'content':content},...}
757 :param nodes: mapping {filename:{'content':content},...}
758 :param parent_commit: parent commit; can be empty, in which case this is
759 the initial commit
760 :param author: author of the commit, can be different than the committer
761 (git only)
762 :param trigger_push_hook: trigger push hooks
762 :param trigger_push_hook: trigger push hooks
763
763
764 :returns: new committed commit
764 :returns: new committed commit
765 """
765 """
766
766
767 user = self._get_user(user)
767 user = self._get_user(user)
768 scm_instance = repo.scm_instance(cache=False)
768 scm_instance = repo.scm_instance(cache=False)
769
769
770 message = safe_str(message)
770 message = safe_str(message)
771 commiter = user.full_contact
771 commiter = user.full_contact
772 author = safe_str(author) if author else commiter
772 author = safe_str(author) if author else commiter
773
773
774 imc = scm_instance.in_memory_commit
774 imc = scm_instance.in_memory_commit
775
775
776 if not parent_commit:
776 if not parent_commit:
777 parent_commit = EmptyCommit(alias=scm_instance.alias)
777 parent_commit = EmptyCommit(alias=scm_instance.alias)
778
778
779 if isinstance(parent_commit, EmptyCommit):
779 if isinstance(parent_commit, EmptyCommit):
780 # EmptyCommit means we're editing an empty repository
781 parents = None
781 parents = None
782 else:
782 else:
783 parents = [parent_commit]
783 parents = [parent_commit]
784
784
785 upload_file_types = (io.BytesIO, io.BufferedRandom)
785 upload_file_types = (io.BytesIO, io.BufferedRandom)
786 processed_nodes = []
786 processed_nodes = []
787 for filename, content_dict in nodes.items():
787 for filename, content_dict in nodes.items():
788 if not isinstance(filename, bytes):
788 if not isinstance(filename, bytes):
789 raise ValueError(f'filename key in nodes needs to be bytes, or {upload_file_types}')
790 content = content_dict['content']
790 content = content_dict['content']
791 if not isinstance(content, upload_file_types + (bytes,)):
791 if not isinstance(content, upload_file_types + (bytes,)):
792 raise ValueError('content key value in nodes needs to be bytes')
792 raise ValueError('content key value in nodes needs to be bytes')
793
793
794 for f_path in nodes:
794 for f_path in nodes:
795 f_path = self._sanitize_path(f_path)
795 f_path = self._sanitize_path(f_path)
796 content = nodes[f_path]['content']
796 content = nodes[f_path]['content']
797
797
798 # decoding here will force that we have proper encoded values
798 # decoding here will force that we have proper encoded values
799 # in any other case this will throw exceptions and deny commit
799 # in any other case this will throw exceptions and deny commit
800
800
801 if isinstance(content, bytes):
801 if isinstance(content, bytes):
802 pass
802 pass
803 elif isinstance(content, upload_file_types):
803 elif isinstance(content, upload_file_types):
804 content = content.read()
804 content = content.read()
805 else:
805 else:
806 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
806 raise Exception(f'Content is of unrecognized type {type(content)}, expected {upload_file_types}')
807 processed_nodes.append((f_path, content))
807 processed_nodes.append((f_path, content))
808
808
809 # add multiple nodes
809 # add multiple nodes
810 for path, content in processed_nodes:
810 for path, content in processed_nodes:
811 imc.add(FileNode(path, content=content))
811 imc.add(FileNode(path, content=content))
812
812
813 # TODO: handle pre push scenario
813 # TODO: handle pre push scenario
814 tip = imc.commit(message=message,
814 tip = imc.commit(message=message,
815 author=author,
815 author=author,
816 parents=parents,
816 parents=parents,
817 branch=parent_commit.branch)
817 branch=parent_commit.branch)
818
818
819 self.mark_for_invalidation(repo.repo_name)
819 self.mark_for_invalidation(repo.repo_name)
820 if trigger_push_hook:
820 if trigger_push_hook:
821 hooks_utils.trigger_post_push_hook(
821 hooks_utils.trigger_post_push_hook(
822 username=user.username, action='push_local',
822 username=user.username, action='push_local',
823 repo_name=repo.repo_name, repo_type=scm_instance.alias,
823 repo_name=repo.repo_name, repo_type=scm_instance.alias,
824 hook_type='post_push',
824 hook_type='post_push',
825 commit_ids=[tip.raw_id])
825 commit_ids=[tip.raw_id])
826 return tip
826 return tip
827
827
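
# --- Editor's note: illustrative usage sketch, not part of this file -------
# create_nodes expects bytes filenames mapped to {'content': bytes or
# file-like} and commits them in one go. A hypothetical call creating two
# files on top of the current tip:

from rhodecode.model.db import Repository
from rhodecode.model.scm import ScmModel

db_repo = Repository.get_by_repo_name('example-repo')
scm = db_repo.scm_instance(cache=False)
parent = None if scm.is_empty() else scm.get_commit()   # None -> initial commit
tip = ScmModel().create_nodes(
    user='admin', repo=db_repo, message='add docs',
    nodes={
        b'docs/index.rst': {'content': b'Welcome\n'},
        b'docs/usage.rst': {'content': b'Usage\n'},
    },
    parent_commit=parent,
    trigger_push_hook=True)
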
828 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
828 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
829 author=None, trigger_push_hook=True):
829 author=None, trigger_push_hook=True):
830 user = self._get_user(user)
830 user = self._get_user(user)
831 scm_instance = repo.scm_instance(cache=False)
831 scm_instance = repo.scm_instance(cache=False)
832
832
833 message = safe_str(message)
833 message = safe_str(message)
834 commiter = user.full_contact
834 commiter = user.full_contact
835 author = safe_str(author) if author else commiter
835 author = safe_str(author) if author else commiter
836
836
837 imc = scm_instance.in_memory_commit
837 imc = scm_instance.in_memory_commit
838
838
839 if not parent_commit:
839 if not parent_commit:
840 parent_commit = EmptyCommit(alias=scm_instance.alias)
840 parent_commit = EmptyCommit(alias=scm_instance.alias)
841
841
842 if isinstance(parent_commit, EmptyCommit):
842 if isinstance(parent_commit, EmptyCommit):
843 # EmptyCommit means we're editing an empty repository
844 parents = None
844 parents = None
845 else:
845 else:
846 parents = [parent_commit]
846 parents = [parent_commit]
847
847
848 # add multiple nodes
848 # add multiple nodes
849 for _filename, data in nodes.items():
849 for _filename, data in nodes.items():
850 # new filename, which can be renamed from the old one; also sanitize the
851 # path to guard against relative-path tricks like ../../ etc.
852 filename = self._sanitize_path(data['filename'])
852 filename = self._sanitize_path(data['filename'])
853 old_filename = self._sanitize_path(_filename)
853 old_filename = self._sanitize_path(_filename)
854 content = data['content']
854 content = data['content']
855 file_mode = data.get('mode')
855 file_mode = data.get('mode')
856 filenode = FileNode(old_filename, content=content, mode=file_mode)
856 filenode = FileNode(old_filename, content=content, mode=file_mode)
857 op = data['op']
857 op = data['op']
858 if op == 'add':
858 if op == 'add':
859 imc.add(filenode)
859 imc.add(filenode)
860 elif op == 'del':
860 elif op == 'del':
861 imc.remove(filenode)
861 imc.remove(filenode)
862 elif op == 'mod':
862 elif op == 'mod':
863 if filename != old_filename:
863 if filename != old_filename:
864 # TODO: handle renames more efficiently, needs vcs lib changes
865 imc.remove(filenode)
865 imc.remove(filenode)
866 imc.add(FileNode(filename, content=content, mode=file_mode))
866 imc.add(FileNode(filename, content=content, mode=file_mode))
867 else:
867 else:
868 imc.change(filenode)
868 imc.change(filenode)
869
869
870 try:
870 try:
871 # TODO: handle pre push scenario commit changes
871 # TODO: handle pre push scenario commit changes
872 tip = imc.commit(message=message,
872 tip = imc.commit(message=message,
873 author=author,
873 author=author,
874 parents=parents,
874 parents=parents,
875 branch=parent_commit.branch)
875 branch=parent_commit.branch)
876 except NodeNotChangedError:
876 except NodeNotChangedError:
877 raise
877 raise
878 except Exception as e:
878 except Exception as e:
879 log.exception("Unexpected exception during call to imc.commit")
879 log.exception("Unexpected exception during call to imc.commit")
880 raise IMCCommitError(str(e))
880 raise IMCCommitError(str(e))
881 finally:
881 finally:
882 # always clear caches; if the commit fails we still want a fresh object
883 self.mark_for_invalidation(repo.repo_name)
883 self.mark_for_invalidation(repo.repo_name)
884
884
885 if trigger_push_hook:
885 if trigger_push_hook:
886 hooks_utils.trigger_post_push_hook(
886 hooks_utils.trigger_post_push_hook(
887 username=user.username, action='push_local', hook_type='post_push',
887 username=user.username, action='push_local', hook_type='post_push',
888 repo_name=repo.repo_name, repo_type=scm_instance.alias,
888 repo_name=repo.repo_name, repo_type=scm_instance.alias,
889 commit_ids=[tip.raw_id])
889 commit_ids=[tip.raw_id])
890
890
891 return tip
891 return tip
892
892
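
# --- Editor's note: illustrative usage sketch, not part of this file -------
# update_nodes takes a mapping keyed by the *old* filename; each entry names
# the operation ('add', 'del' or 'mod'), the (possibly renamed) target
# filename and the new content. A hypothetical call editing one file and
# renaming another:

from rhodecode.model.db import Repository
from rhodecode.model.scm import ScmModel

db_repo = Repository.get_by_repo_name('example-repo')
tip = ScmModel().update_nodes(
    user='admin', repo=db_repo, message='edit and rename',
    nodes={
        b'README.rst': {'op': 'mod', 'filename': b'README.rst',
                        'content': b'updated text\n'},
        b'old_name.txt': {'op': 'mod', 'filename': b'new_name.txt',
                          'content': b'unchanged content\n'},
    },
    parent_commit=db_repo.scm_instance().get_commit())
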
893 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
893 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
894 author=None, trigger_push_hook=True):
894 author=None, trigger_push_hook=True):
895 """
895 """
896 Deletes the given nodes from `repo`
897
897
898 :param user: RhodeCode User object or user_id, the committer
898 :param user: RhodeCode User object or user_id, the committer
899 :param repo: RhodeCode Repository object
899 :param repo: RhodeCode Repository object
900 :param message: commit message
900 :param message: commit message
901 :param nodes: mapping {filename:{'content':content},...}
901 :param nodes: mapping {filename:{'content':content},...}
902 :param parent_commit: parent commit; can be empty, in which case this is
903 the initial commit
904 :param author: author of the commit, can be different than the committer
905 (git only)
906 :param trigger_push_hook: trigger push hooks
906 :param trigger_push_hook: trigger push hooks
907
907
908 :returns: new commit after deletion
908 :returns: new commit after deletion
909 """
909 """
910
910
911 user = self._get_user(user)
911 user = self._get_user(user)
912 scm_instance = repo.scm_instance(cache=False)
912 scm_instance = repo.scm_instance(cache=False)
913
913
914 processed_nodes = []
914 processed_nodes = []
915 for f_path in nodes:
915 for f_path in nodes:
916 f_path = self._sanitize_path(f_path)
916 f_path = self._sanitize_path(f_path)
917 # content can be empty, but for compatibility this accepts the same dict
918 # structure as add_nodes
919 content = nodes[f_path].get('content')
919 content = nodes[f_path].get('content')
920 processed_nodes.append((safe_bytes(f_path), content))
920 processed_nodes.append((safe_bytes(f_path), content))
921
921
922 message = safe_str(message)
922 message = safe_str(message)
923 commiter = user.full_contact
923 commiter = user.full_contact
924 author = safe_str(author) if author else commiter
924 author = safe_str(author) if author else commiter
925
925
926 imc = scm_instance.in_memory_commit
926 imc = scm_instance.in_memory_commit
927
927
928 if not parent_commit:
928 if not parent_commit:
929 parent_commit = EmptyCommit(alias=scm_instance.alias)
929 parent_commit = EmptyCommit(alias=scm_instance.alias)
930
930
931 if isinstance(parent_commit, EmptyCommit):
931 if isinstance(parent_commit, EmptyCommit):
932 # EmptyCommit means we're editing an empty repository
933 parents = None
933 parents = None
934 else:
934 else:
935 parents = [parent_commit]
935 parents = [parent_commit]
936 # add multiple nodes
936 # add multiple nodes
937 for path, content in processed_nodes:
937 for path, content in processed_nodes:
938 imc.remove(FileNode(path, content=content))
938 imc.remove(FileNode(path, content=content))
939
939
940 # TODO: handle pre push scenario
940 # TODO: handle pre push scenario
941 tip = imc.commit(message=message,
941 tip = imc.commit(message=message,
942 author=author,
942 author=author,
943 parents=parents,
943 parents=parents,
944 branch=parent_commit.branch)
944 branch=parent_commit.branch)
945
945
946 self.mark_for_invalidation(repo.repo_name)
946 self.mark_for_invalidation(repo.repo_name)
947 if trigger_push_hook:
947 if trigger_push_hook:
948 hooks_utils.trigger_post_push_hook(
948 hooks_utils.trigger_post_push_hook(
949 username=user.username, action='push_local', hook_type='post_push',
949 username=user.username, action='push_local', hook_type='post_push',
950 repo_name=repo.repo_name, repo_type=scm_instance.alias,
950 repo_name=repo.repo_name, repo_type=scm_instance.alias,
951 commit_ids=[tip.raw_id])
951 commit_ids=[tip.raw_id])
952 return tip
952 return tip
953
953
954 def strip(self, repo, commit_id, branch):
954 def strip(self, repo, commit_id, branch):
955 scm_instance = repo.scm_instance(cache=False)
955 scm_instance = repo.scm_instance(cache=False)
956 scm_instance.config.clear_section('hooks')
956 scm_instance.config.clear_section('hooks')
957 scm_instance.strip(commit_id, branch)
957 scm_instance.strip(commit_id, branch)
958 self.mark_for_invalidation(repo.repo_name)
958 self.mark_for_invalidation(repo.repo_name)
959
959
960 def get_unread_journal(self):
960 def get_unread_journal(self):
961 return self.sa.query(UserLog).count()
961 return self.sa.query(UserLog).count()
962
962
963 @classmethod
963 @classmethod
964 def backend_landing_ref(cls, repo_type):
964 def backend_landing_ref(cls, repo_type):
965 """
965 """
966 Return a default landing ref based on a repository type.
966 Return a default landing ref based on a repository type.
967 """
967 """
968
968
969 landing_ref = {
969 landing_ref = {
970 'hg': ('branch:default', 'default'),
970 'hg': ('branch:default', 'default'),
971 'git': ('branch:master', 'master'),
971 'git': ('branch:master', 'master'),
972 'svn': ('rev:tip', 'latest tip'),
972 'svn': ('rev:tip', 'latest tip'),
973 'default': ('rev:tip', 'latest tip'),
973 'default': ('rev:tip', 'latest tip'),
974 }
974 }
975
975
976 return landing_ref.get(repo_type) or landing_ref['default']
976 return landing_ref.get(repo_type) or landing_ref['default']
977
977
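
# --- Editor's note: illustrative sketch, not part of this file -------------
# backend_landing_ref maps a repository type to its default landing ref and
# label, falling back to 'rev:tip' for unknown types, as the mapping above shows:

from rhodecode.model.scm import ScmModel

assert ScmModel.backend_landing_ref('hg') == ('branch:default', 'default')
assert ScmModel.backend_landing_ref('git') == ('branch:master', 'master')
assert ScmModel.backend_landing_ref('unknown') == ('rev:tip', 'latest tip')
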
978 def get_repo_landing_revs(self, translator, repo=None):
978 def get_repo_landing_revs(self, translator, repo=None):
979 """
979 """
980 Generates select options with tags, branches and bookmarks
981 (bookmarks for hg only), grouped by type
982
982
983 :param repo:
983 :param repo:
984 """
984 """
985 from rhodecode.lib.vcs.backends.git import GitRepository
985 from rhodecode.lib.vcs.backends.git import GitRepository
986
986
987 _ = translator
987 _ = translator
988 repo = self._get_repo(repo)
988 repo = self._get_repo(repo)
989
989
990 if repo:
990 if repo:
991 repo_type = repo.repo_type
991 repo_type = repo.repo_type
992 else:
992 else:
993 repo_type = 'default'
993 repo_type = 'default'
994
994
995 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
995 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
996
996
997 default_ref_options = [
997 default_ref_options = [
998 [default_landing_ref, landing_ref_lbl]
998 [default_landing_ref, landing_ref_lbl]
999 ]
999 ]
1000 default_choices = [
1000 default_choices = [
1001 default_landing_ref
1001 default_landing_ref
1002 ]
1002 ]
1003
1003
1004 if not repo:
1004 if not repo:
1005 # presented at NEW repo creation
1005 # presented at NEW repo creation
1006 return default_choices, default_ref_options
1006 return default_choices, default_ref_options
1007
1007
1008 repo = repo.scm_instance()
1008 repo = repo.scm_instance()
1009
1009
1010 ref_options = [(default_landing_ref, landing_ref_lbl)]
1010 ref_options = [(default_landing_ref, landing_ref_lbl)]
1011 choices = [default_landing_ref]
1011 choices = [default_landing_ref]
1012
1012
1013 # branches
1013 # branches
1014 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
1014 branch_group = [(f'branch:{safe_str(b)}', safe_str(b)) for b in repo.branches]
1015 if not branch_group:
1015 if not branch_group:
1016 # new repo, or perhaps a repo without any branch yet?
1017 branch_group = default_ref_options
1017 branch_group = default_ref_options
1018
1018
1019 branches_group = (branch_group, _("Branches"))
1019 branches_group = (branch_group, _("Branches"))
1020 ref_options.append(branches_group)
1020 ref_options.append(branches_group)
1021 choices.extend([x[0] for x in branches_group[0]])
1021 choices.extend([x[0] for x in branches_group[0]])
1022
1022
1023 # bookmarks for HG
1023 # bookmarks for HG
1024 if repo.alias == 'hg':
1024 if repo.alias == 'hg':
1025 bookmarks_group = (
1025 bookmarks_group = (
1026 [(f'book:{safe_str(b)}', safe_str(b))
1026 [(f'book:{safe_str(b)}', safe_str(b))
1027 for b in repo.bookmarks],
1027 for b in repo.bookmarks],
1028 _("Bookmarks"))
1028 _("Bookmarks"))
1029 ref_options.append(bookmarks_group)
1029 ref_options.append(bookmarks_group)
1030 choices.extend([x[0] for x in bookmarks_group[0]])
1030 choices.extend([x[0] for x in bookmarks_group[0]])
1031
1031
1032 # tags
1032 # tags
1033 tags_group = (
1033 tags_group = (
1034 [(f'tag:{safe_str(t)}', safe_str(t))
1034 [(f'tag:{safe_str(t)}', safe_str(t))
1035 for t in repo.tags],
1035 for t in repo.tags],
1036 _("Tags"))
1036 _("Tags"))
1037 ref_options.append(tags_group)
1037 ref_options.append(tags_group)
1038 choices.extend([x[0] for x in tags_group[0]])
1038 choices.extend([x[0] for x in tags_group[0]])
1039
1039
1040 return choices, ref_options
1040 return choices, ref_options
1041
1041
1042 def get_server_info(self, environ=None):
1042 def get_server_info(self, environ=None):
1043 server_info = get_system_info(environ)
1043 server_info = get_system_info(environ)
1044 return server_info
1044 return server_info