repos: cleanup and fix landing-ref code....
super-admin
r4852:07a18b11 default
@@ -1,203 +1,210 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23
24 24 from rhodecode.model.repo import RepoModel
25 from rhodecode.model.scm import ScmModel
25 26 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
26 27 from rhodecode.api.tests.utils import (
27 28 build_data, api_call, assert_error, assert_ok, crash, jsonify)
28 29 from rhodecode.tests.fixture import Fixture
29 30 from rhodecode.tests.plugin import plain_http_host_only_stub
30 31
31 32 fixture = Fixture()
32 33
33 34 UPDATE_REPO_NAME = 'api_update_me'
34 35
35 36
36 37 class SAME_AS_UPDATES(object):
37 38 """ Constant used for tests below """
38 39
39 40
40 41 @pytest.mark.usefixtures("testuser_api", "app")
41 42 class TestApiUpdateRepo(object):
42 43
43 44 @pytest.mark.parametrize("updates, expected", [
44 45 ({'owner': TEST_USER_REGULAR_LOGIN},
45 46 SAME_AS_UPDATES),
46 47
47 48 ({'description': 'new description'},
48 49 SAME_AS_UPDATES),
49 50
50 51 ({'clone_uri': 'http://foo.com/repo'},
51 52 SAME_AS_UPDATES),
52 53
53 54 ({'clone_uri': None},
54 55 {'clone_uri': ''}),
55 56
56 57 ({'clone_uri': ''},
57 58 {'clone_uri': ''}),
58 59
59 60 ({'clone_uri': 'http://example.com/repo_pull'},
60 61 {'clone_uri': 'http://example.com/repo_pull'}),
61 62
62 63 ({'push_uri': ''},
63 64 {'push_uri': ''}),
64 65
65 66 ({'push_uri': 'http://example.com/repo_push'},
66 67 {'push_uri': 'http://example.com/repo_push'}),
67 68
68 ({'landing_rev': 'rev:tip'},
69 {'landing_rev': ['rev', 'tip']}),
69 ({'landing_rev': None}, # auto-updated based on type of repo
70 {'landing_rev': [None, None]}),
70 71
71 72 ({'enable_statistics': True},
72 73 SAME_AS_UPDATES),
73 74
74 75 ({'enable_locking': True},
75 76 SAME_AS_UPDATES),
76 77
77 78 ({'enable_downloads': True},
78 79 SAME_AS_UPDATES),
79 80
80 81 ({'repo_name': 'new_repo_name'},
81 82 {
82 83 'repo_name': 'new_repo_name',
83 84 'url': 'http://{}/new_repo_name'.format(plain_http_host_only_stub())
84 85 }),
85 86
86 87 ({'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
87 88 '_group': 'test_group_for_update'},
88 89 {
89 90 'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
90 91 'url': 'http://{}/test_group_for_update/{}'.format(
91 92 plain_http_host_only_stub(), UPDATE_REPO_NAME)
92 93 }),
93 94 ])
94 95 def test_api_update_repo(self, updates, expected, backend):
95 96 repo_name = UPDATE_REPO_NAME
96 97 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
97 98 if updates.get('_group'):
98 99 fixture.create_repo_group(updates['_group'])
99 100
101 if 'landing_rev' in updates:
102 default_landing_ref, _lbl = ScmModel.backend_landing_ref(backend.alias)
103 _type, _name = default_landing_ref.split(':')
104 updates['landing_rev'] = default_landing_ref
105 expected['landing_rev'] = [_type, _name]
106
100 107 expected_api_data = repo.get_api_data(include_secrets=True)
101 108 if expected is SAME_AS_UPDATES:
102 109 expected_api_data.update(updates)
103 110 else:
104 111 expected_api_data.update(expected)
105 112
106 113 id_, params = build_data(
107 114 self.apikey, 'update_repo', repoid=repo_name, **updates)
108 115
109 116 with mock.patch('rhodecode.model.validation_schema.validators.url_validator'):
110 117 response = api_call(self.app, params)
111 118
112 119 if updates.get('repo_name'):
113 120 repo_name = updates['repo_name']
114 121
115 122 try:
116 123 expected = {
117 124 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
118 125 'repository': jsonify(expected_api_data)
119 126 }
120 127 assert_ok(id_, expected, given=response.body)
121 128 finally:
122 129 fixture.destroy_repo(repo_name)
123 130 if updates.get('_group'):
124 131 fixture.destroy_repo_group(updates['_group'])
125 132
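The landing_rev cases above rely on ScmModel.backend_landing_ref(backend.alias), whose implementation is not part of this hunk. A minimal sketch, assuming it returns a ('type:name' ref, display label) pair keyed by backend alias (values below are illustrative):

    # hypothetical stand-in for ScmModel.backend_landing_ref, for illustration only
    def backend_landing_ref(backend_alias):
        # assumed per-backend defaults; the real values live in ScmModel
        defaults = {
            'git': ('branch:master', 'master'),
            'hg': ('branch:default', 'default'),
            'svn': ('rev:tip', 'latest tip'),
        }
        return defaults.get(backend_alias, ('rev:tip', 'latest tip'))

The test then splits the returned ref on ':' to build the expected [type, name] pair.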
126 133 def test_api_update_repo_fork_of_field(self, backend):
127 134 master_repo = backend.create_repo()
128 135 repo = backend.create_repo()
129 136 updates = {
130 137 'fork_of': master_repo.repo_name,
131 138 'fork_of_id': master_repo.repo_id
132 139 }
133 140 expected_api_data = repo.get_api_data(include_secrets=True)
134 141 expected_api_data.update(updates)
135 142
136 143 id_, params = build_data(
137 144 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
138 145 response = api_call(self.app, params)
139 146 expected = {
140 147 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
141 148 'repository': jsonify(expected_api_data)
142 149 }
143 150 assert_ok(id_, expected, given=response.body)
144 151 result = response.json['result']['repository']
145 152 assert result['fork_of'] == master_repo.repo_name
146 153 assert result['fork_of_id'] == master_repo.repo_id
147 154
148 155 def test_api_update_repo_fork_of_not_found(self, backend):
149 156 master_repo_name = 'fake-parent-repo'
150 157 repo = backend.create_repo()
151 158 updates = {
152 159 'fork_of': master_repo_name
153 160 }
154 161 id_, params = build_data(
155 162 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
156 163 response = api_call(self.app, params)
157 164 expected = {
158 165 'repo_fork_of': 'Fork with id `{}` does not exists'.format(
159 166 master_repo_name)}
160 167 assert_error(id_, expected, given=response.body)
161 168
162 169 def test_api_update_repo_with_repo_group_not_existing(self):
163 170 repo_name = 'admin_owned'
164 171 fake_repo_group = 'test_group_for_update'
165 172 fixture.create_repo(repo_name)
166 173 updates = {'repo_name': '{}/{}'.format(fake_repo_group, repo_name)}
167 174 id_, params = build_data(
168 175 self.apikey, 'update_repo', repoid=repo_name, **updates)
169 176 response = api_call(self.app, params)
170 177 try:
171 178 expected = {
172 179 'repo_group': 'Repository group `{}` does not exist'.format(fake_repo_group)
173 180 }
174 181 assert_error(id_, expected, given=response.body)
175 182 finally:
176 183 fixture.destroy_repo(repo_name)
177 184
178 185 def test_api_update_repo_regular_user_not_allowed(self):
179 186 repo_name = 'admin_owned'
180 187 fixture.create_repo(repo_name)
181 188 updates = {'active': False}
182 189 id_, params = build_data(
183 190 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
184 191 response = api_call(self.app, params)
185 192 try:
186 193 expected = 'repository `%s` does not exist' % (repo_name,)
187 194 assert_error(id_, expected, given=response.body)
188 195 finally:
189 196 fixture.destroy_repo(repo_name)
190 197
191 198 @mock.patch.object(RepoModel, 'update', crash)
192 199 def test_api_update_repo_exception_occurred(self, backend):
193 200 repo_name = UPDATE_REPO_NAME
194 201 fixture.create_repo(repo_name, repo_type=backend.alias)
195 202 id_, params = build_data(
196 203 self.apikey, 'update_repo', repoid=repo_name,
197 204 owner=TEST_USER_ADMIN_LOGIN,)
198 205 response = api_call(self.app, params)
199 206 try:
200 207 expected = 'failed to update repo `%s`' % (repo_name,)
201 208 assert_error(id_, expected, given=response.body)
202 209 finally:
203 210 fixture.destroy_repo(repo_name)
@@ -1,1051 +1,1052 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT repository module
23 23 """
24 24
25 25 import logging
26 26 import os
27 27 import re
28 28
29 29 from zope.cachedescriptors.property import Lazy as LazyProperty
30 30
31 31 from rhodecode.lib.compat import OrderedDict
32 32 from rhodecode.lib.datelib import (
33 33 utcdate_fromtimestamp, makedate, date_astimestamp)
34 34 from rhodecode.lib.utils import safe_unicode, safe_str
35 35 from rhodecode.lib.utils2 import CachedProperty
36 36 from rhodecode.lib.vcs import connection, path as vcspath
37 37 from rhodecode.lib.vcs.backends.base import (
38 38 BaseRepository, CollectionGenerator, Config, MergeResponse,
39 39 MergeFailureReason, Reference)
40 40 from rhodecode.lib.vcs.backends.git.commit import GitCommit
41 41 from rhodecode.lib.vcs.backends.git.diff import GitDiff
42 42 from rhodecode.lib.vcs.backends.git.inmemory import GitInMemoryCommit
43 43 from rhodecode.lib.vcs.exceptions import (
44 44 CommitDoesNotExistError, EmptyRepositoryError,
45 45 RepositoryError, TagAlreadyExistError, TagDoesNotExistError, VCSError, UnresolvedFilesInRepo)
46 46
47 47
48 48 SHA_PATTERN = re.compile(r'^([0-9a-fA-F]{12}|[0-9a-fA-F]{40})$')  # short (12) or full (40) sha
49 49
50 50 log = logging.getLogger(__name__)
51 51
52 52
53 53 class GitRepository(BaseRepository):
54 54 """
55 55 Git repository backend.
56 56 """
57 57 DEFAULT_BRANCH_NAME = os.environ.get('GIT_DEFAULT_BRANCH_NAME') or 'master'
58 DEFAULT_REF = 'branch:{}'.format(DEFAULT_BRANCH_NAME)
58 59
59 60 contact = BaseRepository.DEFAULT_CONTACT
60 61
61 62 def __init__(self, repo_path, config=None, create=False, src_url=None,
62 63 do_workspace_checkout=False, with_wire=None, bare=False):
63 64
64 65 self.path = safe_str(os.path.abspath(repo_path))
65 66 self.config = config if config else self.get_default_config()
66 67 self.with_wire = with_wire or {"cache": False} # default should not use cache
67 68
68 69 self._init_repo(create, src_url, do_workspace_checkout, bare)
69 70
70 71 # caches
71 72 self._commit_ids = {}
72 73
73 74 @LazyProperty
74 75 def _remote(self):
75 76 repo_id = self.path
76 77 return connection.Git(self.path, repo_id, self.config, with_wire=self.with_wire)
77 78
78 79 @LazyProperty
79 80 def bare(self):
80 81 return self._remote.bare()
81 82
82 83 @LazyProperty
83 84 def head(self):
84 85 return self._remote.head()
85 86
86 87 @CachedProperty
87 88 def commit_ids(self):
88 89 """
89 90 Returns a list of commit ids, in ascending order. Being a lazy
90 91 attribute allows external tools to inject commit ids from a cache.
91 92 """
92 93 commit_ids = self._get_all_commit_ids()
93 94 self._rebuild_cache(commit_ids)
94 95 return commit_ids
95 96
96 97 def _rebuild_cache(self, commit_ids):
97 98 self._commit_ids = dict((commit_id, index)
98 99 for index, commit_id in enumerate(commit_ids))
99 100
100 101 def run_git_command(self, cmd, **opts):
101 102 """
102 103 Runs the given ``cmd`` as a git command and returns a
103 104 (stdout, stderr) tuple.
104 105
105 106 :param cmd: git command to be executed
106 107 :param opts: env options to pass into Subprocess command
107 108 """
108 109 if not isinstance(cmd, list):
109 110 raise ValueError('cmd must be a list, got %s instead' % type(cmd))
110 111
111 112 skip_stderr_log = opts.pop('skip_stderr_log', False)
112 113 out, err = self._remote.run_git_command(cmd, **opts)
113 114 if err and not skip_stderr_log:
114 115 log.debug('Stderr output of git command "%s":\n%s', cmd, err)
115 116 return out, err
116 117
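A quick usage sketch of this wrapper, assuming an initialized GitRepository instance named repo:

    # returns a (stdout, stderr) tuple from the remote git invocation
    stdout, _stderr = repo.run_git_command(['rev-parse', 'HEAD'])
    head_sha = stdout.strip()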
117 118 @staticmethod
118 119 def check_url(url, config):
119 120 """
120 121 Checks the given url and tries to verify that it's a valid
121 122 link. Sometimes git may issue a basic auth request, which can
122 123 cause the whole API to hang when used from Python or other
123 124 external callers.
124 125
125 126 On failure it raises urllib2.HTTPError; the exception is also
126 127 raised when the return code is not 200.
127 128 """
128 129 # first check whether it's a local path rather than a url
129 130 if os.path.isdir(url) or url.startswith('file:'):
130 131 return True
131 132
132 133 if '+' in url.split('://', 1)[0]:
133 134 url = url.split('+', 1)[1]
134 135
135 136 # Request the _remote to verify the url
136 137 return connection.Git.check_url(url, config.serialize())
137 138
138 139 @staticmethod
139 140 def is_valid_repository(path):
140 141 if os.path.isdir(os.path.join(path, '.git')):
141 142 return True
142 143 # check case of bare repository
143 144 try:
144 145 GitRepository(path)
145 146 return True
146 147 except VCSError:
147 148 pass
148 149 return False
149 150
150 151 def _init_repo(self, create, src_url=None, do_workspace_checkout=False,
151 152 bare=False):
152 153 if create and os.path.exists(self.path):
153 154 raise RepositoryError(
154 155 "Cannot create repository at %s, location already exist"
155 156 % self.path)
156 157
157 158 if bare and do_workspace_checkout:
158 159 raise RepositoryError("Cannot update a bare repository")
159 160 try:
160 161
161 162 if src_url:
162 163 # check URL before any actions
163 164 GitRepository.check_url(src_url, self.config)
164 165
165 166 if create:
166 167 os.makedirs(self.path, mode=0o755)
167 168
168 169 if bare:
169 170 self._remote.init_bare()
170 171 else:
171 172 self._remote.init()
172 173
173 174 if src_url and bare:
174 175 # bare repository only allows a fetch and checkout is not allowed
175 176 self.fetch(src_url, commit_ids=None)
176 177 elif src_url:
177 178 self.pull(src_url, commit_ids=None,
178 179 update_after=do_workspace_checkout)
179 180
180 181 else:
181 182 if not self._remote.assert_correct_path():
182 183 raise RepositoryError(
183 184 'Path "%s" does not contain a Git repository' %
184 185 (self.path,))
185 186
186 187 # TODO: johbo: check if we have to translate the OSError here
187 188 except OSError as err:
188 189 raise RepositoryError(err)
189 190
190 191 def _get_all_commit_ids(self):
191 192 return self._remote.get_all_commit_ids()
192 193
193 194 def _get_commit_ids(self, filters=None):
194 195 # we must check that this repo is not empty, since the command
195 196 # below fails if it is; it's also cheaper to ask up front than to
196 197 # handle the subprocess errors
197 198
198 199 head = self._remote.head(show_exc=False)
199 200
200 201 if not head:
201 202 return []
202 203
203 204 rev_filter = ['--branches', '--tags']
204 205 extra_filter = []
205 206
206 207 if filters:
207 208 if filters.get('since'):
208 209 extra_filter.append('--since=%s' % (filters['since']))
209 210 if filters.get('until'):
210 211 extra_filter.append('--until=%s' % (filters['until']))
211 212 if filters.get('branch_name'):
212 213 rev_filter = []
213 214 extra_filter.append(filters['branch_name'])
214 215 rev_filter.extend(extra_filter)
215 216
216 217 # if filters.get('start') or filters.get('end'):
217 218 # # skip is offset, max-count is limit
218 219 # if filters.get('start'):
219 220 # extra_filter += ' --skip=%s' % filters['start']
220 221 # if filters.get('end'):
221 222 # extra_filter += ' --max-count=%s' % (filters['end'] - (filters['start'] or 0))
222 223
223 224 cmd = ['rev-list', '--reverse', '--date-order'] + rev_filter
224 225 try:
225 226 output, __ = self.run_git_command(cmd)
226 227 except RepositoryError:
227 228 # Can be raised for empty repositories
228 229 return []
229 230 return output.splitlines()
230 231
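For example, a branch filter combined with a start date (values illustrative, using the '%m/%d/%y %H:%M:%S' format produced by get_commits below) yields a plain rev-list invocation:

    # filters = {'branch_name': 'master', 'since': '01/01/20 00:00:00'}
    # builds: cmd = ['rev-list', '--reverse', '--date-order',
    #                '--since=01/01/20 00:00:00', 'master']
    # i.e. the branch name replaces the default --branches/--tags refs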
231 232 def _lookup_commit(self, commit_id_or_idx, translate_tag=True, maybe_unreachable=False, reference_obj=None):
232 233
233 234 def is_null(value):
234 235 return len(value) == commit_id_or_idx.count('0')
235 236
236 237 if commit_id_or_idx in (None, '', 'tip', 'HEAD', 'head', -1):
237 238 return self.commit_ids[-1]
238 239
239 240 commit_missing_err = "Commit {} does not exist for `{}`".format(
240 241 *map(safe_str, [commit_id_or_idx, self.name]))
241 242
242 243 is_bstr = isinstance(commit_id_or_idx, (str, unicode))
243 244 is_branch = reference_obj and reference_obj.branch
244 245
245 246 lookup_ok = False
246 247 if is_bstr:
247 248 # Need to call the remote to translate ids for tagging scenarios,
248 249 # or branches that are numeric
249 250 try:
250 251 remote_data = self._remote.get_object(commit_id_or_idx,
251 252 maybe_unreachable=maybe_unreachable)
252 253 commit_id_or_idx = remote_data["commit_id"]
253 254 lookup_ok = True
254 255 except (CommitDoesNotExistError,):
255 256 lookup_ok = False
256 257
257 258 if lookup_ok is False:
258 259 is_numeric_idx = \
259 260 (is_bstr and commit_id_or_idx.isdigit() and len(commit_id_or_idx) < 12) \
260 261 or isinstance(commit_id_or_idx, int)
261 262 if not is_branch and (is_numeric_idx or is_null(commit_id_or_idx)):
262 263 try:
263 264 commit_id_or_idx = self.commit_ids[int(commit_id_or_idx)]
264 265 lookup_ok = True
265 266 except Exception:
266 267 raise CommitDoesNotExistError(commit_missing_err)
267 268
268 269 # regular lookup failed, as did lookup by integer index
269 270 if lookup_ok is False:
270 271 raise CommitDoesNotExistError(commit_missing_err)
271 272
272 273 # Ensure we return full id
273 274 if not SHA_PATTERN.match(str(commit_id_or_idx)):
274 275 raise CommitDoesNotExistError(
275 276 "Given commit id %s not recognized" % commit_id_or_idx)
276 277 return commit_id_or_idx
277 278
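In short, the resolution above proceeds in stages; a usage sketch (ids illustrative, assuming the objects exist in the repo):

    repo._lookup_commit('tip')      # symbolic name -> newest commit id
    repo._lookup_commit('v1.0.0')   # tag/branch name, resolved via the remote
    repo._lookup_commit(0)          # integer index into self.commit_ids
    repo._lookup_commit('0' * 40)   # null id, handled by the is_null() branch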
278 279 def get_hook_location(self):
279 280 """
280 281 returns absolute path to location where hooks are stored
281 282 """
282 283 loc = os.path.join(self.path, 'hooks')
283 284 if not self.bare:
284 285 loc = os.path.join(self.path, '.git', 'hooks')
285 286 return loc
286 287
287 288 @LazyProperty
288 289 def last_change(self):
289 290 """
290 291 Returns last change made on this repository as
291 292 `datetime.datetime` object.
292 293 """
293 294 try:
294 295 return self.get_commit().date
295 296 except RepositoryError:
296 297 tzoffset = makedate()[1]
297 298 return utcdate_fromtimestamp(self._get_fs_mtime(), tzoffset)
298 299
299 300 def _get_fs_mtime(self):
300 301 idx_loc = '' if self.bare else '.git'
301 302 # fallback to filesystem
302 303 in_path = os.path.join(self.path, idx_loc, "index")
303 304 he_path = os.path.join(self.path, idx_loc, "HEAD")
304 305 if os.path.exists(in_path):
305 306 return os.stat(in_path).st_mtime
306 307 else:
307 308 return os.stat(he_path).st_mtime
308 309
309 310 @LazyProperty
310 311 def description(self):
311 312 description = self._remote.get_description()
312 313 return safe_unicode(description or self.DEFAULT_DESCRIPTION)
313 314
314 315 def _get_refs_entries(self, prefix='', reverse=False, strip_prefix=True):
315 316 if self.is_empty():
316 317 return OrderedDict()
317 318
318 319 result = []
319 320 for ref, sha in self._refs.iteritems():
320 321 if ref.startswith(prefix):
321 322 ref_name = ref
322 323 if strip_prefix:
323 324 ref_name = ref[len(prefix):]
324 325 result.append((safe_unicode(ref_name), sha))
325 326
326 327 def get_name(entry):
327 328 return entry[0]
328 329
329 330 return OrderedDict(sorted(result, key=get_name, reverse=reverse))
330 331
331 332 def _get_branches(self):
332 333 return self._get_refs_entries(prefix='refs/heads/', strip_prefix=True)
333 334
334 335 @CachedProperty
335 336 def branches(self):
336 337 return self._get_branches()
337 338
338 339 @CachedProperty
339 340 def branches_closed(self):
340 341 return {}
341 342
342 343 @CachedProperty
343 344 def bookmarks(self):
344 345 return {}
345 346
346 347 @CachedProperty
347 348 def branches_all(self):
348 349 all_branches = {}
349 350 all_branches.update(self.branches)
350 351 all_branches.update(self.branches_closed)
351 352 return all_branches
352 353
353 354 @CachedProperty
354 355 def tags(self):
355 356 return self._get_tags()
356 357
357 358 def _get_tags(self):
358 359 return self._get_refs_entries(prefix='refs/tags/', strip_prefix=True, reverse=True)
359 360
360 361 def tag(self, name, user, commit_id=None, message=None, date=None,
361 362 **kwargs):
362 363 # TODO: fix this method to apply annotated tags correctly, with a message
363 364 """
364 365 Creates and returns a tag for the given ``commit_id``.
365 366
366 367 :param name: name for new tag
367 368 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
368 369 :param commit_id: commit id for which new tag would be created
369 370 :param message: message of the tag's commit
370 371 :param date: date of tag's commit
371 372
372 373 :raises TagAlreadyExistError: if a tag with the same name already exists
373 374 """
374 375 if name in self.tags:
375 376 raise TagAlreadyExistError("Tag %s already exists" % name)
376 377 commit = self.get_commit(commit_id=commit_id)
377 378 message = message or "Added tag %s for commit %s" % (name, commit.raw_id)
378 379
379 380 self._remote.set_refs('refs/tags/%s' % name, commit.raw_id)
380 381
381 382 self._invalidate_prop_cache('tags')
382 383 self._invalidate_prop_cache('_refs')
383 384
384 385 return commit
385 386
386 387 def remove_tag(self, name, user, message=None, date=None):
387 388 """
388 389 Removes tag with the given ``name``.
389 390
390 391 :param name: name of the tag to be removed
391 392 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
392 393 :param message: message of the tag's removal commit
393 394 :param date: date of tag's removal commit
394 395
395 396 :raises TagDoesNotExistError: if a tag with the given name does not exist
396 397 """
397 398 if name not in self.tags:
398 399 raise TagDoesNotExistError("Tag %s does not exist" % name)
399 400
400 401 self._remote.tag_remove(name)
401 402 self._invalidate_prop_cache('tags')
402 403 self._invalidate_prop_cache('_refs')
403 404
404 405 def _get_refs(self):
405 406 return self._remote.get_refs()
406 407
407 408 @CachedProperty
408 409 def _refs(self):
409 410 return self._get_refs()
410 411
411 412 @property
412 413 def _ref_tree(self):
413 414 node = tree = {}
414 415 for ref, sha in self._refs.iteritems():
415 416 path = ref.split('/')
416 417 for bit in path[:-1]:
417 418 node = node.setdefault(bit, {})
418 419 node[path[-1]] = sha
419 420 node = tree
420 421 return tree
421 422
422 423 def get_remote_ref(self, ref_name):
423 424 ref_key = 'refs/remotes/origin/{}'.format(safe_str(ref_name))
424 425 try:
425 426 return self._refs[ref_key]
426 427 except Exception:
427 428 return
428 429
429 430 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None,
430 431 translate_tag=True, maybe_unreachable=False, reference_obj=None):
431 432 """
432 433 Returns `GitCommit` object representing commit from git repository
433 434 at the given `commit_id` or head (most recent commit) if None given.
434 435 """
435 436
436 437 if self.is_empty():
437 438 raise EmptyRepositoryError("There are no commits yet")
438 439
439 440 if commit_id is not None:
440 441 self._validate_commit_id(commit_id)
441 442 try:
442 443 # we have cached idx, use it without contacting the remote
443 444 idx = self._commit_ids[commit_id]
444 445 return GitCommit(self, commit_id, idx, pre_load=pre_load)
445 446 except KeyError:
446 447 pass
447 448
448 449 elif commit_idx is not None:
449 450 self._validate_commit_idx(commit_idx)
450 451 try:
451 452 _commit_id = self.commit_ids[commit_idx]
452 453 if commit_idx < 0:
453 454 commit_idx = self.commit_ids.index(_commit_id)
454 455 return GitCommit(self, _commit_id, commit_idx, pre_load=pre_load)
455 456 except IndexError:
456 457 commit_id = commit_idx
457 458 else:
458 459 commit_id = "tip"
459 460
460 461 if translate_tag:
461 462 commit_id = self._lookup_commit(
462 463 commit_id, maybe_unreachable=maybe_unreachable,
463 464 reference_obj=reference_obj)
464 465
465 466 try:
466 467 idx = self._commit_ids[commit_id]
467 468 except KeyError:
468 469 idx = -1
469 470
470 471 return GitCommit(self, commit_id, idx, pre_load=pre_load)
471 472
472 473 def get_commits(
473 474 self, start_id=None, end_id=None, start_date=None, end_date=None,
474 475 branch_name=None, show_hidden=False, pre_load=None, translate_tags=True):
475 476 """
476 477 Returns generator of `GitCommit` objects from start to end (both
477 478 are inclusive), in ascending date order.
478 479
479 480 :param start_id: None, str(commit_id)
480 481 :param end_id: None, str(commit_id)
481 482 :param start_date: if specified, commits with commit date less than
482 483 ``start_date`` would be filtered out from returned set
483 484 :param end_date: if specified, commits with commit date greater than
484 485 ``end_date`` would be filtered out from returned set
485 486 :param branch_name: if specified, commits not reachable from given
486 487 branch would be filtered out from returned set
487 488 :param show_hidden: Show hidden commits such as obsolete or hidden from
488 489 Mercurial evolve
489 490 :raise BranchDoesNotExistError: If given `branch_name` does not
490 491 exist.
491 492 :raise CommitDoesNotExistError: If commits for given `start` or
492 493 `end` could not be found.
493 494
494 495 """
495 496 if self.is_empty():
496 497 raise EmptyRepositoryError("There are no commits yet")
497 498
498 499 self._validate_branch_name(branch_name)
499 500
500 501 if start_id is not None:
501 502 self._validate_commit_id(start_id)
502 503 if end_id is not None:
503 504 self._validate_commit_id(end_id)
504 505
505 506 start_raw_id = self._lookup_commit(start_id)
506 507 start_pos = self._commit_ids[start_raw_id] if start_id else None
507 508 end_raw_id = self._lookup_commit(end_id)
508 509 end_pos = max(0, self._commit_ids[end_raw_id]) if end_id else None
509 510
510 511 if None not in [start_id, end_id] and start_pos > end_pos:
511 512 raise RepositoryError(
512 513 "Start commit '%s' cannot be after end commit '%s'" %
513 514 (start_id, end_id))
514 515
515 516 if end_pos is not None:
516 517 end_pos += 1
517 518
518 519 filter_ = []
519 520 if branch_name:
520 521 filter_.append({'branch_name': branch_name})
521 522 if start_date and not end_date:
522 523 filter_.append({'since': start_date})
523 524 if end_date and not start_date:
524 525 filter_.append({'until': end_date})
525 526 if start_date and end_date:
526 527 filter_.append({'since': start_date})
527 528 filter_.append({'until': end_date})
528 529
529 530 # if start_pos or end_pos:
530 531 # filter_.append({'start': start_pos})
531 532 # filter_.append({'end': end_pos})
532 533
533 534 if filter_:
534 535 revfilters = {
535 536 'branch_name': branch_name,
536 537 'since': start_date.strftime('%m/%d/%y %H:%M:%S') if start_date else None,
537 538 'until': end_date.strftime('%m/%d/%y %H:%M:%S') if end_date else None,
538 539 'start': start_pos,
539 540 'end': end_pos,
540 541 }
541 542 commit_ids = self._get_commit_ids(filters=revfilters)
542 543
543 544 else:
544 545 commit_ids = self.commit_ids
545 546
546 547 if start_pos or end_pos:
547 548 commit_ids = commit_ids[start_pos: end_pos]
548 549
549 550 return CollectionGenerator(self, commit_ids, pre_load=pre_load,
550 551 translate_tag=translate_tags)
551 552
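A typical call, e.g. collecting the shas of a branch within a date window (values illustrative):

    import datetime
    commits = repo.get_commits(
        branch_name='master',
        start_date=datetime.datetime(2020, 1, 1),
        end_date=datetime.datetime(2020, 6, 1))
    shas = [commit.raw_id for commit in commits]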
552 553 def get_diff(
553 554 self, commit1, commit2, path='', ignore_whitespace=False,
554 555 context=3, path1=None):
555 556 """
556 557 Returns (git like) *diff*, as plain text. Shows changes introduced by
557 558 ``commit2`` since ``commit1``.
558 559
559 560 :param commit1: Entry point from which diff is shown. Can be
560 561 ``self.EMPTY_COMMIT`` - in this case, patch showing all
561 562 the changes since empty state of the repository until ``commit2``
562 563 :param commit2: Until which commits changes should be shown.
563 564 :param ignore_whitespace: If set to ``True``, would not show whitespace
564 565 changes. Defaults to ``False``.
565 566 :param context: How many lines before/after changed lines should be
566 567 shown. Defaults to ``3``.
567 568 """
568 569 self._validate_diff_commits(commit1, commit2)
569 570 if path1 is not None and path1 != path:
570 571 raise ValueError("Diff of two different paths not supported.")
571 572
572 573 if path:
573 574 file_filter = path
574 575 else:
575 576 file_filter = None
576 577
577 578 diff = self._remote.diff(
578 579 commit1.raw_id, commit2.raw_id, file_filter=file_filter,
579 580 opt_ignorews=ignore_whitespace,
580 581 context=context)
581 582 return GitDiff(diff)
582 583
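For example, a single-file diff between the first commit and the current head (path illustrative):

    commit1 = repo.get_commit(commit_idx=0)
    commit2 = repo.get_commit()  # defaults to the newest commit
    diff = repo.get_diff(commit1, commit2, path='setup.py', context=5)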
583 584 def strip(self, commit_id, branch_name):
584 585 commit = self.get_commit(commit_id=commit_id)
585 586 if commit.merge:
586 587 raise Exception('Cannot reset to merge commit')
587 588
588 589 # parent is going to be the new head now
589 590 commit = commit.parents[0]
590 591 self._remote.set_refs('refs/heads/%s' % branch_name, commit.raw_id)
591 592
592 593 # clear cached properties
593 594 self._invalidate_prop_cache('commit_ids')
594 595 self._invalidate_prop_cache('_refs')
595 596 self._invalidate_prop_cache('branches')
596 597
597 598 return len(self.commit_ids)
598 599
599 600 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
600 601 log.debug('Calculating common ancestor between %sc1:%s and %sc2:%s',
601 602 self, commit_id1, repo2, commit_id2)
602 603
603 604 if commit_id1 == commit_id2:
604 605 return commit_id1
605 606
606 607 if self != repo2:
607 608 commits = self._remote.get_missing_revs(
608 609 commit_id1, commit_id2, repo2.path)
609 610 if commits:
610 611 commit = repo2.get_commit(commits[-1])
611 612 if commit.parents:
612 613 ancestor_id = commit.parents[0].raw_id
613 614 else:
614 615 ancestor_id = None
615 616 else:
616 617 # no commits from other repo, ancestor_id is the commit_id2
617 618 ancestor_id = commit_id2
618 619 else:
619 620 output, __ = self.run_git_command(
620 621 ['merge-base', commit_id1, commit_id2])
621 622 ancestor_id = self.COMMIT_ID_PAT.findall(output)[0]
622 623
623 624 log.debug('Found common ancestor with sha: %s', ancestor_id)
624 625
625 626 return ancestor_id
626 627
627 628 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
628 629 repo1 = self
629 630 ancestor_id = None
630 631
631 632 if commit_id1 == commit_id2:
632 633 commits = []
633 634 elif repo1 != repo2:
634 635 missing_ids = self._remote.get_missing_revs(commit_id1, commit_id2,
635 636 repo2.path)
636 637 commits = [
637 638 repo2.get_commit(commit_id=commit_id, pre_load=pre_load)
638 639 for commit_id in reversed(missing_ids)]
639 640 else:
640 641 output, __ = repo1.run_git_command(
641 642 ['log', '--reverse', '--pretty=format: %H', '-s',
642 643 '%s..%s' % (commit_id1, commit_id2)])
643 644 commits = [
644 645 repo1.get_commit(commit_id=commit_id, pre_load=pre_load)
645 646 for commit_id in self.COMMIT_ID_PAT.findall(output)]
646 647
647 648 return commits
648 649
649 650 @LazyProperty
650 651 def in_memory_commit(self):
651 652 """
652 653 Returns ``GitInMemoryCommit`` object for this repository.
653 654 """
654 655 return GitInMemoryCommit(self)
655 656
656 657 def pull(self, url, commit_ids=None, update_after=False):
657 658 """
658 659 Pull changes from an external location. In Git, pull differs
659 660 from fetch in that it also performs a checkout.
660 661
661 662 :param commit_ids: Optional. Can be set to a list of commit ids
662 663 which shall be pulled from the other repository.
663 664 """
664 665 refs = None
665 666 if commit_ids is not None:
666 667 remote_refs = self._remote.get_remote_refs(url)
667 668 refs = [ref for ref in remote_refs if remote_refs[ref] in commit_ids]
668 669 self._remote.pull(url, refs=refs, update_after=update_after)
669 670 self._remote.invalidate_vcs_cache()
670 671
671 672 def fetch(self, url, commit_ids=None):
672 673 """
673 674 Fetch all git objects from external location.
674 675 """
675 676 self._remote.sync_fetch(url, refs=commit_ids)
676 677 self._remote.invalidate_vcs_cache()
677 678
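The distinction matters in _init_repo above: bare clones only ever fetch, while non-bare clones pull and can check out the working copy. E.g. (URL illustrative):

    # bare mirror: fetch objects only, no working copy involved
    repo.fetch('http://example.com/upstream')
    # non-bare: pull and also update the checked-out working copy
    repo.pull('http://example.com/upstream', update_after=True)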
678 679 def push(self, url):
679 680 refs = None
680 681 self._remote.sync_push(url, refs=refs)
681 682
682 683 def set_refs(self, ref_name, commit_id):
683 684 self._remote.set_refs(ref_name, commit_id)
684 685 self._invalidate_prop_cache('_refs')
685 686
686 687 def remove_ref(self, ref_name):
687 688 self._remote.remove_ref(ref_name)
688 689 self._invalidate_prop_cache('_refs')
689 690
690 691 def run_gc(self, prune=True):
691 692 cmd = ['gc', '--aggressive']
692 693 if prune:
693 694 cmd += ['--prune=now']
694 695 _stdout, stderr = self.run_git_command(cmd, fail_on_stderr=False)
695 696 return stderr
696 697
697 698 def _update_server_info(self):
698 699 """
699 700 runs git's update-server-info command on this repo instance
700 701 """
701 702 self._remote.update_server_info()
702 703
703 704 def _current_branch(self):
704 705 """
705 706 Return the name of the current branch.
706 707
707 708 It only works for non-bare repositories (i.e. repositories with a
708 709 working copy)
709 710 """
710 711 if self.bare:
711 712 raise RepositoryError('Bare git repos do not have active branches')
712 713
713 714 if self.is_empty():
714 715 return None
715 716
716 717 stdout, _ = self.run_git_command(['rev-parse', '--abbrev-ref', 'HEAD'])
717 718 return stdout.strip()
718 719
719 720 def _checkout(self, branch_name, create=False, force=False):
720 721 """
721 722 Checkout a branch in the working directory.
722 723
723 724 It tries to create the branch if create is True, failing if the branch
724 725 already exists.
725 726
726 727 It only works for non-bare repositories (i.e. repositories with a
727 728 working copy)
728 729 """
729 730 if self.bare:
730 731 raise RepositoryError('Cannot checkout branches in a bare git repo')
731 732
732 733 cmd = ['checkout']
733 734 if force:
734 735 cmd.append('-f')
735 736 if create:
736 737 cmd.append('-b')
737 738 cmd.append(branch_name)
738 739 self.run_git_command(cmd, fail_on_stderr=False)
739 740
740 741 def _create_branch(self, branch_name, commit_id):
741 742 """
742 743 creates a branch in a GIT repo
743 744 """
744 745 self._remote.create_branch(branch_name, commit_id)
745 746
746 747 def _identify(self):
747 748 """
748 749 Return the current state of the working directory.
749 750 """
750 751 if self.bare:
751 752 raise RepositoryError('Bare git repos do not have active branches')
752 753
753 754 if self.is_empty():
754 755 return None
755 756
756 757 stdout, _ = self.run_git_command(['rev-parse', 'HEAD'])
757 758 return stdout.strip()
758 759
759 760 def _local_clone(self, clone_path, branch_name, source_branch=None):
760 761 """
761 762 Create a local clone of the current repo.
762 763 """
763 764 # N.B.(skreft): the --branch option is required as otherwise the shallow
764 765 # clone will only fetch the active branch.
765 766 cmd = ['clone', '--branch', branch_name,
766 767 self.path, os.path.abspath(clone_path)]
767 768
768 769 self.run_git_command(cmd, fail_on_stderr=False)
769 770
770 771 # if we are given a different source branch, make sure we also
771 772 # fetch it for merge conditions
772 773 if source_branch and source_branch != branch_name:
773 774 # check if the ref exists.
774 775 shadow_repo = GitRepository(os.path.abspath(clone_path))
775 776 if shadow_repo.get_remote_ref(source_branch):
776 777 cmd = ['fetch', self.path, source_branch]
777 778 self.run_git_command(cmd, fail_on_stderr=False)
778 779
779 780 def _local_fetch(self, repository_path, branch_name, use_origin=False):
780 781 """
781 782 Fetch a branch from a local repository.
782 783 """
783 784 repository_path = os.path.abspath(repository_path)
784 785 if repository_path == self.path:
785 786 raise ValueError('Cannot fetch from the same repository')
786 787
787 788 if use_origin:
788 789 branch_name = '+{branch}:refs/heads/{branch}'.format(
789 790 branch=branch_name)
790 791
791 792 cmd = ['fetch', '--no-tags', '--update-head-ok',
792 793 repository_path, branch_name]
793 794 self.run_git_command(cmd, fail_on_stderr=False)
794 795
795 796 def _local_reset(self, branch_name):
796 797 branch_name = '{}'.format(branch_name)
797 798 cmd = ['reset', '--hard', branch_name, '--']
798 799 self.run_git_command(cmd, fail_on_stderr=False)
799 800
800 801 def _last_fetch_heads(self):
801 802 """
802 803 Return the last fetched heads that need merging.
803 804
804 805 The algorithm is defined at
805 806 https://github.com/git/git/blob/v2.1.3/git-pull.sh#L283
806 807 """
807 808 if not self.bare:
808 809 fetch_heads_path = os.path.join(self.path, '.git', 'FETCH_HEAD')
809 810 else:
810 811 fetch_heads_path = os.path.join(self.path, 'FETCH_HEAD')
811 812
812 813 heads = []
813 814 with open(fetch_heads_path) as f:
814 815 for line in f:
815 816 if ' not-for-merge ' in line:
816 817 continue
817 818 line = re.sub('\t.*', '', line, flags=re.DOTALL)
818 819 heads.append(line)
819 820
820 821 return heads
821 822
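For reference, each FETCH_HEAD line pairs a commit id with a description, and the not-for-merge marker flags refs that were fetched but should not be merged; a sketch of what the parsing above is meant to keep (shas and layout illustrative):

    # deadbeef...                 branch 'master' of /src/repo  -> kept, truncated to the sha
    # cafebabe...  not-for-merge  branch 'dev' of /src/repo     -> skipped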
822 823 def get_shadow_instance(self, shadow_repository_path, enable_hooks=False, cache=False):
823 824 return GitRepository(shadow_repository_path, with_wire={"cache": cache})
824 825
825 826 def _local_pull(self, repository_path, branch_name, ff_only=True):
826 827 """
827 828 Pull a branch from a local repository.
828 829 """
829 830 if self.bare:
830 831 raise RepositoryError('Cannot pull into a bare git repository')
831 832 # N.B.(skreft): The --ff-only option is to make sure this is a
832 833 # fast-forward (i.e., we are only pulling new changes and there are no
833 834 # conflicts with our current branch)
834 835 # Additionally, that option needs to go before --no-tags, otherwise git
835 836 # pull complains about it being an unknown flag.
836 837 cmd = ['pull']
837 838 if ff_only:
838 839 cmd.append('--ff-only')
839 840 cmd.extend(['--no-tags', repository_path, branch_name])
840 841 self.run_git_command(cmd, fail_on_stderr=False)
841 842
842 843 def _local_merge(self, merge_message, user_name, user_email, heads):
843 844 """
844 845 Merge the given head into the checked out branch.
845 846
846 847 It will force a merge commit.
847 848
848 849 Currently it raises an error if the repo is empty, as it is not possible
849 850 to create a merge commit in an empty repo.
850 851
851 852 :param merge_message: The message to use for the merge commit.
852 853 :param heads: the heads to merge.
853 854 """
854 855 if self.bare:
855 856 raise RepositoryError('Cannot merge into a bare git repository')
856 857
857 858 if not heads:
858 859 return
859 860
860 861 if self.is_empty():
861 862 # TODO(skreft): do something more robust in this case.
862 863 raise RepositoryError('Do not know how to merge into empty repositories yet')
863 864 unresolved = None
864 865
865 866 # N.B.(skreft): the --no-ff option is used to enforce the creation of a
866 867 # commit message. We also specify the user who is doing the merge.
867 868 cmd = ['-c', 'user.name="%s"' % safe_str(user_name),
868 869 '-c', 'user.email=%s' % safe_str(user_email),
869 870 'merge', '--no-ff', '-m', safe_str(merge_message)]
870 871
871 872 merge_cmd = cmd + heads
872 873
873 874 try:
874 875 self.run_git_command(merge_cmd, fail_on_stderr=False)
875 876 except RepositoryError:
876 877 files = self.run_git_command(['diff', '--name-only', '--diff-filter', 'U'],
877 878 fail_on_stderr=False)[0].splitlines()
878 879 # NOTE(marcink): we add the U notation for consistency with the HG backend output
879 880 unresolved = ['U {}'.format(f) for f in files]
880 881
881 882 # Cleanup any merge leftovers
882 883 self._remote.invalidate_vcs_cache()
883 884 self.run_git_command(['merge', '--abort'], fail_on_stderr=False)
884 885
885 886 if unresolved:
886 887 raise UnresolvedFilesInRepo(unresolved)
887 888 else:
888 889 raise
889 890
890 891 def _local_push(
891 892 self, source_branch, repository_path, target_branch,
892 893 enable_hooks=False, rc_scm_data=None):
893 894 """
894 895 Push the source_branch to the given repository and target_branch.
895 896
896 897 Currently, if the target_branch is not master and the target repo is
897 898 empty, the push will work, but then GitRepository won't be able to find
898 899 the pushed branch or the commits, as HEAD will be corrupted (i.e.,
899 900 pointing to master, which does not exist).
900 901
901 902 It does not run the hooks in the target repo.
902 903 """
903 904 # TODO(skreft): deal with the case in which the target repo is empty,
904 905 # and the target_branch is not master.
905 906 target_repo = GitRepository(repository_path)
906 907 if (not target_repo.bare and
907 908 target_repo._current_branch() == target_branch):
908 909 # Git prevents pushing to the checked out branch, so simulate it by
909 910 # pulling into the target repository.
910 911 target_repo._local_pull(self.path, source_branch)
911 912 else:
912 913 cmd = ['push', os.path.abspath(repository_path),
913 914 '%s:%s' % (source_branch, target_branch)]
914 915 gitenv = {}
915 916 if rc_scm_data:
916 917 gitenv.update({'RC_SCM_DATA': rc_scm_data})
917 918
918 919 if not enable_hooks:
919 920 gitenv['RC_SKIP_HOOKS'] = '1'
920 921 self.run_git_command(cmd, fail_on_stderr=False, extra_env=gitenv)
921 922
922 923 def _get_new_pr_branch(self, source_branch, target_branch):
923 924 prefix = 'pr_%s-%s_' % (source_branch, target_branch)
924 925 pr_branches = []
925 926 for branch in self.branches:
926 927 if branch.startswith(prefix):
927 928 pr_branches.append(int(branch[len(prefix):]))
928 929
929 930 if not pr_branches:
930 931 branch_id = 0
931 932 else:
932 933 branch_id = max(pr_branches) + 1
933 934
934 935 return '%s%d' % (prefix, branch_id)
935 936
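For example, with two earlier merge-test branches already present (names illustrative):

    # self.branches: ['develop', 'pr_feature-master_0', 'pr_feature-master_3']
    repo._get_new_pr_branch('feature', 'master')  # -> 'pr_feature-master_4'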
936 937 def _maybe_prepare_merge_workspace(
937 938 self, repo_id, workspace_id, target_ref, source_ref):
938 939 shadow_repository_path = self._get_shadow_repository_path(
939 940 self.path, repo_id, workspace_id)
940 941 if not os.path.exists(shadow_repository_path):
941 942 self._local_clone(
942 943 shadow_repository_path, target_ref.name, source_ref.name)
943 944 log.debug('Prepared %s shadow repository in %s',
944 945 self.alias, shadow_repository_path)
945 946
946 947 return shadow_repository_path
947 948
948 949 def _merge_repo(self, repo_id, workspace_id, target_ref,
949 950 source_repo, source_ref, merge_message,
950 951 merger_name, merger_email, dry_run=False,
951 952 use_rebase=False, close_branch=False):
952 953
953 954 log.debug('Executing merge_repo with %s strategy, dry_run mode:%s',
954 955 'rebase' if use_rebase else 'merge', dry_run)
955 956 if target_ref.commit_id != self.branches[target_ref.name]:
956 957 log.warning('Target ref %s commit mismatch %s vs %s', target_ref,
957 958 target_ref.commit_id, self.branches[target_ref.name])
958 959 return MergeResponse(
959 960 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
960 961 metadata={'target_ref': target_ref})
961 962
962 963 shadow_repository_path = self._maybe_prepare_merge_workspace(
963 964 repo_id, workspace_id, target_ref, source_ref)
964 965 shadow_repo = self.get_shadow_instance(shadow_repository_path)
965 966
966 967 # checkout source, if it's different. Otherwise we could not
967 968 # fetch proper commits for merge testing
968 969 if source_ref.name != target_ref.name:
969 970 if shadow_repo.get_remote_ref(source_ref.name):
970 971 shadow_repo._checkout(source_ref.name, force=True)
971 972
972 973 # checkout target, and fetch changes
973 974 shadow_repo._checkout(target_ref.name, force=True)
974 975
975 976 # fetch/reset the target, in case it has changed; this handles
976 977 # even forced changes
977 978 shadow_repo._local_fetch(self.path, target_ref.name, use_origin=True)
978 979 shadow_repo._local_reset(target_ref.name)
979 980
980 981 # Need to reload the repo to invalidate the cache, otherwise we cannot
981 982 # retrieve the last target commit.
982 983 shadow_repo = self.get_shadow_instance(shadow_repository_path)
983 984 if target_ref.commit_id != shadow_repo.branches[target_ref.name]:
984 985 log.warning('Shadow Target ref %s commit mismatch %s vs %s',
985 986 target_ref, target_ref.commit_id,
986 987 shadow_repo.branches[target_ref.name])
987 988 return MergeResponse(
988 989 False, False, None, MergeFailureReason.TARGET_IS_NOT_HEAD,
989 990 metadata={'target_ref': target_ref})
990 991
991 992 # calculate new branch
992 993 pr_branch = shadow_repo._get_new_pr_branch(
993 994 source_ref.name, target_ref.name)
994 995 log.debug('using pull-request merge branch: `%s`', pr_branch)
995 996 # checkout to temp branch, and fetch changes
996 997 shadow_repo._checkout(pr_branch, create=True)
997 998 try:
998 999 shadow_repo._local_fetch(source_repo.path, source_ref.name)
999 1000 except RepositoryError:
1000 1001 log.exception('Failure when doing local fetch on '
1001 1002 'shadow repo: %s', shadow_repo)
1002 1003 return MergeResponse(
1003 1004 False, False, None, MergeFailureReason.MISSING_SOURCE_REF,
1004 1005 metadata={'source_ref': source_ref})
1005 1006
1006 1007 merge_ref = None
1007 1008 merge_failure_reason = MergeFailureReason.NONE
1008 1009 metadata = {}
1009 1010 try:
1010 1011 shadow_repo._local_merge(merge_message, merger_name, merger_email,
1011 1012 [source_ref.commit_id])
1012 1013 merge_possible = True
1013 1014
1014 1015 # Need to invalidate the cache, or otherwise we
1015 1016 # cannot retrieve the merge commit.
1016 1017 shadow_repo = shadow_repo.get_shadow_instance(shadow_repository_path)
1017 1018 merge_commit_id = shadow_repo.branches[pr_branch]
1018 1019
1019 1020 # Set a reference pointing to the merge commit. This reference may
1020 1021 # be used to easily identify the last successful merge commit in
1021 1022 # the shadow repository.
1022 1023 shadow_repo.set_refs('refs/heads/pr-merge', merge_commit_id)
1023 1024 merge_ref = Reference('branch', 'pr-merge', merge_commit_id)
1024 1025 except RepositoryError as e:
1025 1026 log.exception('Failure when doing local merge on git shadow repo')
1026 1027 if isinstance(e, UnresolvedFilesInRepo):
1027 1028 metadata['unresolved_files'] = '\n* conflict: ' + ('\n * conflict: '.join(e.args[0]))
1028 1029
1029 1030 merge_possible = False
1030 1031 merge_failure_reason = MergeFailureReason.MERGE_FAILED
1031 1032
1032 1033 if merge_possible and not dry_run:
1033 1034 try:
1034 1035 shadow_repo._local_push(
1035 1036 pr_branch, self.path, target_ref.name, enable_hooks=True,
1036 1037 rc_scm_data=self.config.get('rhodecode', 'RC_SCM_DATA'))
1037 1038 merge_succeeded = True
1038 1039 except RepositoryError:
1039 1040 log.exception(
1040 1041 'Failure when doing local push from the shadow '
1041 1042 'repository to the target repository at %s.', self.path)
1042 1043 merge_succeeded = False
1043 1044 merge_failure_reason = MergeFailureReason.PUSH_FAILED
1044 1045 metadata['target'] = 'git shadow repo'
1045 1046 metadata['merge_commit'] = pr_branch
1046 1047 else:
1047 1048 merge_succeeded = False
1048 1049
1049 1050 return MergeResponse(
1050 1051 merge_possible, merge_succeeded, merge_ref, merge_failure_reason,
1051 1052 metadata=metadata)
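Condensed, the merge flow above performs these steps in the shadow repository (an outline of the code above, not additional behavior):

    # 1. verify target_ref still matches the target branch head, else TARGET_IS_NOT_HEAD
    # 2. prepare/reuse the shadow clone; checkout, fetch and hard-reset the target branch
    # 3. create a fresh pr_<source>-<target>_<n> branch and fetch the source ref into it
    # 4. _local_merge with --no-ff; conflicts surface as UnresolvedFilesInRepo (MERGE_FAILED)
    # 5. unless dry_run, _local_push the pr branch back to the target repo (PUSH_FAILED on error)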
@@ -1,1195 +1,1197 @@
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import re
23 23 import shutil
24 24 import time
25 25 import logging
26 26 import traceback
27 27 import datetime
28 28
29 29 from pyramid.threadlocal import get_current_request
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode import events
33 33 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 34 from rhodecode.lib.caching_query import FromCache
35 35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
36 36 from rhodecode.lib import hooks_base
37 37 from rhodecode.lib.user_log_filter import user_log_filter
38 38 from rhodecode.lib.utils import make_db_config
39 39 from rhodecode.lib.utils2 import (
40 40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 41 get_current_rhodecode_user, safe_int, action_logger_generic)
42 42 from rhodecode.lib.vcs.backends import get_backend
43 43 from rhodecode.model import BaseModel
44 44 from rhodecode.model.db import (
45 45 _hash_key, func, case, joinedload, or_, in_filter_generator,
46 46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 49 from rhodecode.model.permission import PermissionModel
50 50 from rhodecode.model.settings import VcsSettingsModel
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54
55 55 class RepoModel(BaseModel):
56 56
57 57 cls = Repository
58 58
59 59 def _get_user_group(self, users_group):
60 60 return self._get_instance(UserGroup, users_group,
61 61 callback=UserGroup.get_by_group_name)
62 62
63 63 def _get_repo_group(self, repo_group):
64 64 return self._get_instance(RepoGroup, repo_group,
65 65 callback=RepoGroup.get_by_group_name)
66 66
67 67 def _create_default_perms(self, repository, private):
68 68 # create default permission
69 69 default = 'repository.read'
70 70 def_user = User.get_default_user()
71 71 for p in def_user.user_perms:
72 72 if p.permission.permission_name.startswith('repository.'):
73 73 default = p.permission.permission_name
74 74 break
75 75
76 76 default_perm = 'repository.none' if private else default
77 77
78 78 repo_to_perm = UserRepoToPerm()
79 79 repo_to_perm.permission = Permission.get_by_key(default_perm)
80 80
81 81 repo_to_perm.repository = repository
82 82 repo_to_perm.user_id = def_user.user_id
83 83
84 84 return repo_to_perm
85 85
86 86 @LazyProperty
87 87 def repos_path(self):
88 88 """
89 89 Gets the repositories' root path from the database
90 90 """
91 91 settings_model = VcsSettingsModel(sa=self.sa)
92 92 return settings_model.get_repos_location()
93 93
94 94 def get(self, repo_id):
95 95 repo = self.sa.query(Repository) \
96 96 .filter(Repository.repo_id == repo_id)
97 97
98 98 return repo.scalar()
99 99
100 100 def get_repo(self, repository):
101 101 return self._get_repo(repository)
102 102
103 103 def get_by_repo_name(self, repo_name, cache=False):
104 104 repo = self.sa.query(Repository) \
105 105 .filter(Repository.repo_name == repo_name)
106 106
107 107 if cache:
108 108 name_key = _hash_key(repo_name)
109 109 repo = repo.options(
110 110 FromCache("sql_cache_short", "get_repo_%s" % name_key))
111 111 return repo.scalar()
112 112
113 113 def _extract_id_from_repo_name(self, repo_name):
114 114 if repo_name.startswith('/'):
115 115 repo_name = repo_name.lstrip('/')
116 116 by_id_match = re.match(r'^_(\d{1,})', repo_name)
117 117 if by_id_match:
118 118 return by_id_match.groups()[0]
119 119
120 120 def get_repo_by_id(self, repo_name):
121 121 """
122 122 Resolves a repository from special by-id urls.
123 123 Example url: _11/repo_name
124 124
125 125 :param repo_name:
126 126 :return: repo object if matched else None
127 127 """
128 128 _repo_id = None
129 129 try:
130 130 _repo_id = self._extract_id_from_repo_name(repo_name)
131 131 if _repo_id:
132 132 return self.get(_repo_id)
133 133 except Exception:
134 134 log.exception('Failed to extract repo_name from URL')
135 135 if _repo_id:
136 136 Session().rollback()
137 137
138 138 return None
139 139
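Usage sketch, assuming a RepoModel instance (repo names illustrative):

    model = RepoModel()
    model.get_repo_by_id('_11/some-repo')    # resolves via repo_id == 11
    model.get_repo_by_id('plain/some-repo')  # no _<id> prefix -> None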
140 140 def get_repos_for_root(self, root, traverse=False):
141 141 if traverse:
142 142 like_expression = u'{}%'.format(safe_unicode(root))
143 143 repos = Repository.query().filter(
144 144 Repository.repo_name.like(like_expression)).all()
145 145 else:
146 146 if root and not isinstance(root, RepoGroup):
147 147 raise ValueError(
148 148 'Root must be an instance '
149 149 'of RepoGroup, got:{} instead'.format(type(root)))
150 150 repos = Repository.query().filter(Repository.group == root).all()
151 151 return repos
152 152
153 153 def get_url(self, repo, request=None, permalink=False):
154 154 if not request:
155 155 request = get_current_request()
156 156
157 157 if not request:
158 158 return
159 159
160 160 if permalink:
161 161 return request.route_url(
162 162 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
163 163 else:
164 164 return request.route_url(
165 165 'repo_summary', repo_name=safe_str(repo.repo_name))
166 166
167 167 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
168 168 if not request:
169 169 request = get_current_request()
170 170
171 171 if not request:
172 172 return
173 173
174 174 if permalink:
175 175 return request.route_url(
176 176 'repo_commit', repo_name=safe_str(repo.repo_id),
177 177 commit_id=commit_id)
178 178
179 179 else:
180 180 return request.route_url(
181 181 'repo_commit', repo_name=safe_str(repo.repo_name),
182 182 commit_id=commit_id)
183 183
184 184 def get_repo_log(self, repo, filter_term):
185 185 repo_log = UserLog.query()\
186 186 .filter(or_(UserLog.repository_id == repo.repo_id,
187 187 UserLog.repository_name == repo.repo_name))\
188 188 .options(joinedload(UserLog.user))\
189 189 .options(joinedload(UserLog.repository))\
190 190 .order_by(UserLog.action_date.desc())
191 191
192 192 repo_log = user_log_filter(repo_log, filter_term)
193 193 return repo_log
194 194
195 195 @classmethod
196 196 def update_commit_cache(cls, repositories=None):
197 197 if not repositories:
198 198 repositories = Repository.getAll()
199 199 for repo in repositories:
200 200 repo.update_commit_cache()
201 201
202 202 def get_repos_as_dict(self, repo_list=None, admin=False,
203 203 super_user_actions=False, short_name=None):
204 204
205 205 _render = get_current_request().get_partial_renderer(
206 206 'rhodecode:templates/data_table/_dt_elements.mako')
207 207 c = _render.get_call_context()
208 208 h = _render.get_helpers()
209 209
210 210 def quick_menu(repo_name):
211 211 return _render('quick_menu', repo_name)
212 212
213 213 def repo_lnk(name, rtype, rstate, private, archived, fork_of):
214 214 if short_name is not None:
215 215 short_name_var = short_name
216 216 else:
217 217 short_name_var = not admin
218 218 return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
219 219 short_name=short_name_var, admin=False)
220 220
221 221 def last_change(last_change):
222 222 if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
223 223 ts = time.time()
224 224 utc_offset = (datetime.datetime.fromtimestamp(ts)
225 225 - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
226 226 last_change = last_change + datetime.timedelta(seconds=utc_offset)
227 227
228 228 return _render("last_change", last_change)
229 229
230 230 def rss_lnk(repo_name):
231 231 return _render("rss", repo_name)
232 232
233 233 def atom_lnk(repo_name):
234 234 return _render("atom", repo_name)
235 235
236 236 def last_rev(repo_name, cs_cache):
237 237 return _render('revision', repo_name, cs_cache.get('revision'),
238 238 cs_cache.get('raw_id'), cs_cache.get('author'),
239 239 cs_cache.get('message'), cs_cache.get('date'))
240 240
241 241 def desc(desc):
242 242 return _render('repo_desc', desc, c.visual.stylify_metatags)
243 243
244 244 def state(repo_state):
245 245 return _render("repo_state", repo_state)
246 246
247 247 def repo_actions(repo_name):
248 248 return _render('repo_actions', repo_name, super_user_actions)
249 249
250 250 def user_profile(username):
251 251 return _render('user_profile', username)
252 252
253 253 repos_data = []
254 254 for repo in repo_list:
255 255 # NOTE(marcink): because we use only the raw column, we need to load it like this
256 256 changeset_cache = Repository._load_changeset_cache(
257 257 repo.repo_id, repo._changeset_cache)
258 258
259 259 row = {
260 260 "menu": quick_menu(repo.repo_name),
261 261
262 262 "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
263 263 repo.private, repo.archived, repo.fork),
264 264
265 265 "desc": desc(h.escape(repo.description)),
266 266
267 267 "last_change": last_change(repo.updated_on),
268 268
269 269 "last_changeset": last_rev(repo.repo_name, changeset_cache),
270 270 "last_changeset_raw": changeset_cache.get('revision'),
271 271
272 272 "owner": user_profile(repo.User.username),
273 273
274 274 "state": state(repo.repo_state),
275 275 "rss": rss_lnk(repo.repo_name),
276 276 "atom": atom_lnk(repo.repo_name),
277 277 }
278 278 if admin:
279 279 row.update({
280 280 "action": repo_actions(repo.repo_name),
281 281 })
282 282 repos_data.append(row)
283 283
284 284 return repos_data
285 285
286 286 def get_repos_data_table(
287 287 self, draw, start, limit,
288 288 search_q, order_by, order_dir,
289 289 auth_user, repo_group_id):
290 290 from rhodecode.model.scm import RepoList
291 291
292 292 _perms = ['repository.read', 'repository.write', 'repository.admin']
293 293
294 294 repos = Repository.query() \
295 295 .filter(Repository.group_id == repo_group_id) \
296 296 .all()
297 297 auth_repo_list = RepoList(
298 298 repos, perm_set=_perms,
299 299 extra_kwargs=dict(user=auth_user))
300 300
301 301 allowed_ids = [-1]
302 302 for repo in auth_repo_list:
303 303 allowed_ids.append(repo.repo_id)
304 304
305 305 repos_data_total_count = Repository.query() \
306 306 .filter(Repository.group_id == repo_group_id) \
307 307 .filter(or_(
308 308 # generate multiple IN clauses to work around SQL parameter-count limits
309 309 *in_filter_generator(Repository.repo_id, allowed_ids))
310 310 ) \
311 311 .count()
312 312
313 313 base_q = Session.query(
314 314 Repository.repo_id,
315 315 Repository.repo_name,
316 316 Repository.description,
317 317 Repository.repo_type,
318 318 Repository.repo_state,
319 319 Repository.private,
320 320 Repository.archived,
321 321 Repository.fork,
322 322 Repository.updated_on,
323 323 Repository._changeset_cache,
324 324 User,
325 325 ) \
326 326 .filter(Repository.group_id == repo_group_id) \
327 327 .filter(or_(
328 328 # generate multiple IN clauses to work around SQL parameter-count limits
329 329 *in_filter_generator(Repository.repo_id, allowed_ids))
330 330 ) \
331 331 .join(User, User.user_id == Repository.user_id) \
332 332 .group_by(Repository, User)
333 333
334 334 repos_data_total_filtered_count = base_q.count()
335 335
336 336 sort_defined = False
337 337 if order_by == 'repo_name':
338 338 sort_col = func.lower(Repository.repo_name)
339 339 sort_defined = True
340 340 elif order_by == 'user_username':
341 341 sort_col = User.username
342 342 else:
343 343 sort_col = getattr(Repository, order_by, None)
344 344
345 345 if sort_defined or sort_col:
346 346 if order_dir == 'asc':
347 347 sort_col = sort_col.asc()
348 348 else:
349 349 sort_col = sort_col.desc()
350 350
351 351 base_q = base_q.order_by(sort_col)
352 352 base_q = base_q.offset(start).limit(limit)
353 353
354 354 repos_list = base_q.all()
355 355
356 356 repos_data = RepoModel().get_repos_as_dict(
357 357 repo_list=repos_list, admin=False)
358 358
359 359 data = ({
360 360 'draw': draw,
361 361 'data': repos_data,
362 362 'recordsTotal': repos_data_total_count,
363 363 'recordsFiltered': repos_data_total_filtered_count,
364 364 })
365 365 return data
366 366
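# Hedged usage sketch of the grid endpoint above; `auth_user` and
# `repo_group` are assumed to come from the calling Pyramid view and are not
# defined in this file.
#
#   data = RepoModel().get_repos_data_table(
#       draw=1, start=0, limit=25,
#       search_q='', order_by='repo_name', order_dir='asc',
#       auth_user=auth_user, repo_group_id=repo_group.group_id)
#   # -> {'draw': 1, 'data': [...], 'recordsTotal': N, 'recordsFiltered': M}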
367 367 def _get_defaults(self, repo_name):
368 368 """
369 369 Gets information about a repository and returns a dict for
370 370 usage in forms
371 371
372 372 :param repo_name:
373 373 """
374 374
375 375 repo_info = Repository.get_by_repo_name(repo_name)
376 376
377 377 if repo_info is None:
378 378 return None
379 379
380 380 defaults = repo_info.get_dict()
381 381 defaults['repo_name'] = repo_info.just_name
382 382
383 383 groups = repo_info.groups_with_parents
384 384 parent_group = groups[-1] if groups else None
385 385
386 386 # we use -1 because that is how an empty group is marked in HTML
387 387 defaults['repo_group'] = getattr(parent_group, 'group_id', -1)
388 388
389 389 keys_to_process = (
390 390 {'k': 'repo_type', 'strip': False},
391 391 {'k': 'repo_enable_downloads', 'strip': True},
392 392 {'k': 'repo_description', 'strip': True},
393 393 {'k': 'repo_enable_locking', 'strip': True},
394 394 {'k': 'repo_landing_rev', 'strip': True},
395 395 {'k': 'clone_uri', 'strip': False},
396 396 {'k': 'push_uri', 'strip': False},
397 397 {'k': 'repo_private', 'strip': True},
398 398 {'k': 'repo_enable_statistics', 'strip': True}
399 399 )
400 400
401 401 for item in keys_to_process:
402 402 attr = item['k']
403 403 if item['strip']:
404 404 attr = remove_prefix(item['k'], 'repo_')
405 405
406 406 val = defaults[attr]
407 407 if item['k'] == 'repo_landing_rev':
408 408 val = ':'.join(defaults[attr])
409 409 defaults[item['k']] = val
410 410 if item['k'] == 'clone_uri':
411 411 defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
412 412 if item['k'] == 'push_uri':
413 413 defaults['push_uri_hidden'] = repo_info.push_uri_hidden
414 414
415 415 # fill owner
416 416 if repo_info.user:
417 417 defaults.update({'user': repo_info.user.username})
418 418 else:
419 419 replacement_user = User.get_first_super_admin().username
420 420 defaults.update({'user': replacement_user})
421 421
422 422 return defaults
423 423
424 424 def update(self, repo, **kwargs):
425 425 try:
426 426 cur_repo = self._get_repo(repo)
427 427 source_repo_name = cur_repo.repo_name
428 428
429 429 affected_user_ids = []
430 430 if 'user' in kwargs:
431 431 old_owner_id = cur_repo.user.user_id
432 432 new_owner = User.get_by_username(kwargs['user'])
433 433 cur_repo.user = new_owner
434 434
435 435 if old_owner_id != new_owner.user_id:
436 436 affected_user_ids = [new_owner.user_id, old_owner_id]
437 437
438 438 if 'repo_group' in kwargs:
439 439 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
440 440 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
441 441
442 442 update_keys = [
443 443 (1, 'repo_description'),
444 444 (1, 'repo_landing_rev'),
445 445 (1, 'repo_private'),
446 446 (1, 'repo_enable_downloads'),
447 447 (1, 'repo_enable_locking'),
448 448 (1, 'repo_enable_statistics'),
449 449 (0, 'clone_uri'),
450 450 (0, 'push_uri'),
451 451 (0, 'fork_id')
452 452 ]
453 453 for strip, k in update_keys:
454 454 if k in kwargs:
455 455 val = kwargs[k]
456 456 if strip:
457 457 k = remove_prefix(k, 'repo_')
458 458
459 459 setattr(cur_repo, k, val)
460 460
461 461 new_name = cur_repo.get_new_name(kwargs['repo_name'])
462 462 cur_repo.repo_name = new_name
463 463
464 464 # if private flag is set, reset default permission to NONE
465 465 if kwargs.get('repo_private'):
466 466 EMPTY_PERM = 'repository.none'
467 467 RepoModel().grant_user_permission(
468 468 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
469 469 )
470 470 if kwargs.get('repo_landing_rev'):
471 471 landing_rev_val = kwargs['repo_landing_rev']
472 472 RepoModel().set_landing_rev(cur_repo, landing_rev_val)
473 473
474 474 # handle extra fields
475 475 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
476 476 k = RepositoryField.un_prefix_key(field)
477 477 ex_field = RepositoryField.get_by_key_name(
478 478 key=k, repo=cur_repo)
479 479 if ex_field:
480 480 ex_field.field_value = kwargs[field]
481 481 self.sa.add(ex_field)
482 482
483 483 self.sa.add(cur_repo)
484 484
485 485 if source_repo_name != new_name:
486 486 # rename repository
487 487 self._rename_filesystem_repo(
488 488 old=source_repo_name, new=new_name)
489 489
490 490 if affected_user_ids:
491 491 PermissionModel().trigger_permission_flush(affected_user_ids)
492 492
493 493 return cur_repo
494 494 except Exception:
495 495 log.error(traceback.format_exc())
496 496 raise
497 497
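# Hedged usage sketch for update(): keys follow the strip/no-strip table
# above (`repo_*` keys are stripped before being set on the db object), and
# `repo_name` must always be supplied since it is read unconditionally.
#
#   RepoModel().update(
#       'some/repo', repo_name='some/repo',
#       repo_description='new description', repo_private=True,
#       repo_landing_rev='branch:default')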
498 498 def _create_repo(self, repo_name, repo_type, description, owner,
499 499 private=False, clone_uri=None, repo_group=None,
500 landing_rev='rev:tip', fork_of=None,
500 landing_rev=None, fork_of=None,
501 501 copy_fork_permissions=False, enable_statistics=False,
502 502 enable_locking=False, enable_downloads=False,
503 503 copy_group_permissions=False,
504 504 state=Repository.STATE_PENDING):
505 505 """
506 506 Create a repository inside the database with PENDING state. This should
507 507 only be executed by create(), with the exception of importing existing
508 508 repos
509 509 """
510 510 from rhodecode.model.scm import ScmModel
511 511
512 512 owner = self._get_user(owner)
513 513 fork_of = self._get_repo(fork_of)
514 514 repo_group = self._get_repo_group(safe_int(repo_group))
515 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
516 landing_rev = landing_rev or default_landing_ref
515 517
516 518 try:
517 519 repo_name = safe_unicode(repo_name)
518 520 description = safe_unicode(description)
519 521 # repo name is just a name of repository
520 522 # while repo_name_full is a full qualified name that is combined
521 523 # with name and path of group
522 524 repo_name_full = repo_name
523 525 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
524 526
525 527 new_repo = Repository()
526 528 new_repo.repo_state = state
527 529 new_repo.enable_statistics = False
528 530 new_repo.repo_name = repo_name_full
529 531 new_repo.repo_type = repo_type
530 532 new_repo.user = owner
531 533 new_repo.group = repo_group
532 534 new_repo.description = description or repo_name
533 535 new_repo.private = private
534 536 new_repo.archived = False
535 537 new_repo.clone_uri = clone_uri
536 538 new_repo.landing_rev = landing_rev
537 539
538 540 new_repo.enable_statistics = enable_statistics
539 541 new_repo.enable_locking = enable_locking
540 542 new_repo.enable_downloads = enable_downloads
541 543
542 544 if repo_group:
543 545 new_repo.enable_locking = repo_group.enable_locking
544 546
545 547 if fork_of:
546 548 parent_repo = fork_of
547 549 new_repo.fork = parent_repo
548 550
549 551 events.trigger(events.RepoPreCreateEvent(new_repo))
550 552
551 553 self.sa.add(new_repo)
552 554
553 555 EMPTY_PERM = 'repository.none'
554 556 if fork_of and copy_fork_permissions:
555 557 repo = fork_of
556 558 user_perms = UserRepoToPerm.query() \
557 559 .filter(UserRepoToPerm.repository == repo).all()
558 560 group_perms = UserGroupRepoToPerm.query() \
559 561 .filter(UserGroupRepoToPerm.repository == repo).all()
560 562
561 563 for perm in user_perms:
562 564 UserRepoToPerm.create(
563 565 perm.user, new_repo, perm.permission)
564 566
565 567 for perm in group_perms:
566 568 UserGroupRepoToPerm.create(
567 569 perm.users_group, new_repo, perm.permission)
568 570 # in case we copy permissions and also set this repo to private
569 571 # override the default user permission to make it a private repo
570 572 if private:
571 573 RepoModel(self.sa).grant_user_permission(
572 574 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
573 575
574 576 elif repo_group and copy_group_permissions:
575 577 user_perms = UserRepoGroupToPerm.query() \
576 578 .filter(UserRepoGroupToPerm.group == repo_group).all()
577 579
578 580 group_perms = UserGroupRepoGroupToPerm.query() \
579 581 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
580 582
581 583 for perm in user_perms:
582 584 perm_name = perm.permission.permission_name.replace(
583 585 'group.', 'repository.')
584 586 perm_obj = Permission.get_by_key(perm_name)
585 587 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
586 588
587 589 for perm in group_perms:
588 590 perm_name = perm.permission.permission_name.replace(
589 591 'group.', 'repository.')
590 592 perm_obj = Permission.get_by_key(perm_name)
591 593 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
592 594
593 595 if private:
594 596 RepoModel(self.sa).grant_user_permission(
595 597 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
596 598
597 599 else:
598 600 perm_obj = self._create_default_perms(new_repo, private)
599 601 self.sa.add(perm_obj)
600 602
601 603 # now automatically start following this repository as owner
602 604 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
603 605
604 606 # we need to flush here in order to check that the database won't
605 607 # throw any exceptions; filesystem dirs are created at the very end
606 608 self.sa.flush()
607 609 events.trigger(events.RepoCreateEvent(new_repo))
608 610 return new_repo
609 611
610 612 except Exception:
611 613 log.error(traceback.format_exc())
612 614 raise
613 615
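# Hedged sketch of the landing-ref fallback introduced above: callers may now
# pass landing_rev=None and the per-backend default is resolved via
# ScmModel.backend_landing_ref(). The concrete ref string shown is an
# assumption for illustration.
#
#   default_landing_ref, _label = ScmModel.backend_landing_ref('git')
#   landing_rev = None or default_landing_ref  # e.g. 'branch:master'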
614 616 def create(self, form_data, cur_user):
615 617 """
616 618 Create repository using celery tasks
617 619
618 620 :param form_data:
619 621 :param cur_user:
620 622 """
621 623 from rhodecode.lib.celerylib import tasks, run_task
622 624 return run_task(tasks.create_repo, form_data, cur_user)
623 625
624 626 def update_permissions(self, repo, perm_additions=None, perm_updates=None,
625 627 perm_deletions=None, check_perms=True,
626 628 cur_user=None):
627 629 if not perm_additions:
628 630 perm_additions = []
629 631 if not perm_updates:
630 632 perm_updates = []
631 633 if not perm_deletions:
632 634 perm_deletions = []
633 635
634 636 req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')
635 637
636 638 changes = {
637 639 'added': [],
638 640 'updated': [],
639 641 'deleted': [],
640 642 'default_user_changed': None
641 643 }
642 644
643 645 repo = self._get_repo(repo)
644 646
645 647 # update permissions
646 648 for member_id, perm, member_type in perm_updates:
647 649 member_id = int(member_id)
648 650 if member_type == 'user':
649 651 member_name = User.get(member_id).username
650 652 if member_name == User.DEFAULT_USER:
651 653 # NOTE(dan): detect if we changed permissions for default user
652 654 perm_obj = self.sa.query(UserRepoToPerm) \
653 655 .filter(UserRepoToPerm.user_id == member_id) \
654 656 .filter(UserRepoToPerm.repository == repo) \
655 657 .scalar()
656 658 if perm_obj and perm_obj.permission.permission_name != perm:
657 659 changes['default_user_changed'] = True
658 660
659 661 # this updates also current one if found
660 662 self.grant_user_permission(
661 663 repo=repo, user=member_id, perm=perm)
662 664 elif member_type == 'user_group':
663 665 # check if we have permissions to alter this usergroup
664 666 member_name = UserGroup.get(member_id).users_group_name
665 667 if not check_perms or HasUserGroupPermissionAny(
666 668 *req_perms)(member_name, user=cur_user):
667 669 self.grant_user_group_permission(
668 670 repo=repo, group_name=member_id, perm=perm)
669 671 else:
670 672 raise ValueError("member_type must be 'user' or 'user_group', "
671 673 "got {} instead".format(member_type))
672 674 changes['updated'].append({'type': member_type, 'id': member_id,
673 675 'name': member_name, 'new_perm': perm})
674 676
675 677 # set new permissions
676 678 for member_id, perm, member_type in perm_additions:
677 679 member_id = int(member_id)
678 680 if member_type == 'user':
679 681 member_name = User.get(member_id).username
680 682 self.grant_user_permission(
681 683 repo=repo, user=member_id, perm=perm)
682 684 elif member_type == 'user_group':
683 685 # check if we have permissions to alter this usergroup
684 686 member_name = UserGroup.get(member_id).users_group_name
685 687 if not check_perms or HasUserGroupPermissionAny(
686 688 *req_perms)(member_name, user=cur_user):
687 689 self.grant_user_group_permission(
688 690 repo=repo, group_name=member_id, perm=perm)
689 691 else:
690 692 raise ValueError("member_type must be 'user' or 'user_group', "
691 693 "got {} instead".format(member_type))
692 694
693 695 changes['added'].append({'type': member_type, 'id': member_id,
694 696 'name': member_name, 'new_perm': perm})
695 697 # delete permissions
696 698 for member_id, perm, member_type in perm_deletions:
697 699 member_id = int(member_id)
698 700 if member_type == 'user':
699 701 member_name = User.get(member_id).username
700 702 self.revoke_user_permission(repo=repo, user=member_id)
701 703 elif member_type == 'user_group':
702 704 # check if we have permissions to alter this usergroup
703 705 member_name = UserGroup.get(member_id).users_group_name
704 706 if not check_perms or HasUserGroupPermissionAny(
705 707 *req_perms)(member_name, user=cur_user):
706 708 self.revoke_user_group_permission(
707 709 repo=repo, group_name=member_id)
708 710 else:
709 711 raise ValueError("member_type must be 'user' or 'user_group', "
710 712 "got {} instead".format(member_type))
711 713
712 714 changes['deleted'].append({'type': member_type, 'id': member_id,
713 715 'name': member_name, 'new_perm': perm})
714 716 return changes
715 717
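# Hedged sketch of the permission tuples consumed above: each entry is a
# (member_id, permission_name, member_type) triple; the objects used below
# are illustrative only.
#
#   changes = RepoModel().update_permissions(
#       'some/repo',
#       perm_additions=[(user.user_id, 'repository.write', 'user')],
#       perm_updates=[(group.users_group_id, 'repository.read', 'user_group')],
#       perm_deletions=[(old_user.user_id, None, 'user')],
#       cur_user=apiuser)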
716 718 def create_fork(self, form_data, cur_user):
717 719 """
718 720 Simple wrapper into executing celery task for fork creation
719 721
720 722 :param form_data:
721 723 :param cur_user:
722 724 """
723 725 from rhodecode.lib.celerylib import tasks, run_task
724 726 return run_task(tasks.create_repo_fork, form_data, cur_user)
725 727
726 728 def archive(self, repo):
727 729 """
728 730 Archive given repository. Set archive flag.
729 731
730 732 :param repo:
731 733 """
732 734 repo = self._get_repo(repo)
733 735 if repo:
734 736
735 737 try:
736 738 repo.archived = True
737 739 self.sa.add(repo)
738 740 self.sa.commit()
739 741 except Exception:
740 742 log.error(traceback.format_exc())
741 743 raise
742 744
743 745 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
744 746 """
745 747 Delete the given repository; the forks parameter defines what to do with
746 748 attached forks. Throws AttachedForksError if the deleted repo has attached
747 749 forks
748 750
749 751 :param repo:
750 752 :param forks: str 'delete' or 'detach'
751 753 :param pull_requests: str 'delete' or None
752 754 :param fs_remove: remove(archive) repo from filesystem
753 755 """
754 756 if not cur_user:
755 757 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
756 758 repo = self._get_repo(repo)
757 759 if repo:
758 760 if forks == 'detach':
759 761 for r in repo.forks:
760 762 r.fork = None
761 763 self.sa.add(r)
762 764 elif forks == 'delete':
763 765 for r in repo.forks:
764 766 self.delete(r, forks='delete')
765 767 elif [f for f in repo.forks]:
766 768 raise AttachedForksError()
767 769
768 770 # check for pull requests
769 771 pr_sources = repo.pull_requests_source
770 772 pr_targets = repo.pull_requests_target
771 773 if pull_requests != 'delete' and (pr_sources or pr_targets):
772 774 raise AttachedPullRequestsError()
773 775
774 776 old_repo_dict = repo.get_dict()
775 777 events.trigger(events.RepoPreDeleteEvent(repo))
776 778 try:
777 779 self.sa.delete(repo)
778 780 if fs_remove:
779 781 self._delete_filesystem_repo(repo)
780 782 else:
781 783 log.debug('skipping removal from filesystem')
782 784 old_repo_dict.update({
783 785 'deleted_by': cur_user,
784 786 'deleted_on': time.time(),
785 787 })
786 788 hooks_base.delete_repository(**old_repo_dict)
787 789 events.trigger(events.RepoDeleteEvent(repo))
788 790 except Exception:
789 791 log.error(traceback.format_exc())
790 792 raise
791 793
792 794 def grant_user_permission(self, repo, user, perm):
793 795 """
794 796 Grant permission for user on given repository, or update existing one
795 797 if found
796 798
797 799 :param repo: Instance of Repository, repository_id, or repository name
798 800 :param user: Instance of User, user_id or username
799 801 :param perm: Instance of Permission, or permission_name
800 802 """
801 803 user = self._get_user(user)
802 804 repo = self._get_repo(repo)
803 805 permission = self._get_perm(perm)
804 806
805 807 # check if we have that permission already
806 808 obj = self.sa.query(UserRepoToPerm) \
807 809 .filter(UserRepoToPerm.user == user) \
808 810 .filter(UserRepoToPerm.repository == repo) \
809 811 .scalar()
810 812 if obj is None:
811 813 # create new !
812 814 obj = UserRepoToPerm()
813 815 obj.repository = repo
814 816 obj.user = user
815 817 obj.permission = permission
816 818 self.sa.add(obj)
817 819 log.debug('Granted perm %s to %s on %s', perm, user, repo)
818 820 action_logger_generic(
819 821 'granted permission: {} to user: {} on repo: {}'.format(
820 822 perm, user, repo), namespace='security.repo')
821 823 return obj
822 824
823 825 def revoke_user_permission(self, repo, user):
824 826 """
825 827 Revoke permission for user on given repository
826 828
827 829 :param repo: Instance of Repository, repository_id, or repository name
828 830 :param user: Instance of User, user_id or username
829 831 """
830 832
831 833 user = self._get_user(user)
832 834 repo = self._get_repo(repo)
833 835
834 836 obj = self.sa.query(UserRepoToPerm) \
835 837 .filter(UserRepoToPerm.repository == repo) \
836 838 .filter(UserRepoToPerm.user == user) \
837 839 .scalar()
838 840 if obj:
839 841 self.sa.delete(obj)
840 842 log.debug('Revoked perm on %s for %s', repo, user)
841 843 action_logger_generic(
842 844 'revoked permission from user: {} on repo: {}'.format(
843 845 user, repo), namespace='security.repo')
844 846
845 847 def grant_user_group_permission(self, repo, group_name, perm):
846 848 """
847 849 Grant permission for user group on given repository, or update
848 850 existing one if found
849 851
850 852 :param repo: Instance of Repository, repository_id, or repository name
851 853 :param group_name: Instance of UserGroup, users_group_id,
852 854 or user group name
853 855 :param perm: Instance of Permission, or permission_name
854 856 """
855 857 repo = self._get_repo(repo)
856 858 group_name = self._get_user_group(group_name)
857 859 permission = self._get_perm(perm)
858 860
859 861 # check if we have that permission already
860 862 obj = self.sa.query(UserGroupRepoToPerm) \
861 863 .filter(UserGroupRepoToPerm.users_group == group_name) \
862 864 .filter(UserGroupRepoToPerm.repository == repo) \
863 865 .scalar()
864 866
865 867 if obj is None:
866 868 # create new
867 869 obj = UserGroupRepoToPerm()
868 870
869 871 obj.repository = repo
870 872 obj.users_group = group_name
871 873 obj.permission = permission
872 874 self.sa.add(obj)
873 875 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
874 876 action_logger_generic(
875 877 'granted permission: {} to usergroup: {} on repo: {}'.format(
876 878 perm, group_name, repo), namespace='security.repo')
877 879
878 880 return obj
879 881
880 882 def revoke_user_group_permission(self, repo, group_name):
881 883 """
882 884 Revoke permission for user group on given repository
883 885
884 886 :param repo: Instance of Repository, repository_id, or repository name
885 887 :param group_name: Instance of UserGroup, users_group_id,
886 888 or user group name
887 889 """
888 890 repo = self._get_repo(repo)
889 891 group_name = self._get_user_group(group_name)
890 892
891 893 obj = self.sa.query(UserGroupRepoToPerm) \
892 894 .filter(UserGroupRepoToPerm.repository == repo) \
893 895 .filter(UserGroupRepoToPerm.users_group == group_name) \
894 896 .scalar()
895 897 if obj:
896 898 self.sa.delete(obj)
897 899 log.debug('Revoked perm on %s for %s', repo, group_name)
898 900 action_logger_generic(
899 901 'revoked permission from usergroup: {} on repo: {}'.format(
900 902 group_name, repo), namespace='security.repo')
901 903
902 904 def delete_stats(self, repo_name):
903 905 """
904 906 removes stats for given repo
905 907
906 908 :param repo_name:
907 909 """
908 910 repo = self._get_repo(repo_name)
909 911 try:
910 912 obj = self.sa.query(Statistics) \
911 913 .filter(Statistics.repository == repo).scalar()
912 914 if obj:
913 915 self.sa.delete(obj)
914 916 except Exception:
915 917 log.error(traceback.format_exc())
916 918 raise
917 919
918 920 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
919 921 field_type='str', field_desc=''):
920 922
921 923 repo = self._get_repo(repo_name)
922 924
923 925 new_field = RepositoryField()
924 926 new_field.repository = repo
925 927 new_field.field_key = field_key
926 928 new_field.field_type = field_type # python type
927 929 new_field.field_value = field_value
928 930 new_field.field_desc = field_desc
929 931 new_field.field_label = field_label
930 932 self.sa.add(new_field)
931 933 return new_field
932 934
933 935 def delete_repo_field(self, repo_name, field_key):
934 936 repo = self._get_repo(repo_name)
935 937 field = RepositoryField.get_by_key_name(field_key, repo)
936 938 if field:
937 939 self.sa.delete(field)
938 940
939 941 def set_landing_rev(self, repo, landing_rev_name):
940 942 if landing_rev_name.startswith('branch:'):
941 943 landing_rev_name = landing_rev_name.split('branch:')[-1]
942 944 scm_instance = repo.scm_instance()
943 945 if scm_instance:
944 946 return scm_instance._remote.set_head_ref(landing_rev_name)
945 947
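# Hedged example of the branch-prefix handling above: a UI value such as
# 'branch:stable' is reduced to 'stable' before being handed to the
# backend's set_head_ref(); values without the prefix pass through as-is.
#
#   RepoModel().set_landing_rev(repo, 'branch:stable')
#   # equivalent to: repo.scm_instance()._remote.set_head_ref('stable')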
946 948 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
947 949 clone_uri=None, repo_store_location=None,
948 950 use_global_config=False, install_hooks=True):
949 951 """
950 952 makes a repository on the filesystem. It is group aware, meaning it will
951 953 create the repository within a group and alter the paths according to
952 954 the group location
953 955
954 956 :param repo_name:
955 957 :param alias:
956 958 :param parent:
957 959 :param clone_uri:
958 960 :param repo_store_location:
959 961 """
960 962 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
961 963 from rhodecode.model.scm import ScmModel
962 964
963 965 if Repository.NAME_SEP in repo_name:
964 966 raise ValueError(
965 967 'repo_name must not contain groups, got `%s`' % repo_name)
966 968
967 969 if isinstance(repo_group, RepoGroup):
968 970 new_parent_path = os.sep.join(repo_group.full_path_splitted)
969 971 else:
970 972 new_parent_path = repo_group or ''
971 973
972 974 if repo_store_location:
973 975 _paths = [repo_store_location]
974 976 else:
975 977 _paths = [self.repos_path, new_parent_path, repo_name]
976 978 # we need to make it str for mercurial
977 979 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
978 980
979 981 # check if this path is not a repository
980 982 if is_valid_repo(repo_path, self.repos_path):
981 983 raise Exception('This path %s is already a valid repository' % repo_path)
982 984
983 985 # check if this path is a group
984 986 if is_valid_repo_group(repo_path, self.repos_path):
985 987 raise Exception('This path %s is already a valid repo group' % repo_path)
986 988
987 989 log.info('creating repo %s in %s from url: `%s`',
988 990 repo_name, safe_unicode(repo_path),
989 991 obfuscate_url_pw(clone_uri))
990 992
991 993 backend = get_backend(repo_type)
992 994
993 995 config_repo = None if use_global_config else repo_name
994 996 if config_repo and new_parent_path:
995 997 config_repo = Repository.NAME_SEP.join(
996 998 (new_parent_path, config_repo))
997 999 config = make_db_config(clear_session=False, repo=config_repo)
998 1000 config.set('extensions', 'largefiles', '')
999 1001
1000 1002 # patch and reset hooks section of UI config to not run any
1001 1003 # hooks on creating remote repo
1002 1004 config.clear_section('hooks')
1003 1005
1004 1006 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
1005 1007 if repo_type == 'git':
1006 1008 repo = backend(
1007 1009 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
1008 1010 with_wire={"cache": False})
1009 1011 else:
1010 1012 repo = backend(
1011 1013 repo_path, config=config, create=True, src_url=clone_uri,
1012 1014 with_wire={"cache": False})
1013 1015
1014 1016 if install_hooks:
1015 1017 repo.install_hooks()
1016 1018
1017 1019 log.debug('Created repo %s with %s backend',
1018 1020 safe_unicode(repo_name), safe_unicode(repo_type))
1019 1021 return repo
1020 1022
1021 1023 def _rename_filesystem_repo(self, old, new):
1022 1024 """
1023 1025 renames repository on filesystem
1024 1026
1025 1027 :param old: old name
1026 1028 :param new: new name
1027 1029 """
1028 1030 log.info('renaming repo from %s to %s', old, new)
1029 1031
1030 1032 old_path = os.path.join(self.repos_path, old)
1031 1033 new_path = os.path.join(self.repos_path, new)
1032 1034 if os.path.isdir(new_path):
1033 1035 raise Exception(
1034 1036 'Was trying to rename to already existing dir %s' % new_path
1035 1037 )
1036 1038 shutil.move(old_path, new_path)
1037 1039
1038 1040 def _delete_filesystem_repo(self, repo):
1039 1041 """
1040 1042 removes the repo from the filesystem; the removal is actually made by
1041 1043 adding an rm__ prefix to the dir and renaming the internal .hg/.git dirs
1042 1044 so this repository is no longer valid for rhodecode. It can be undeleted
1043 1045 later on by reverting the renames on this repository
1044 1046
1045 1047 :param repo: repo object
1046 1048 """
1047 1049 rm_path = os.path.join(self.repos_path, repo.repo_name)
1048 1050 repo_group = repo.group
1049 1051 log.info("Removing repository %s", rm_path)
1050 1052 # disable hg/git internals so that it doesn't get detected as a repo
1051 1053 alias = repo.repo_type
1052 1054
1053 1055 config = make_db_config(clear_session=False)
1054 1056 config.set('extensions', 'largefiles', '')
1055 1057 bare = getattr(repo.scm_instance(config=config), 'bare', False)
1056 1058
1057 1059 # skip this for bare git repos
1058 1060 if not bare:
1059 1061 # disable VCS repo
1060 1062 vcs_path = os.path.join(rm_path, '.%s' % alias)
1061 1063 if os.path.exists(vcs_path):
1062 1064 shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))
1063 1065
1064 1066 _now = datetime.datetime.now()
1065 1067 _ms = str(_now.microsecond).rjust(6, '0')
1066 1068 _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
1067 1069 repo.just_name)
1068 1070 if repo_group:
1069 1071 # if repository is in group, prefix the removal path with the group
1070 1072 args = repo_group.full_path_splitted + [_d]
1071 1073 _d = os.path.join(*args)
1072 1074
1073 1075 if os.path.isdir(rm_path):
1074 1076 shutil.move(rm_path, os.path.join(self.repos_path, _d))
1075 1077
1076 1078 # finally cleanup diff-cache if it exists
1077 1079 cached_diffs_dir = repo.cached_diffs_dir
1078 1080 if os.path.isdir(cached_diffs_dir):
1079 1081 shutil.rmtree(cached_diffs_dir)
1080 1082
1081 1083
1082 1084 class ReadmeFinder:
1083 1085 """
1084 1086 Utility which knows how to find a readme for a specific commit.
1085 1087
1086 1088 The main idea is that this is a configurable algorithm. When creating an
1087 1089 instance you can define parameters, currently only the `default_renderer`.
1088 1090 Based on this configuration the method :meth:`search` behaves slightly
1089 1091 different.
1090 1092 """
1091 1093
1092 1094 readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
1093 1095 path_re = re.compile(r'^docs?', re.IGNORECASE)
1094 1096
1095 1097 default_priorities = {
1096 1098 None: 0,
1097 1099 '.text': 2,
1098 1100 '.txt': 3,
1099 1101 '.rst': 1,
1100 1102 '.rest': 2,
1101 1103 '.md': 1,
1102 1104 '.mkdn': 2,
1103 1105 '.mdown': 3,
1104 1106 '.markdown': 4,
1105 1107 }
1106 1108
1107 1109 path_priority = {
1108 1110 'doc': 0,
1109 1111 'docs': 1,
1110 1112 }
1111 1113
1112 1114 FALLBACK_PRIORITY = 99
1113 1115
1114 1116 RENDERER_TO_EXTENSION = {
1115 1117 'rst': ['.rst', '.rest'],
1116 1118 'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
1117 1119 }
1118 1120
1119 1121 def __init__(self, default_renderer=None):
1120 1122 self._default_renderer = default_renderer
1121 1123 self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
1122 1124 default_renderer, [])
1123 1125
1124 1126 def search(self, commit, path=u'/'):
1125 1127 """
1126 1128 Find a readme in the given `commit`.
1127 1129 """
1128 1130 nodes = commit.get_nodes(path)
1129 1131 matches = self._match_readmes(nodes)
1130 1132 matches = self._sort_according_to_priority(matches)
1131 1133 if matches:
1132 1134 return matches[0].node
1133 1135
1134 1136 paths = self._match_paths(nodes)
1135 1137 paths = self._sort_paths_according_to_priority(paths)
1136 1138 for path in paths:
1137 1139 match = self.search(commit, path=path)
1138 1140 if match:
1139 1141 return match
1140 1142
1141 1143 return None
1142 1144
1143 1145 def _match_readmes(self, nodes):
1144 1146 for node in nodes:
1145 1147 if not node.is_file():
1146 1148 continue
1147 1149 path = node.path.rsplit('/', 1)[-1]
1148 1150 match = self.readme_re.match(path)
1149 1151 if match:
1150 1152 extension = match.group(1)
1151 1153 yield ReadmeMatch(node, match, self._priority(extension))
1152 1154
1153 1155 def _match_paths(self, nodes):
1154 1156 for node in nodes:
1155 1157 if not node.is_dir():
1156 1158 continue
1157 1159 match = self.path_re.match(node.path)
1158 1160 if match:
1159 1161 yield node.path
1160 1162
1161 1163 def _priority(self, extension):
1162 1164 renderer_priority = (
1163 1165 0 if extension in self._renderer_extensions else 1)
1164 1166 extension_priority = self.default_priorities.get(
1165 1167 extension, self.FALLBACK_PRIORITY)
1166 1168 return (renderer_priority, extension_priority)
1167 1169
1168 1170 def _sort_according_to_priority(self, matches):
1169 1171
1170 1172 def priority_and_path(match):
1171 1173 return (match.priority, match.path)
1172 1174
1173 1175 return sorted(matches, key=priority_and_path)
1174 1176
1175 1177 def _sort_paths_according_to_priority(self, paths):
1176 1178
1177 1179 def priority_and_path(path):
1178 1180 return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)
1179 1181
1180 1182 return sorted(paths, key=priority_and_path)
1181 1183
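# Worked example of the priority rules above, assuming a finder created with
# default_renderer='markdown': 'README.md' scores (renderer 0, extension 1),
# 'README.rst' scores (1, 1) and 'README.txt' scores (1, 3), so
# _sort_according_to_priority() puts 'README.md' first.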
1182 1184
1183 1185 class ReadmeMatch:
1184 1186
1185 1187 def __init__(self, node, match, priority):
1186 1188 self.node = node
1187 1189 self._match = match
1188 1190 self.priority = priority
1189 1191
1190 1192 @property
1191 1193 def path(self):
1192 1194 return self.node.path
1193 1195
1194 1196 def __repr__(self):
1195 1197 return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
@@ -1,1025 +1,1028 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import traceback
27 27 import logging
28 28 import cStringIO
29 29
30 30 from sqlalchemy import func
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 import rhodecode
34 34 from rhodecode.lib.vcs import get_backend
35 35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 38 from rhodecode.lib import helpers as h, rc_cache
39 39 from rhodecode.lib.auth import (
40 40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 41 HasUserGroupPermissionAny)
42 42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 43 from rhodecode.lib import hooks_utils
44 44 from rhodecode.lib.utils import (
45 45 get_filesystem_repos, make_db_config)
46 46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 47 from rhodecode.lib.system_info import get_system_info
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 or_, false,
51 51 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
52 52 PullRequest, FileStore)
53 53 from rhodecode.model.settings import VcsSettingsModel
54 54 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 class UserTemp(object):
60 60 def __init__(self, user_id):
61 61 self.user_id = user_id
62 62
63 63 def __repr__(self):
64 64 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
65 65
66 66
67 67 class RepoTemp(object):
68 68 def __init__(self, repo_id):
69 69 self.repo_id = repo_id
70 70
71 71 def __repr__(self):
72 72 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
73 73
74 74
75 75 class SimpleCachedRepoList(object):
76 76 """
77 77 Lighter version of iteration over repos, without the scm initialisation
78 78 and with cache usage
79 79 """
80 80 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
81 81 self.db_repo_list = db_repo_list
82 82 self.repos_path = repos_path
83 83 self.order_by = order_by
84 84 self.reversed = (order_by or '').startswith('-')
85 85 if not perm_set:
86 86 perm_set = ['repository.read', 'repository.write',
87 87 'repository.admin']
88 88 self.perm_set = perm_set
89 89
90 90 def __len__(self):
91 91 return len(self.db_repo_list)
92 92
93 93 def __repr__(self):
94 94 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
95 95
96 96 def __iter__(self):
97 97 for dbr in self.db_repo_list:
98 98 # check permission at this level
99 99 has_perm = HasRepoPermissionAny(*self.perm_set)(
100 100 dbr.repo_name, 'SimpleCachedRepoList check')
101 101 if not has_perm:
102 102 continue
103 103
104 104 tmp_d = {
105 105 'name': dbr.repo_name,
106 106 'dbrepo': dbr.get_dict(),
107 107 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
108 108 }
109 109 yield tmp_d
110 110
111 111
112 112 class _PermCheckIterator(object):
113 113
114 114 def __init__(
115 115 self, obj_list, obj_attr, perm_set, perm_checker,
116 116 extra_kwargs=None):
117 117 """
118 118 Creates an iterator from a given list of objects, additionally
119 119 checking permissions for them against the perm_set var
120 120
121 121 :param obj_list: list of db objects
122 122 :param obj_attr: attribute of object to pass into perm_checker
123 123 :param perm_set: list of permissions to check
124 124 :param perm_checker: callable to check permissions against
125 125 """
126 126 self.obj_list = obj_list
127 127 self.obj_attr = obj_attr
128 128 self.perm_set = perm_set
129 129 self.perm_checker = perm_checker(*self.perm_set)
130 130 self.extra_kwargs = extra_kwargs or {}
131 131
132 132 def __len__(self):
133 133 return len(self.obj_list)
134 134
135 135 def __repr__(self):
136 136 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
137 137
138 138 def __iter__(self):
139 139 for db_obj in self.obj_list:
140 140 # check permission at this level
141 141 # NOTE(marcink): the __dict__.get() is ~4x faster then getattr()
142 142 name = db_obj.__dict__.get(self.obj_attr, None)
143 143 if not self.perm_checker(name, self.__class__.__name__, **self.extra_kwargs):
144 144 continue
145 145
146 146 yield db_obj
147 147
148 148
149 149 class RepoList(_PermCheckIterator):
150 150
151 151 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
152 152 if not perm_set:
153 153 perm_set = ['repository.read', 'repository.write', 'repository.admin']
154 154
155 155 super(RepoList, self).__init__(
156 156 obj_list=db_repo_list,
157 157 obj_attr='_repo_name', perm_set=perm_set,
158 158 perm_checker=HasRepoPermissionAny,
159 159 extra_kwargs=extra_kwargs)
160 160
161 161
162 162 class RepoGroupList(_PermCheckIterator):
163 163
164 164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
165 165 if not perm_set:
166 166 perm_set = ['group.read', 'group.write', 'group.admin']
167 167
168 168 super(RepoGroupList, self).__init__(
169 169 obj_list=db_repo_group_list,
170 170 obj_attr='_group_name', perm_set=perm_set,
171 171 perm_checker=HasRepoGroupPermissionAny,
172 172 extra_kwargs=extra_kwargs)
173 173
174 174
175 175 class UserGroupList(_PermCheckIterator):
176 176
177 177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
178 178 if not perm_set:
179 179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
180 180
181 181 super(UserGroupList, self).__init__(
182 182 obj_list=db_user_group_list,
183 183 obj_attr='users_group_name', perm_set=perm_set,
184 184 perm_checker=HasUserGroupPermissionAny,
185 185 extra_kwargs=extra_kwargs)
186 186
187 187
188 188 class ScmModel(BaseModel):
189 189 """
190 190 Generic Scm Model
191 191 """
192 192
193 193 @LazyProperty
194 194 def repos_path(self):
195 195 """
196 196 Gets the repositories root path from database
197 197 """
198 198
199 199 settings_model = VcsSettingsModel(sa=self.sa)
200 200 return settings_model.get_repos_location()
201 201
202 202 def repo_scan(self, repos_path=None):
203 203 """
204 204 Listing of repositories in given path. This path should not be a
205 205 repository itself. Return a dictionary of repository objects
206 206
207 207 :param repos_path: path to directory containing repositories
208 208 """
209 209
210 210 if repos_path is None:
211 211 repos_path = self.repos_path
212 212
213 213 log.info('scanning for repositories in %s', repos_path)
214 214
215 215 config = make_db_config()
216 216 config.set('extensions', 'largefiles', '')
217 217 repos = {}
218 218
219 219 for name, path in get_filesystem_repos(repos_path, recursive=True):
220 220 # name needs to be decomposed and put back together using the /
221 221 # since this is internal storage separator for rhodecode
222 222 name = Repository.normalize_repo_name(name)
223 223
224 224 try:
225 225 if name in repos:
226 226 raise RepositoryError('Duplicate repository name %s '
227 227 'found in %s' % (name, path))
228 228 elif path[0] in rhodecode.BACKENDS:
229 229 backend = get_backend(path[0])
230 230 repos[name] = backend(path[1], config=config,
231 231 with_wire={"cache": False})
232 232 except OSError:
233 233 continue
234 234 except RepositoryError:
235 235 log.exception('Failed to create a repo')
236 236 continue
237 237
238 238 log.debug('found %s paths with repositories', len(repos))
239 239 return repos
240 240
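# Hedged usage sketch: scan the configured storage root and inspect what was
# found; names and backends below are illustrative only.
#
#   found = ScmModel().repo_scan()
#   for name, scm_repo in found.items():
#       log.debug('found %s (%s)', name, scm_repo.alias)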
241 241 def get_repos(self, all_repos=None, sort_key=None):
242 242 """
243 243 Get all repositories from the db and for each repo create its
244 244 backend instance, filling that backend with information from the database
245 245
246 246 :param all_repos: list of repository names as strings
247 247 give specific repositories list, good for filtering
248 248
249 249 :param sort_key: initial sorting of repositories
250 250 """
251 251 if all_repos is None:
252 252 all_repos = self.sa.query(Repository)\
253 253 .filter(Repository.group_id == None)\
254 254 .order_by(func.lower(Repository.repo_name)).all()
255 255 repo_iter = SimpleCachedRepoList(
256 256 all_repos, repos_path=self.repos_path, order_by=sort_key)
257 257 return repo_iter
258 258
259 259 def get_repo_groups(self, all_groups=None):
260 260 if all_groups is None:
261 261 all_groups = RepoGroup.query()\
262 262 .filter(RepoGroup.group_parent_id == None).all()
263 263 return [x for x in RepoGroupList(all_groups)]
264 264
265 265 def mark_for_invalidation(self, repo_name, delete=False):
266 266 """
267 267 Mark caches of this repo invalid in the database. `delete` flag
268 268 removes the cache entries
269 269
270 270 :param repo_name: the repo_name for which caches should be marked
271 271 invalid, or deleted
272 272 :param delete: delete the entry keys instead of setting bool
273 273 flag on them, and also purge caches used by the dogpile
274 274 """
275 275 repo = Repository.get_by_repo_name(repo_name)
276 276
277 277 if repo:
278 278 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
279 279 repo_id=repo.repo_id)
280 280 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
281 281
282 282 repo_id = repo.repo_id
283 283 config = repo._config
284 284 config.set('extensions', 'largefiles', '')
285 285 repo.update_commit_cache(config=config, cs_cache=None)
286 286 if delete:
287 287 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
288 288 rc_cache.clear_cache_namespace(
289 289 'cache_repo', cache_namespace_uid, invalidate=True)
290 290
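# Hedged usage sketch: flag cached data as stale after an out-of-band change,
# or drop the dogpile cache entries entirely with delete=True.
#
#   ScmModel().mark_for_invalidation('some/repo')               # flag stale
#   ScmModel().mark_for_invalidation('some/repo', delete=True)  # purge keys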
291 291 def toggle_following_repo(self, follow_repo_id, user_id):
292 292
293 293 f = self.sa.query(UserFollowing)\
294 294 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
295 295 .filter(UserFollowing.user_id == user_id).scalar()
296 296
297 297 if f is not None:
298 298 try:
299 299 self.sa.delete(f)
300 300 return
301 301 except Exception:
302 302 log.error(traceback.format_exc())
303 303 raise
304 304
305 305 try:
306 306 f = UserFollowing()
307 307 f.user_id = user_id
308 308 f.follows_repo_id = follow_repo_id
309 309 self.sa.add(f)
310 310 except Exception:
311 311 log.error(traceback.format_exc())
312 312 raise
313 313
314 314 def toggle_following_user(self, follow_user_id, user_id):
315 315 f = self.sa.query(UserFollowing)\
316 316 .filter(UserFollowing.follows_user_id == follow_user_id)\
317 317 .filter(UserFollowing.user_id == user_id).scalar()
318 318
319 319 if f is not None:
320 320 try:
321 321 self.sa.delete(f)
322 322 return
323 323 except Exception:
324 324 log.error(traceback.format_exc())
325 325 raise
326 326
327 327 try:
328 328 f = UserFollowing()
329 329 f.user_id = user_id
330 330 f.follows_user_id = follow_user_id
331 331 self.sa.add(f)
332 332 except Exception:
333 333 log.error(traceback.format_exc())
334 334 raise
335 335
336 336 def is_following_repo(self, repo_name, user_id, cache=False):
337 337 r = self.sa.query(Repository)\
338 338 .filter(Repository.repo_name == repo_name).scalar()
339 339
340 340 f = self.sa.query(UserFollowing)\
341 341 .filter(UserFollowing.follows_repository == r)\
342 342 .filter(UserFollowing.user_id == user_id).scalar()
343 343
344 344 return f is not None
345 345
346 346 def is_following_user(self, username, user_id, cache=False):
347 347 u = User.get_by_username(username)
348 348
349 349 f = self.sa.query(UserFollowing)\
350 350 .filter(UserFollowing.follows_user == u)\
351 351 .filter(UserFollowing.user_id == user_id).scalar()
352 352
353 353 return f is not None
354 354
355 355 def get_followers(self, repo):
356 356 repo = self._get_repo(repo)
357 357
358 358 return self.sa.query(UserFollowing)\
359 359 .filter(UserFollowing.follows_repository == repo).count()
360 360
361 361 def get_forks(self, repo):
362 362 repo = self._get_repo(repo)
363 363 return self.sa.query(Repository)\
364 364 .filter(Repository.fork == repo).count()
365 365
366 366 def get_pull_requests(self, repo):
367 367 repo = self._get_repo(repo)
368 368 return self.sa.query(PullRequest)\
369 369 .filter(PullRequest.target_repo == repo)\
370 370 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
371 371
372 372 def get_artifacts(self, repo):
373 373 repo = self._get_repo(repo)
374 374 return self.sa.query(FileStore)\
375 375 .filter(FileStore.repo == repo)\
376 376 .filter(or_(FileStore.hidden == None, FileStore.hidden == false())).count()
377 377
378 378 def mark_as_fork(self, repo, fork, user):
379 379 repo = self._get_repo(repo)
380 380 fork = self._get_repo(fork)
381 381 if fork and repo.repo_id == fork.repo_id:
382 382 raise Exception("Cannot set repository as fork of itself")
383 383
384 384 if fork and repo.repo_type != fork.repo_type:
385 385 raise RepositoryError(
386 386 "Cannot set repository as fork of repository with other type")
387 387
388 388 repo.fork = fork
389 389 self.sa.add(repo)
390 390 return repo
391 391
392 392 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
393 393 dbrepo = self._get_repo(repo)
394 394 remote_uri = remote_uri or dbrepo.clone_uri
395 395 if not remote_uri:
396 396 raise Exception("This repository doesn't have a clone uri")
397 397
398 398 repo = dbrepo.scm_instance(cache=False)
399 399 repo.config.clear_section('hooks')
400 400
401 401 try:
402 402 # NOTE(marcink): add extra validation so we skip invalid urls
403 403 # this is because these tasks can be executed via the scheduler without
404 404 # proper validation of remote_uri
405 405 if validate_uri:
406 406 config = make_db_config(clear_session=False)
407 407 url_validator(remote_uri, dbrepo.repo_type, config)
408 408 except InvalidCloneUrl:
409 409 raise
410 410
411 411 repo_name = dbrepo.repo_name
412 412 try:
413 413 # TODO: we need to make sure those operations call proper hooks !
414 414 repo.fetch(remote_uri)
415 415
416 416 self.mark_for_invalidation(repo_name)
417 417 except Exception:
418 418 log.error(traceback.format_exc())
419 419 raise
420 420
421 421 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
422 422 dbrepo = self._get_repo(repo)
423 423 remote_uri = remote_uri or dbrepo.push_uri
424 424 if not remote_uri:
425 425 raise Exception("This repository doesn't have a push uri")
426 426
427 427 repo = dbrepo.scm_instance(cache=False)
428 428 repo.config.clear_section('hooks')
429 429
430 430 try:
431 431 # NOTE(marcink): add extra validation so we skip invalid urls
432 432 # this is because these tasks can be executed via the scheduler without
433 433 # proper validation of remote_uri
434 434 if validate_uri:
435 435 config = make_db_config(clear_session=False)
436 436 url_validator(remote_uri, dbrepo.repo_type, config)
437 437 except InvalidCloneUrl:
438 438 raise
439 439
440 440 try:
441 441 repo.push(remote_uri)
442 442 except Exception:
443 443 log.error(traceback.format_exc())
444 444 raise
445 445
446 446 def commit_change(self, repo, repo_name, commit, user, author, message,
447 447 content, f_path):
448 448 """
449 449 Commits changes
450 450
451 451 :param repo: SCM instance
452 452
453 453 """
454 454 user = self._get_user(user)
455 455
456 456 # decoding here will ensure that we have properly encoded values;
457 457 # in any other case this will throw exceptions and deny the commit
458 458 content = safe_str(content)
459 459 path = safe_str(f_path)
460 460 # message and author need to be unicode;
461 461 # the proper backend should then translate that into the required type
462 462 message = safe_unicode(message)
463 463 author = safe_unicode(author)
464 464 imc = repo.in_memory_commit
465 465 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
466 466 try:
467 467 # TODO: handle pre-push action !
468 468 tip = imc.commit(
469 469 message=message, author=author, parents=[commit],
470 470 branch=commit.branch)
471 471 except Exception as e:
472 472 log.error(traceback.format_exc())
473 473 raise IMCCommitError(str(e))
474 474 finally:
475 475 # always clear caches, if commit fails we want fresh object also
476 476 self.mark_for_invalidation(repo_name)
477 477
478 478 # We trigger the post-push action
479 479 hooks_utils.trigger_post_push_hook(
480 480 username=user.username, action='push_local', hook_type='post_push',
481 481 repo_name=repo_name, repo_type=repo.alias, commit_ids=[tip.raw_id])
482 482 return tip
483 483
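# Hedged usage sketch for commit_change(): note that `repo` here is an SCM
# instance, not a db Repository, and `commit` is the parent commit whose file
# at `f_path` is being changed. All objects below are illustrative.
#
#   tip = ScmModel().commit_change(
#       repo=db_repo.scm_instance(), repo_name=db_repo.repo_name,
#       commit=parent_commit, user=cur_user,
#       author=u'Jane Doe <jane@example.com>',
#       message=u'Update README', content='new text', f_path='README.rst')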
484 484 def _sanitize_path(self, f_path):
485 485 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
486 486 raise NonRelativePathError('%s is not a relative path' % f_path)
487 487 if f_path:
488 488 f_path = os.path.normpath(f_path)
489 489 return f_path
490 490
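# Behaviour of the sanitizer above, by example:
#
#   _sanitize_path('docs/index.rst')    -> 'docs/index.rst'
#   _sanitize_path('docs//./index.rst') -> 'docs/index.rst' (normalized)
#   _sanitize_path('/etc/passwd')       -> raises NonRelativePathError
#   _sanitize_path('../secret')         -> raises NonRelativePathError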
491 491 def get_dirnode_metadata(self, request, commit, dir_node):
492 492 if not dir_node.is_dir():
493 493 return []
494 494
495 495 data = []
496 496 for node in dir_node:
497 497 if not node.is_file():
498 498 # we skip file-nodes
499 499 continue
500 500
501 501 last_commit = node.last_commit
502 502 last_commit_date = last_commit.date
503 503 data.append({
504 504 'name': node.name,
505 505 'size': h.format_byte_size_binary(node.size),
506 506 'modified_at': h.format_date(last_commit_date),
507 507 'modified_ts': last_commit_date.isoformat(),
508 508 'revision': last_commit.revision,
509 509 'short_id': last_commit.short_id,
510 510 'message': h.escape(last_commit.message),
511 511 'author': h.escape(last_commit.author),
512 512 'user_profile': h.gravatar_with_user(
513 513 request, last_commit.author),
514 514 })
515 515
516 516 return data
517 517
518 518 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
519 519 extended_info=False, content=False, max_file_bytes=None):
520 520 """
521 521 recursive walk in the root dir; returns a set of all paths in that dir
522 522 based on the repository walk function
523 523
524 524 :param repo_name: name of repository
525 525 :param commit_id: commit id for which to list nodes
526 526 :param root_path: root path to list
527 527 :param flat: return as a list, if False returns a dict with description
528 528 :param extended_info: show additional info such as md5, binary, size etc
529 529 :param content: add nodes content to the return data
530 530 :param max_file_bytes: will not return file contents over this limit
531 531
532 532 """
533 533 _files = list()
534 534 _dirs = list()
535 535 try:
536 536 _repo = self._get_repo(repo_name)
537 537 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
538 538 root_path = root_path.lstrip('/')
539 539 for __, dirs, files in commit.walk(root_path):
540 540
541 541 for f in files:
542 542 _content = None
543 543 _data = f_name = f.unicode_path
544 544
545 545 if not flat:
546 546 _data = {
547 547 "name": h.escape(f_name),
548 548 "type": "file",
549 549 }
550 550 if extended_info:
551 551 _data.update({
552 552 "md5": f.md5,
553 553 "binary": f.is_binary,
554 554 "size": f.size,
555 555 "extension": f.extension,
556 556 "mimetype": f.mimetype,
557 557 "lines": f.lines()[0]
558 558 })
559 559
560 560 if content:
561 561 over_size_limit = (max_file_bytes is not None
562 562 and f.size > max_file_bytes)
563 563 full_content = None
564 564 if not f.is_binary and not over_size_limit:
565 565 full_content = safe_str(f.content)
566 566
567 567 _data.update({
568 568 "content": full_content,
569 569 })
570 570 _files.append(_data)
571 571
572 572 for d in dirs:
573 573 _data = d_name = d.unicode_path
574 574 if not flat:
575 575 _data = {
576 576 "name": h.escape(d_name),
577 577 "type": "dir",
578 578 }
579 579 if extended_info:
580 580 _data.update({
581 581 "md5": None,
582 582 "binary": None,
583 583 "size": None,
584 584 "extension": None,
585 585 })
586 586 if content:
587 587 _data.update({
588 588 "content": None
589 589 })
590 590 _dirs.append(_data)
591 591 except RepositoryError:
592 592 log.exception("Exception in get_nodes")
593 593 raise
594 594
595 595 return _dirs, _files
596 596
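# Hedged usage sketch: flat listing of all paths under docs/ at a given
# commit; flat=False would instead return dicts with per-node metadata.
#
#   dirs, files = ScmModel().get_nodes(
#       'some/repo', commit_id='tip', root_path='docs', flat=True)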
597 597 def get_quick_filter_nodes(self, repo_name, commit_id, root_path='/'):
598 598 """
599 599 Generate files for quick filter in files view
600 600 """
601 601
602 602 _files = list()
603 603 _dirs = list()
604 604 try:
605 605 _repo = self._get_repo(repo_name)
606 606 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
607 607 root_path = root_path.lstrip('/')
608 608 for __, dirs, files in commit.walk(root_path):
609 609
610 610 for f in files:
611 611
612 612 _data = {
613 613 "name": h.escape(f.unicode_path),
614 614 "type": "file",
615 615 }
616 616
617 617 _files.append(_data)
618 618
619 619 for d in dirs:
620 620
621 621 _data = {
622 622 "name": h.escape(d.unicode_path),
623 623 "type": "dir",
624 624 }
625 625
626 626 _dirs.append(_data)
627 627 except RepositoryError:
628 628 log.exception("Exception in get_quick_filter_nodes")
629 629 raise
630 630
631 631 return _dirs, _files
632 632
633 633 def get_node(self, repo_name, commit_id, file_path,
634 634 extended_info=False, content=False, max_file_bytes=None, cache=True):
635 635 """
636 636 retrieve single node from commit
637 637 """
638 638 try:
639 639
640 640 _repo = self._get_repo(repo_name)
641 641 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
642 642
643 643 file_node = commit.get_node(file_path)
644 644 if file_node.is_dir():
645 645 raise RepositoryError('The given path is a directory')
646 646
647 647 _content = None
648 648 f_name = file_node.unicode_path
649 649
650 650 file_data = {
651 651 "name": h.escape(f_name),
652 652 "type": "file",
653 653 }
654 654
655 655 if extended_info:
656 656 file_data.update({
657 657 "extension": file_node.extension,
658 658 "mimetype": file_node.mimetype,
659 659 })
660 660
661 661 if cache:
662 662 md5 = file_node.md5
663 663 is_binary = file_node.is_binary
664 664 size = file_node.size
665 665 else:
666 666 is_binary, md5, size, _content = file_node.metadata_uncached()
667 667
668 668 file_data.update({
669 669 "md5": md5,
670 670 "binary": is_binary,
671 671 "size": size,
672 672 })
673 673
674 674 if content and cache:
675 675 # get content + cache
676 676 size = file_node.size
677 677 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
678 678 full_content = None
679 679 all_lines = 0
680 680 if not file_node.is_binary and not over_size_limit:
681 681 full_content = safe_unicode(file_node.content)
682 682 all_lines, empty_lines = file_node.count_lines(full_content)
683 683
684 684 file_data.update({
685 685 "content": full_content,
686 686 "lines": all_lines
687 687 })
688 688 elif content:
689 689 # get content *without* cache
690 690 if _content is None:
691 691 is_binary, md5, size, _content = file_node.metadata_uncached()
692 692
693 693 over_size_limit = (max_file_bytes is not None and size > max_file_bytes)
694 694 full_content = None
695 695 all_lines = 0
696 696 if not is_binary and not over_size_limit:
697 697 full_content = safe_unicode(_content)
698 698 all_lines, empty_lines = file_node.count_lines(full_content)
699 699
700 700 file_data.update({
701 701 "content": full_content,
702 702 "lines": all_lines
703 703 })
704 704
705 705 except RepositoryError:
706 706 log.exception("Exception in get_node")
707 707 raise
708 708
709 709 return file_data
710 710
711 711 def get_fts_data(self, repo_name, commit_id, root_path='/'):
712 712 """
713 713 Fetch the node tree for use in full-text search
714 714 """
715 715
716 716 tree_info = list()
717 717
718 718 try:
719 719 _repo = self._get_repo(repo_name)
720 720 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
721 721 root_path = root_path.lstrip('/')
722 722 for __, dirs, files in commit.walk(root_path):
723 723
724 724 for f in files:
725 725 is_binary, md5, size, _content = f.metadata_uncached()
726 726 _data = {
727 727 "name": f.unicode_path,
728 728 "md5": md5,
729 729 "extension": f.extension,
730 730 "binary": is_binary,
731 731 "size": size
732 732 }
733 733
734 734 tree_info.append(_data)
735 735
736 736 except RepositoryError:
737 737 log.exception("Exception in get_fts_data")
738 738 raise
739 739
740 740 return tree_info
741 741
742 742 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
743 743 author=None, trigger_push_hook=True):
744 744 """
745 745 Commits the given nodes into `repo`
746 746
747 747 :param user: RhodeCode User object or user_id, the committer
748 748 :param repo: RhodeCode Repository object
749 749 :param message: commit message
750 750 :param nodes: mapping {filename:{'content':content},...}
751 751 :param parent_commit: parent commit; if empty, this is the
752 752 initial commit
753 753 :param author: author of the commit; can be different from the
754 754 committer, but only for git
755 755 :param trigger_push_hook: trigger push hooks
756 756
757 757 :returns: the newly created commit
758 758 """
759 759
760 760 user = self._get_user(user)
761 761 scm_instance = repo.scm_instance(cache=False)
762 762
763 763 processed_nodes = []
764 764 for f_path in nodes:
765 765 f_path = self._sanitize_path(f_path)
766 766 content = nodes[f_path]['content']
767 767 f_path = safe_str(f_path)
768 768 # decoding here forces properly encoded values;
769 769 # any other case will raise an exception and deny the commit
770 770 if isinstance(content, (basestring,)):
771 771 content = safe_str(content)
772 772 elif isinstance(content, (file, cStringIO.OutputType,)):
773 773 content = content.read()
774 774 else:
775 775 raise Exception('Content is of unrecognized type %s' % (
776 776 type(content)
777 777 ))
778 778 processed_nodes.append((f_path, content))
779 779
780 780 message = safe_unicode(message)
781 781 committer = user.full_contact
782 782 author = safe_unicode(author) if author else committer
783 783
784 784 imc = scm_instance.in_memory_commit
785 785
786 786 if not parent_commit:
787 787 parent_commit = EmptyCommit(alias=scm_instance.alias)
788 788
789 789 if isinstance(parent_commit, EmptyCommit):
790 790 # EmptyCommit means we're editing an empty repository
791 791 parents = None
792 792 else:
793 793 parents = [parent_commit]
794 794 # add multiple nodes
795 795 for path, content in processed_nodes:
796 796 imc.add(FileNode(path, content=content))
797 797 # TODO: handle pre push scenario
798 798 tip = imc.commit(message=message,
799 799 author=author,
800 800 parents=parents,
801 801 branch=parent_commit.branch)
802 802
803 803 self.mark_for_invalidation(repo.repo_name)
804 804 if trigger_push_hook:
805 805 hooks_utils.trigger_post_push_hook(
806 806 username=user.username, action='push_local',
807 807 repo_name=repo.repo_name, repo_type=scm_instance.alias,
808 808 hook_type='post_push',
809 809 commit_ids=[tip.raw_id])
810 810 return tip
811 811
812 812 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
813 813 author=None, trigger_push_hook=True):
814 814 user = self._get_user(user)
815 815 scm_instance = repo.scm_instance(cache=False)
816 816
817 817 message = safe_unicode(message)
818 818 committer = user.full_contact
819 819 author = safe_unicode(author) if author else committer
820 820
821 821 imc = scm_instance.in_memory_commit
822 822
823 823 if not parent_commit:
824 824 parent_commit = EmptyCommit(alias=scm_instance.alias)
825 825
826 826 if isinstance(parent_commit, EmptyCommit):
827 827 # EmptyCommit means we're editing an empty repository
828 828 parents = None
829 829 else:
830 830 parents = [parent_commit]
831 831
832 832 # add multiple nodes
833 833 for _filename, data in nodes.items():
834 834 # new filename, can be renamed from the old one; also sanitize
835 835 # the path against relative-path tricks like ../../ etc.
836 836 filename = self._sanitize_path(data['filename'])
837 837 old_filename = self._sanitize_path(_filename)
838 838 content = data['content']
839 839 file_mode = data.get('mode')
840 840 filenode = FileNode(old_filename, content=content, mode=file_mode)
841 841 op = data['op']
842 842 if op == 'add':
843 843 imc.add(filenode)
844 844 elif op == 'del':
845 845 imc.remove(filenode)
846 846 elif op == 'mod':
847 847 if filename != old_filename:
848 848 # TODO: handle renames more efficiently, needs vcs lib changes
849 849 imc.remove(filenode)
850 850 imc.add(FileNode(filename, content=content, mode=file_mode))
851 851 else:
852 852 imc.change(filenode)
853 853
854 854 try:
855 855 # TODO: handle pre push scenario commit changes
856 856 tip = imc.commit(message=message,
857 857 author=author,
858 858 parents=parents,
859 859 branch=parent_commit.branch)
860 860 except NodeNotChangedError:
861 861 raise
862 862 except Exception as e:
863 863 log.exception("Unexpected exception during call to imc.commit")
864 864 raise IMCCommitError(str(e))
865 865 finally:
866 866 # always clear caches; even if the commit fails we want a fresh object
867 867 self.mark_for_invalidation(repo.repo_name)
868 868
869 869 if trigger_push_hook:
870 870 hooks_utils.trigger_post_push_hook(
871 871 username=user.username, action='push_local', hook_type='post_push',
872 872 repo_name=repo.repo_name, repo_type=scm_instance.alias,
873 873 commit_ids=[tip.raw_id])
874 874
875 875 return tip
876 876
877 877 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
878 878 author=None, trigger_push_hook=True):
879 879 """
880 880 Deletes the given nodes from `repo`
881 881
882 882 :param user: RhodeCode User object or user_id, the committer
883 883 :param repo: RhodeCode Repository object
884 884 :param message: commit message
885 885 :param nodes: mapping {filename:{'content':content},...}
886 886 :param parent_commit: parent commit; if empty, this is the initial
887 887 commit
888 888 :param author: author of the commit; can be different from the
889 889 committer, but only for git
890 890 :param trigger_push_hook: trigger push hooks
891 891
892 892 :returns: new commit after deletion
893 893 """
894 894
895 895 user = self._get_user(user)
896 896 scm_instance = repo.scm_instance(cache=False)
897 897
898 898 processed_nodes = []
899 899 for f_path in nodes:
900 900 f_path = self._sanitize_path(f_path)
901 901 # content can be empty, but for compatibility it accepts the same dict
902 902 # structure as add_nodes
903 903 content = nodes[f_path].get('content')
904 904 processed_nodes.append((f_path, content))
905 905
906 906 message = safe_unicode(message)
907 907 committer = user.full_contact
908 908 author = safe_unicode(author) if author else committer
909 909
910 910 imc = scm_instance.in_memory_commit
911 911
912 912 if not parent_commit:
913 913 parent_commit = EmptyCommit(alias=scm_instance.alias)
914 914
915 915 if isinstance(parent_commit, EmptyCommit):
916 916 # EmptyCommit means we're editing an empty repository
917 917 parents = None
918 918 else:
919 919 parents = [parent_commit]
920 920 # add multiple nodes
921 921 for path, content in processed_nodes:
922 922 imc.remove(FileNode(path, content=content))
923 923
924 924 # TODO: handle pre push scenario
925 925 tip = imc.commit(message=message,
926 926 author=author,
927 927 parents=parents,
928 928 branch=parent_commit.branch)
929 929
930 930 self.mark_for_invalidation(repo.repo_name)
931 931 if trigger_push_hook:
932 932 hooks_utils.trigger_post_push_hook(
933 933 username=user.username, action='push_local', hook_type='post_push',
934 934 repo_name=repo.repo_name, repo_type=scm_instance.alias,
935 935 commit_ids=[tip.raw_id])
936 936 return tip
937 937
938 938 def strip(self, repo, commit_id, branch):
939 939 scm_instance = repo.scm_instance(cache=False)
940 940 scm_instance.config.clear_section('hooks')
941 941 scm_instance.strip(commit_id, branch)
942 942 self.mark_for_invalidation(repo.repo_name)
943 943
944 944 def get_unread_journal(self):
945 945 return self.sa.query(UserLog).count()
946 946
947 947 @classmethod
948 948 def backend_landing_ref(cls, repo_type):
949 949 """
950 950 Return a default landing ref based on a repository type.
951 951 """
952 952
953 953 landing_ref = {
954 954 'hg': ('branch:default', 'default'),
955 955 'git': ('branch:master', 'master'),
956 956 'svn': ('rev:tip', 'latest tip'),
957 957 'default': ('rev:tip', 'latest tip'),
958 958 }
959 959
960 960 return landing_ref.get(repo_type) or landing_ref['default']
961 961
962 962 def get_repo_landing_revs(self, translator, repo=None):
963 963 """
964 964 Generates select options with tags, branches, and bookmarks
965 965 (bookmarks for hg only), grouped by type
966 966
967 967 :param repo: repository name or Repository object
968 968 """
969 from rhodecode.lib.vcs.backends.git import GitRepository
970
969 971 _ = translator
970 972 repo = self._get_repo(repo)
971 973
972 974 if repo:
973 975 repo_type = repo.repo_type
974 976 else:
975 977 repo_type = 'default'
976 978
977 979 default_landing_ref, landing_ref_lbl = self.backend_landing_ref(repo_type)
978 980
979 981 default_ref_options = [
980 982 [default_landing_ref, landing_ref_lbl]
981 983 ]
982 984 default_choices = [
983 985 default_landing_ref
984 986 ]
985 987
986 988 if not repo:
989 # presented at NEW repo creation
987 990 return default_choices, default_ref_options
988 991
989 992 repo = repo.scm_instance()
990 993
991 ref_options = [('rev:tip', 'latest tip')]
992 choices = ['rev:tip']
994 ref_options = [(default_landing_ref, landing_ref_lbl)]
995 choices = [default_landing_ref]
993 996
994 997 # branches
995 998 branch_group = [(u'branch:%s' % safe_unicode(b), safe_unicode(b)) for b in repo.branches]
996 999 if not branch_group:
997 1000 # new repo, or a repo without any branch yet?
998 1001 branch_group = default_ref_options
999 1002
1000 1003 branches_group = (branch_group, _("Branches"))
1001 1004 ref_options.append(branches_group)
1002 1005 choices.extend([x[0] for x in branches_group[0]])
1003 1006
1004 1007 # bookmarks for HG
1005 1008 if repo.alias == 'hg':
1006 1009 bookmarks_group = (
1007 1010 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
1008 1011 for b in repo.bookmarks],
1009 1012 _("Bookmarks"))
1010 1013 ref_options.append(bookmarks_group)
1011 1014 choices.extend([x[0] for x in bookmarks_group[0]])
1012 1015
1013 1016 # tags
1014 1017 tags_group = (
1015 1018 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
1016 1019 for t in repo.tags],
1017 1020 _("Tags"))
1018 1021 ref_options.append(tags_group)
1019 1022 choices.extend([x[0] for x in tags_group[0]])
1020 1023
1021 1024 return choices, ref_options
1022 1025
1023 1026 def get_server_info(self, environ=None):
1024 1027 server_info = get_system_info(environ)
1025 1028 return server_info
@@ -1,454 +1,450 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import colander
22 22 import deform.widget
23 23
24 24 from rhodecode.translation import _
25 25 from rhodecode.model.validation_schema.utils import convert_to_optgroup, username_converter
26 26 from rhodecode.model.validation_schema import validators, preparers, types
27 27
28 28 DEFAULT_LANDING_REF = 'rev:tip'
29 DEFAULT_BACKEND_LANDING_REF = {
30 'hg': 'branch:default',
31 'git': 'branch:master',
32 'svn': 'rev:tip',
33 }
34 29
35 30
36 31 def get_group_and_repo(repo_name):
37 32 from rhodecode.model.repo_group import RepoGroupModel
38 33 return RepoGroupModel()._get_group_name_and_parent(
39 34 repo_name, get_object=True)
40 35
41 36
42 37 def get_repo_group(repo_group_id):
43 38 from rhodecode.model.repo_group import RepoGroup
44 39 return RepoGroup.get(repo_group_id), RepoGroup.CHOICES_SEPARATOR
45 40
46 41
47 42 @colander.deferred
48 43 def deferred_repo_type_validator(node, kw):
49 44 options = kw.get('repo_type_options', [])
50 45 return colander.OneOf([x for x in options])
51 46
52 47
53 48 @colander.deferred
54 49 def deferred_repo_owner_validator(node, kw):
55 50
56 51 def repo_owner_validator(node, value):
57 52 from rhodecode.model.db import User
58 53 value = username_converter(value)
59 54 existing = User.get_by_username(value)
60 55 if not existing:
61 56 msg = _(u'Repo owner with id `{}` does not exist').format(value)
62 57 raise colander.Invalid(node, msg)
63 58
64 59 return repo_owner_validator
65 60
66 61
67 62 @colander.deferred
68 63 def deferred_landing_ref_validator(node, kw):
69 64 options = kw.get(
70 65 'repo_ref_options', [DEFAULT_LANDING_REF])
71 66 return colander.OneOf([x for x in options])
72 67
73 68
74 69 @colander.deferred
75 70 def deferred_sync_uri_validator(node, kw):
76 71 repo_type = kw.get('repo_type')
77 72 validator = validators.CloneUriValidator(repo_type)
78 73 return validator
79 74
80 75
81 76 @colander.deferred
82 77 def deferred_landing_ref_widget(node, kw):
78 from rhodecode.model.scm import ScmModel
79
83 80 repo_type = kw.get('repo_type')
84 81 default_opts = []
85 82 if repo_type:
86 default_opts.append(
87 (DEFAULT_BACKEND_LANDING_REF[repo_type],
88 DEFAULT_BACKEND_LANDING_REF[repo_type]))
83 default_landing_ref, _lbl = ScmModel.backend_landing_ref(repo_type)
84 default_opts.append((default_landing_ref, default_landing_ref))
89 85
90 86 items = kw.get('repo_ref_items', default_opts)
91 87 items = convert_to_optgroup(items)
92 88 return deform.widget.Select2Widget(values=items)
93 89
94 90
95 91 @colander.deferred
96 92 def deferred_fork_of_validator(node, kw):
97 93 old_values = kw.get('old_values') or {}
98 94
99 95 def fork_of_validator(node, value):
100 96 from rhodecode.model.db import Repository, RepoGroup
101 97 existing = Repository.get_by_repo_name(value)
102 98 if not existing:
103 99 msg = _(u'Fork with id `{}` does not exist').format(value)
104 100 raise colander.Invalid(node, msg)
105 101 elif old_values['repo_name'] == existing.repo_name:
106 102 msg = _(u'Cannot set the `fork of` '
107 103 u'parameter of this repository to itself').format(value)
108 104 raise colander.Invalid(node, msg)
109 105
110 106 return fork_of_validator
111 107
112 108
113 109 @colander.deferred
114 110 def deferred_can_write_to_group_validator(node, kw):
115 111 request_user = kw.get('user')
116 112 old_values = kw.get('old_values') or {}
117 113
118 114 def can_write_to_group_validator(node, value):
119 115 """
120 116 Checks if given repo path is writable by user. This includes checks if
121 117 user is allowed to create repositories under root path or under
122 118 repo group paths
123 119 """
124 120
125 121 from rhodecode.lib.auth import (
126 122 HasPermissionAny, HasRepoGroupPermissionAny)
127 123 from rhodecode.model.repo_group import RepoGroupModel
128 124
129 125 messages = {
130 126 'invalid_repo_group':
131 127 _(u"Repository group `{}` does not exist"),
132 128 # permissions denied we expose as not existing, to prevent
133 129 # resource discovery
134 130 'permission_denied':
135 131 _(u"Repository group `{}` does not exist"),
136 132 'permission_denied_root':
137 133 _(u"You do not have the permission to store "
138 134 u"repositories in the root location.")
139 135 }
140 136
141 137 value = value['repo_group_name']
142 138
143 139 is_root_location = value is types.RootLocation
144 140 # NOT initialized validators, we must call them
145 141 can_create_repos_at_root = HasPermissionAny('hg.admin', 'hg.create.repository')
146 142
147 143 # if the value is the root location, we simply need to check if we can
148 144 # write to the root location
149 145 if is_root_location:
150 146
151 147 if can_create_repos_at_root(user=request_user):
152 148 # we can create repos at the root level; no more checks
153 149 # are required
154 150 return
155 151 else:
156 152 old_name = old_values.get('repo_name')
157 153 if old_name and old_name == old_values.get('submitted_repo_name'):
158 154 # since we didn't change the name, we can skip validation and
159 155 # allow current users without store-in-root permissions to update
160 156 return
161 157
162 158 # "fake" node name as repo_name, otherwise we oddly report
163 159 # the error as if it was coming from repo_group
164 160 # however repo_group is empty when using root location.
165 161 node.name = 'repo_name'
166 162 raise colander.Invalid(node, messages['permission_denied_root'])
167 163
168 164 # parent group does not exist? raise an error
169 165 repo_group = RepoGroupModel().get_by_group_name(value)
170 166 if value and not repo_group:
171 167 raise colander.Invalid(
172 168 node, messages['invalid_repo_group'].format(value))
173 169
174 170 gr_name = repo_group.group_name
175 171
176 172 # check if 'create repositories with write permission on group' is enabled
177 173 create_on_write = HasPermissionAny(
178 174 'hg.create.write_on_repogroup.true')(user=request_user)
179 175
180 176 group_admin = HasRepoGroupPermissionAny('group.admin')(
181 177 gr_name, 'can write into group validator', user=request_user)
182 178 group_write = HasRepoGroupPermissionAny('group.write')(
183 179 gr_name, 'can write into group validator', user=request_user)
184 180
185 181 forbidden = not (group_admin or (group_write and create_on_write))
186 182
187 183 # TODO: handling of old values, and detecting no-change in path
188 184 # to skip permission checks in such cases. This only needs to be
189 185 # implemented if we use this schema in forms as well
190 186
191 187 # gid = (old_data['repo_group'].get('group_id')
192 188 # if (old_data and 'repo_group' in old_data) else None)
193 189 # value_changed = gid != safe_int(value)
194 190 # new = not old_data
195 191
196 192 # only check if the value changed; there's a case where someone had
197 193 # write permissions revoked on a repository they created. We don't
198 194 # need to check permission if the group value in the form box
199 195 # did not change
200 196 # if value_changed or new:
201 197 # # parent group need to be existing
202 198 # TODO: ENDS HERE
203 199
204 200 if repo_group and forbidden:
205 201 msg = messages['permission_denied'].format(value)
206 202 raise colander.Invalid(node, msg)
207 203
208 204 return can_write_to_group_validator
209 205
210 206
211 207 @colander.deferred
212 208 def deferred_unique_name_validator(node, kw):
213 209 request_user = kw.get('user')
214 210 old_values = kw.get('old_values') or {}
215 211
216 212 def unique_name_validator(node, value):
217 213 from rhodecode.model.db import Repository, RepoGroup
218 214 name_changed = value != old_values.get('repo_name')
219 215
220 216 existing = Repository.get_by_repo_name(value)
221 217 if name_changed and existing:
222 218 msg = _(u'Repository with name `{}` already exists').format(value)
223 219 raise colander.Invalid(node, msg)
224 220
225 221 existing_group = RepoGroup.get_by_group_name(value)
226 222 if name_changed and existing_group:
227 223 msg = _(u'Repository group with name `{}` already exists').format(
228 224 value)
229 225 raise colander.Invalid(node, msg)
230 226 return unique_name_validator
231 227
232 228
233 229 @colander.deferred
234 230 def deferred_repo_name_validator(node, kw):
235 231 def no_git_suffix_validator(node, value):
236 232 if value.endswith('.git'):
237 233 msg = _('Repository name cannot end with .git')
238 234 raise colander.Invalid(node, msg)
239 235 return colander.All(
240 236 no_git_suffix_validator, validators.valid_name_validator)
241 237
242 238
243 239 @colander.deferred
244 240 def deferred_repo_group_validator(node, kw):
245 241 options = kw.get(
246 242 'repo_repo_group_options')
247 243 return colander.OneOf([x for x in options])
248 244
249 245
250 246 @colander.deferred
251 247 def deferred_repo_group_widget(node, kw):
252 248 items = kw.get('repo_repo_group_items')
253 249 return deform.widget.Select2Widget(values=items)
254 250
255 251
256 252 class GroupType(colander.Mapping):
257 253 def _validate(self, node, value):
258 254 try:
259 255 return dict(repo_group_name=value)
260 256 except Exception as e:
261 257 raise colander.Invalid(
262 258 node, '"${val}" is not a mapping type: ${err}'.format(
263 259 val=value, err=e))
264 260
265 261 def deserialize(self, node, cstruct):
266 262 if cstruct is colander.null:
267 263 return cstruct
268 264
269 265 appstruct = super(GroupType, self).deserialize(node, cstruct)
270 266 validated_name = appstruct['repo_group_name']
271 267
272 268 # inject group based on once deserialized data
273 269 (repo_name_without_group,
274 270 parent_group_name,
275 271 parent_group) = get_group_and_repo(validated_name)
276 272
277 273 appstruct['repo_name_with_group'] = validated_name
278 274 appstruct['repo_name_without_group'] = repo_name_without_group
279 275 appstruct['repo_group_name'] = parent_group_name or types.RootLocation
280 276
281 277 if parent_group:
282 278 appstruct['repo_group_id'] = parent_group.group_id
283 279
284 280 return appstruct
285 281
286 282
287 283 class GroupSchema(colander.SchemaNode):
288 284 schema_type = GroupType
289 285 validator = deferred_can_write_to_group_validator
290 286 missing = colander.null
291 287
292 288
293 289 class RepoGroup(GroupSchema):
294 290 repo_group_name = colander.SchemaNode(
295 291 types.GroupNameType())
296 292 repo_group_id = colander.SchemaNode(
297 293 colander.String(), missing=None)
298 294 repo_name_without_group = colander.SchemaNode(
299 295 colander.String(), missing=None)
300 296
301 297
302 298 class RepoGroupAccessSchema(colander.MappingSchema):
303 299 repo_group = RepoGroup()
304 300
305 301
306 302 class RepoNameUniqueSchema(colander.MappingSchema):
307 303 unique_repo_name = colander.SchemaNode(
308 304 colander.String(),
309 305 validator=deferred_unique_name_validator)
310 306
311 307
312 308 class RepoSchema(colander.MappingSchema):
313 309
314 310 repo_name = colander.SchemaNode(
315 311 types.RepoNameType(),
316 312 validator=deferred_repo_name_validator)
317 313
318 314 repo_type = colander.SchemaNode(
319 315 colander.String(),
320 316 validator=deferred_repo_type_validator)
321 317
322 318 repo_owner = colander.SchemaNode(
323 319 colander.String(),
324 320 validator=deferred_repo_owner_validator,
325 321 widget=deform.widget.TextInputWidget())
326 322
327 323 repo_description = colander.SchemaNode(
328 324 colander.String(), missing='',
329 325 widget=deform.widget.TextAreaWidget())
330 326
331 327 repo_landing_commit_ref = colander.SchemaNode(
332 328 colander.String(),
333 329 validator=deferred_landing_ref_validator,
334 330 preparers=[preparers.strip_preparer],
335 331 missing=DEFAULT_LANDING_REF,
336 332 widget=deferred_landing_ref_widget)
337 333
338 334 repo_clone_uri = colander.SchemaNode(
339 335 colander.String(),
340 336 validator=deferred_sync_uri_validator,
341 337 preparers=[preparers.strip_preparer],
342 338 missing='')
343 339
344 340 repo_push_uri = colander.SchemaNode(
345 341 colander.String(),
346 342 validator=deferred_sync_uri_validator,
347 343 preparers=[preparers.strip_preparer],
348 344 missing='')
349 345
350 346 repo_fork_of = colander.SchemaNode(
351 347 colander.String(),
352 348 validator=deferred_fork_of_validator,
353 349 missing=None)
354 350
355 351 repo_private = colander.SchemaNode(
356 352 types.StringBooleanType(),
357 353 missing=False, widget=deform.widget.CheckboxWidget())
358 354 repo_copy_permissions = colander.SchemaNode(
359 355 types.StringBooleanType(),
360 356 missing=False, widget=deform.widget.CheckboxWidget())
361 357 repo_enable_statistics = colander.SchemaNode(
362 358 types.StringBooleanType(),
363 359 missing=False, widget=deform.widget.CheckboxWidget())
364 360 repo_enable_downloads = colander.SchemaNode(
365 361 types.StringBooleanType(),
366 362 missing=False, widget=deform.widget.CheckboxWidget())
367 363 repo_enable_locking = colander.SchemaNode(
368 364 types.StringBooleanType(),
369 365 missing=False, widget=deform.widget.CheckboxWidget())
370 366
371 367 def deserialize(self, cstruct):
372 368 """
373 369 Custom deserialize that chains validation: first the given data,
374 370 then permissions, and, as a last step, uniqueness
375 371 """
376 372
377 373 # first pass, to validate given data
378 374 appstruct = super(RepoSchema, self).deserialize(cstruct)
379 375 validated_name = appstruct['repo_name']
380 376
381 377 # second pass to validate permissions to repo_group
382 378 if 'old_values' in self.bindings:
383 379 # save current repo name for name change checks
384 380 self.bindings['old_values']['submitted_repo_name'] = validated_name
385 381 second = RepoGroupAccessSchema().bind(**self.bindings)
386 382 appstruct_second = second.deserialize({'repo_group': validated_name})
387 383 # save result
388 384 appstruct['repo_group'] = appstruct_second['repo_group']
389 385
390 386 # third, validate uniqueness
391 387 third = RepoNameUniqueSchema().bind(**self.bindings)
392 388 third.deserialize({'unique_repo_name': validated_name})
393 389
394 390 return appstruct
395 391
396 392
397 393 class RepoSettingsSchema(RepoSchema):
398 394 repo_group = colander.SchemaNode(
399 395 colander.Integer(),
400 396 validator=deferred_repo_group_validator,
401 397 widget=deferred_repo_group_widget,
402 398 missing='')
403 399
404 400 repo_clone_uri_change = colander.SchemaNode(
405 401 colander.String(),
406 402 missing='NEW')
407 403
408 404 repo_clone_uri = colander.SchemaNode(
409 405 colander.String(),
410 406 preparers=[preparers.strip_preparer],
411 407 validator=deferred_sync_uri_validator,
412 408 missing='')
413 409
414 410 repo_push_uri_change = colander.SchemaNode(
415 411 colander.String(),
416 412 missing='NEW')
417 413
418 414 repo_push_uri = colander.SchemaNode(
419 415 colander.String(),
420 416 preparers=[preparers.strip_preparer],
421 417 validator=deferred_sync_uri_validator,
422 418 missing='')
423 419
424 420 def deserialize(self, cstruct):
425 421 """
426 422 Custom deserialize that chains validation: first the given data,
427 423 then permissions, and, as a last step, uniqueness
428 424 """
429 425
430 426 # first pass, to validate given data
431 427 appstruct = super(RepoSchema, self).deserialize(cstruct)
432 428 validated_name = appstruct['repo_name']
433 429 # because RepoSchema adds repo_group as an ID, we inject it as a
434 430 # full name here since validators require it; it's unwrapped later,
435 431 # so it's safe to use, and the final name will be without the group anyway
436 432
437 433 group, separator = get_repo_group(appstruct['repo_group'])
438 434 if group:
439 435 validated_name = separator.join([group.group_name, validated_name])
440 436
441 437 # second pass to validate permissions to repo_group
442 438 if 'old_values' in self.bindings:
443 439 # save current repo name for name change checks
444 440 self.bindings['old_values']['submitted_repo_name'] = validated_name
445 441 second = RepoGroupAccessSchema().bind(**self.bindings)
446 442 appstruct_second = second.deserialize({'repo_group': validated_name})
447 443 # save result
448 444 appstruct['repo_group'] = appstruct_second['repo_group']
449 445
450 446 # third, validate uniqueness
451 447 third = RepoNameUniqueSchema().bind(**self.bindings)
452 448 third.deserialize({'unique_repo_name': validated_name})
453 449
454 450 return appstruct
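A minimal sketch of how the reworked deferred widget resolves once the schema is bound; this assumes colander's standard bind() mechanism and the keyword names read via kw.get() above (the import path is assumed from the module layout):

    from rhodecode.model.validation_schema.schemas.repo_schema import RepoSchema

    # with no explicit 'repo_ref_items' binding, deferred_landing_ref_widget
    # now derives its single default option from ScmModel.backend_landing_ref
    # instead of the removed DEFAULT_BACKEND_LANDING_REF dict
    schema = RepoSchema().bind(
        repo_type='git',
        repo_type_options=['git'],
        repo_ref_options=['branch:master'],
    )
    # repo_landing_commit_ref ends up with a Select2Widget whose default
    # option is ('branch:master', 'branch:master')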
@@ -1,419 +1,426 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Helpers for fixture generation
23 23 """
24 24
25 25 import os
26 26 import time
27 27 import tempfile
28 28 import shutil
29 29
30 30 import configobj
31 31
32 32 from rhodecode.model.settings import SettingsModel
33 33 from rhodecode.tests import *
34 34 from rhodecode.model.db import Repository, User, RepoGroup, UserGroup, Gist, UserEmailMap
35 35 from rhodecode.model.meta import Session
36 36 from rhodecode.model.repo import RepoModel
37 37 from rhodecode.model.user import UserModel
38 38 from rhodecode.model.repo_group import RepoGroupModel
39 39 from rhodecode.model.user_group import UserGroupModel
40 40 from rhodecode.model.gist import GistModel
41 41 from rhodecode.model.auth_token import AuthTokenModel
42 from rhodecode.model.scm import ScmModel
42 43 from rhodecode.authentication.plugins.auth_rhodecode import \
43 44 RhodeCodeAuthPlugin
44 45
45 46 dn = os.path.dirname
46 47 FIXTURES = os.path.join(dn(dn(os.path.abspath(__file__))), 'tests', 'fixtures')
47 48
48 49
49 50 def error_function(*args, **kwargs):
50 51 raise Exception('Total Crash !')
51 52
52 53
53 54 class TestINI(object):
54 55 """
55 56 Allows creating a new test.ini file as a copy of an existing one with
56 57 edited data. Example usage::
57 58
58 59 with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
59 60 print('paster server %s' % new_test_ini_path)
60 61 """
61 62
62 63 def __init__(self, ini_file_path, ini_params, new_file_prefix='DEFAULT',
63 64 destroy=True, dir=None):
64 65 self.ini_file_path = ini_file_path
65 66 self.ini_params = ini_params
66 67 self.new_path = None
67 68 self.new_path_prefix = new_file_prefix
68 69 self._destroy = destroy
69 70 self._dir = dir
70 71
71 72 def __enter__(self):
72 73 return self.create()
73 74
74 75 def __exit__(self, exc_type, exc_val, exc_tb):
75 76 self.destroy()
76 77
77 78 def create(self):
78 79 config = configobj.ConfigObj(
79 80 self.ini_file_path, file_error=True, write_empty_values=True)
80 81
81 82 for data in self.ini_params:
82 83 section, ini_params = data.items()[0]
83 84 for key, val in ini_params.items():
84 85 config[section][key] = val
85 86 with tempfile.NamedTemporaryFile(
86 87 prefix=self.new_path_prefix, suffix='.ini', dir=self._dir,
87 88 delete=False) as new_ini_file:
88 89 config.write(new_ini_file)
89 90 self.new_path = new_ini_file.name
90 91
91 92 return self.new_path
92 93
93 94 def destroy(self):
94 95 if self._destroy:
95 96 os.remove(self.new_path)
96 97
97 98
98 99 class Fixture(object):
99 100
100 101 def anon_access(self, status):
101 102 """
102 103 Context manager for disabling anonymous access. Use like:
103 104 fixture = Fixture()
104 105 with fixture.anon_access(False):
105 106 #tests
106 107
107 108 after this block anon access will be set to `not status`
108 109 """
109 110
110 111 class context(object):
111 112 def __enter__(self):
112 113 anon = User.get_default_user()
113 114 anon.active = status
114 115 Session().add(anon)
115 116 Session().commit()
116 117 time.sleep(1.5) # must sleep for cache (1s to expire)
117 118
118 119 def __exit__(self, exc_type, exc_val, exc_tb):
119 120 anon = User.get_default_user()
120 121 anon.active = not status
121 122 Session().add(anon)
122 123 Session().commit()
123 124
124 125 return context()
125 126
126 127 def auth_restriction(self, registry, auth_restriction):
127 128 """
128 129 Context manager for changing the builtin rhodecode plugin auth restrictions.
129 130 Use like:
130 131 fixture = Fixture()
131 132 with fixture.auth_restriction('super_admin'):
132 133 #tests
133 134
134 135 after this block auth restriction will be taken off
135 136 """
136 137
137 138 class context(object):
138 139 def _get_plugin(self):
139 140 plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
140 141 plugin = RhodeCodeAuthPlugin(plugin_id)
141 142 return plugin
142 143
143 144 def __enter__(self):
144 145
145 146 plugin = self._get_plugin()
146 147 plugin.create_or_update_setting('auth_restriction', auth_restriction)
147 148 Session().commit()
148 149 SettingsModel().invalidate_settings_cache()
149 150
150 151 def __exit__(self, exc_type, exc_val, exc_tb):
151 152
152 153 plugin = self._get_plugin()
153 154 plugin.create_or_update_setting(
154 155 'auth_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_NONE)
155 156 Session().commit()
156 157 SettingsModel().invalidate_settings_cache()
157 158
158 159 return context()
159 160
160 161 def scope_restriction(self, registry, scope_restriction):
161 162 """
162 163 Context manager for changing the builtin rhodecode plugin scope restrictions.
163 164 Use like:
164 165 fixture = Fixture()
165 166 with fixture.scope_restriction('scope_http'):
166 167 #tests
167 168
168 169 after this block scope restriction will be taken off
169 170 """
170 171
171 172 class context(object):
172 173 def _get_plugin(self):
173 174 plugin_id = 'egg:rhodecode-enterprise-ce#{}'.format(RhodeCodeAuthPlugin.uid)
174 175 plugin = RhodeCodeAuthPlugin(plugin_id)
175 176 return plugin
176 177
177 178 def __enter__(self):
178 179 plugin = self._get_plugin()
179 180 plugin.create_or_update_setting('scope_restriction', scope_restriction)
180 181 Session().commit()
181 182 SettingsModel().invalidate_settings_cache()
182 183
183 184 def __exit__(self, exc_type, exc_val, exc_tb):
184 185 plugin = self._get_plugin()
185 186 plugin.create_or_update_setting(
186 187 'scope_restriction', RhodeCodeAuthPlugin.AUTH_RESTRICTION_SCOPE_ALL)
187 188 Session().commit()
188 189 SettingsModel().invalidate_settings_cache()
189 190
190 191 return context()
191 192
192 193 def _get_repo_create_params(self, **custom):
194 repo_type = custom.get('repo_type') or 'hg'
195
196 default_landing_ref, landing_ref_lbl = ScmModel.backend_landing_ref(repo_type)
197
193 198 defs = {
194 199 'repo_name': None,
195 'repo_type': 'hg',
200 'repo_type': repo_type,
196 201 'clone_uri': '',
197 202 'push_uri': '',
198 203 'repo_group': '-1',
199 204 'repo_description': 'DESC',
200 205 'repo_private': False,
201 'repo_landing_rev': 'rev:tip',
206 'repo_landing_commit_ref': default_landing_ref,
202 207 'repo_copy_permissions': False,
203 208 'repo_state': Repository.STATE_CREATED,
204 209 }
205 210 defs.update(custom)
206 211 if 'repo_name_full' not in custom:
207 212 defs.update({'repo_name_full': defs['repo_name']})
208 213
209 214 # fix the repo name if passed as repo_name_full
210 215 if defs['repo_name']:
211 216 defs['repo_name'] = defs['repo_name'].split('/')[-1]
212 217
213 218 return defs
214 219
215 220 def _get_group_create_params(self, **custom):
216 221 defs = {
217 222 'group_name': None,
218 223 'group_description': 'DESC',
219 224 'perm_updates': [],
220 225 'perm_additions': [],
221 226 'perm_deletions': [],
222 227 'group_parent_id': -1,
223 228 'enable_locking': False,
224 229 'recursive': False,
225 230 }
226 231 defs.update(custom)
227 232
228 233 return defs
229 234
230 235 def _get_user_create_params(self, name, **custom):
231 236 defs = {
232 237 'username': name,
233 238 'password': 'qweqwe',
234 239 'email': '%s+test@rhodecode.org' % name,
235 240 'firstname': 'TestUser',
236 241 'lastname': 'Test',
237 242 'description': 'test description',
238 243 'active': True,
239 244 'admin': False,
240 245 'extern_type': 'rhodecode',
241 246 'extern_name': None,
242 247 }
243 248 defs.update(custom)
244 249
245 250 return defs
246 251
247 252 def _get_user_group_create_params(self, name, **custom):
248 253 defs = {
249 254 'users_group_name': name,
250 255 'user_group_description': 'DESC',
251 256 'users_group_active': True,
252 257 'user_group_data': {},
253 258 }
254 259 defs.update(custom)
255 260
256 261 return defs
257 262
258 263 def create_repo(self, name, **kwargs):
259 264 repo_group = kwargs.get('repo_group')
260 265 if isinstance(repo_group, RepoGroup):
261 266 kwargs['repo_group'] = repo_group.group_id
262 267 name = name.split(Repository.NAME_SEP)[-1]
263 268 name = Repository.NAME_SEP.join((repo_group.group_name, name))
264 269
265 270 if 'skip_if_exists' in kwargs:
266 271 del kwargs['skip_if_exists']
267 272 r = Repository.get_by_repo_name(name)
268 273 if r:
269 274 return r
270 275
271 276 form_data = self._get_repo_create_params(repo_name=name, **kwargs)
272 277 cur_user = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
273 278 RepoModel().create(form_data, cur_user)
274 279 Session().commit()
275 280 repo = Repository.get_by_repo_name(name)
276 281 assert repo
277 282 return repo
278 283
279 284 def create_fork(self, repo_to_fork, fork_name, **kwargs):
280 285 repo_to_fork = Repository.get_by_repo_name(repo_to_fork)
281 286
282 form_data = self._get_repo_create_params(repo_name=fork_name,
283 fork_parent_id=repo_to_fork.repo_id,
284 repo_type=repo_to_fork.repo_type,
285 **kwargs)
287 form_data = self._get_repo_create_params(
288 repo_name=fork_name,
289 fork_parent_id=repo_to_fork.repo_id,
290 repo_type=repo_to_fork.repo_type,
291 **kwargs)
292
286 293 #TODO: fix it !!
287 294 form_data['description'] = form_data['repo_description']
288 295 form_data['private'] = form_data['repo_private']
289 form_data['landing_rev'] = form_data['repo_landing_rev']
296 form_data['landing_rev'] = form_data['repo_landing_commit_ref']
290 297
291 298 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
292 299 RepoModel().create_fork(form_data, cur_user=owner)
293 300 Session().commit()
294 301 r = Repository.get_by_repo_name(fork_name)
295 302 assert r
296 303 return r
297 304
298 305 def destroy_repo(self, repo_name, **kwargs):
299 306 RepoModel().delete(repo_name, pull_requests='delete', **kwargs)
300 307 Session().commit()
301 308
302 309 def destroy_repo_on_filesystem(self, repo_name):
303 310 rm_path = os.path.join(RepoModel().repos_path, repo_name)
304 311 if os.path.isdir(rm_path):
305 312 shutil.rmtree(rm_path)
306 313
307 314 def create_repo_group(self, name, **kwargs):
308 315 if 'skip_if_exists' in kwargs:
309 316 del kwargs['skip_if_exists']
310 317 gr = RepoGroup.get_by_group_name(group_name=name)
311 318 if gr:
312 319 return gr
313 320 form_data = self._get_group_create_params(group_name=name, **kwargs)
314 321 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
315 322 gr = RepoGroupModel().create(
316 323 group_name=form_data['group_name'],
317 324 group_description=form_data['group_name'],
318 325 owner=owner)
319 326 Session().commit()
320 327 gr = RepoGroup.get_by_group_name(gr.group_name)
321 328 return gr
322 329
323 330 def destroy_repo_group(self, repogroupid):
324 331 RepoGroupModel().delete(repogroupid)
325 332 Session().commit()
326 333
327 334 def create_user(self, name, **kwargs):
328 335 if 'skip_if_exists' in kwargs:
329 336 del kwargs['skip_if_exists']
330 337 user = User.get_by_username(name)
331 338 if user:
332 339 return user
333 340 form_data = self._get_user_create_params(name, **kwargs)
334 341 user = UserModel().create(form_data)
335 342
336 343 # create token for user
337 344 AuthTokenModel().create(
338 345 user=user, description=u'TEST_USER_TOKEN')
339 346
340 347 Session().commit()
341 348 user = User.get_by_username(user.username)
342 349 return user
343 350
344 351 def destroy_user(self, userid):
345 352 UserModel().delete(userid)
346 353 Session().commit()
347 354
348 355 def create_additional_user_email(self, user, email):
349 356 uem = UserEmailMap()
350 357 uem.user = user
351 358 uem.email = email
352 359 Session().add(uem)
353 360 return uem
354 361
355 362 def destroy_users(self, userid_iter):
356 363 for user_id in userid_iter:
357 364 if User.get_by_username(user_id):
358 365 UserModel().delete(user_id)
359 366 Session().commit()
360 367
361 368 def create_user_group(self, name, **kwargs):
362 369 if 'skip_if_exists' in kwargs:
363 370 del kwargs['skip_if_exists']
364 371 gr = UserGroup.get_by_group_name(group_name=name)
365 372 if gr:
366 373 return gr
367 374 # map active flag to the real attribute. For API consistency of fixtures
368 375 if 'active' in kwargs:
369 376 kwargs['users_group_active'] = kwargs['active']
370 377 del kwargs['active']
371 378 form_data = self._get_user_group_create_params(name, **kwargs)
372 379 owner = kwargs.get('cur_user', TEST_USER_ADMIN_LOGIN)
373 380 user_group = UserGroupModel().create(
374 381 name=form_data['users_group_name'],
375 382 description=form_data['user_group_description'],
376 383 owner=owner, active=form_data['users_group_active'],
377 384 group_data=form_data['user_group_data'])
378 385 Session().commit()
379 386 user_group = UserGroup.get_by_group_name(user_group.users_group_name)
380 387 return user_group
381 388
382 389 def destroy_user_group(self, usergroupid):
383 390 UserGroupModel().delete(user_group=usergroupid, force=True)
384 391 Session().commit()
385 392
386 393 def create_gist(self, **kwargs):
387 394 form_data = {
388 395 'description': 'new-gist',
389 396 'owner': TEST_USER_ADMIN_LOGIN,
390 397 'gist_type': GistModel.cls.GIST_PUBLIC,
391 398 'lifetime': -1,
392 399 'acl_level': Gist.ACL_LEVEL_PUBLIC,
393 400 'gist_mapping': {'filename1.txt': {'content': 'hello world'},}
394 401 }
395 402 form_data.update(kwargs)
396 403 gist = GistModel().create(
397 404 description=form_data['description'], owner=form_data['owner'],
398 405 gist_mapping=form_data['gist_mapping'], gist_type=form_data['gist_type'],
399 406 lifetime=form_data['lifetime'], gist_acl_level=form_data['acl_level']
400 407 )
401 408 Session().commit()
402 409 return gist
403 410
404 411 def destroy_gists(self, gistid=None):
405 412 for g in GistModel.cls.get_all():
406 413 if gistid:
407 414 if gistid == g.gist_access_id:
408 415 GistModel().delete(g)
409 416 else:
410 417 GistModel().delete(g)
411 418 Session().commit()
412 419
413 420 def load_resource(self, resource_name, strip=False):
414 421 with open(os.path.join(FIXTURES, resource_name)) as f:
415 422 source = f.read()
416 423 if strip:
417 424 source = source.strip()
418 425
419 426 return source
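With the fixture changes above, tests no longer pass a landing rev explicitly; _get_repo_create_params derives it from the repo type. A small usage sketch (the repo name is made up; assumes a provisioned test database and admin user, as elsewhere in the test suite):

    fixture = Fixture()

    # 'repo_landing_commit_ref' resolves via ScmModel.backend_landing_ref('git'),
    # i.e. 'branch:master'
    repo = fixture.create_repo('landing-ref-example', repo_type='git')

    # clean up afterwards
    fixture.destroy_repo('landing-ref-example')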