##// END OF EJS Templates
validators: fix url_validator tests and make it flag controllable.
marcink -
r3072:fe39713b default
parent child Browse files
Show More
@@ -1,53 +1,55 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22
22 import mock
23 23 import pytest
24 24
25 25 from rhodecode.tests import TESTS_TMP_PATH
26 26 from rhodecode.api.tests.utils import (
27 27 build_data, api_call, assert_ok, assert_error)
28 28
29 29
30 30 @pytest.mark.usefixtures("testuser_api", "app")
31 31 class TestPull(object):
32
32 33 @pytest.mark.backends("git", "hg")
33 34 def test_api_pull(self, backend):
34 35 r = backend.create_repo()
35 36 repo_name = r.repo_name
36 37 clone_uri = os.path.join(TESTS_TMP_PATH, backend.repo_name)
37 38 r.clone_uri = clone_uri
38 39
39 40 id_, params = build_data(self.apikey, 'pull', repoid=repo_name,)
40 response = api_call(self.app, params)
41 msg = 'Pulled from url `%s` on repo `%s`' % (
42 clone_uri, repo_name)
43 expected = {'msg': msg,
44 'repository': repo_name}
45 assert_ok(id_, expected, given=response.body)
41 with mock.patch('rhodecode.model.scm.url_validator'):
42 response = api_call(self.app, params)
43 msg = 'Pulled from url `%s` on repo `%s`' % (
44 clone_uri, repo_name)
45 expected = {'msg': msg,
46 'repository': repo_name}
47 assert_ok(id_, expected, given=response.body)
46 48
47 49 def test_api_pull_error(self, backend):
48 50 id_, params = build_data(
49 51 self.apikey, 'pull', repoid=backend.repo_name)
50 52 response = api_call(self.app, params)
51 53
52 54 expected = 'Unable to pull changes from `None`'
53 55 assert_error(id_, expected, given=response.body)
@@ -1,197 +1,203 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import mock
22 22 import pytest
23 23
24 24 from rhodecode.model.repo import RepoModel
25 25 from rhodecode.tests import TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN
26 26 from rhodecode.api.tests.utils import (
27 27 build_data, api_call, assert_error, assert_ok, crash, jsonify)
28 28 from rhodecode.tests.fixture import Fixture
29 29 from rhodecode.tests.plugin import http_host_stub, http_host_only_stub
30 30
31 31 fixture = Fixture()
32 32
33 33 UPDATE_REPO_NAME = 'api_update_me'
34 34
35 35
36 36 class SAME_AS_UPDATES(object):
37 37 """ Constant used for tests below """
38 38
39 39
40 40 @pytest.mark.usefixtures("testuser_api", "app")
41 41 class TestApiUpdateRepo(object):
42 42
43 43 @pytest.mark.parametrize("updates, expected", [
44 44 ({'owner': TEST_USER_REGULAR_LOGIN},
45 45 SAME_AS_UPDATES),
46 46
47 47 ({'description': 'new description'},
48 48 SAME_AS_UPDATES),
49 49
50 50 ({'clone_uri': 'http://foo.com/repo'},
51 51 SAME_AS_UPDATES),
52 52
53 53 ({'clone_uri': None},
54 54 {'clone_uri': ''}),
55 55
56 56 ({'clone_uri': ''},
57 57 {'clone_uri': ''}),
58 58
59 ({'clone_uri': 'http://example.com/repo_pull'},
60 {'clone_uri': 'http://example.com/repo_pull'}),
61
59 62 ({'push_uri': ''},
60 63 {'push_uri': ''}),
61 64
65 ({'push_uri': 'http://example.com/repo_push'},
66 {'push_uri': 'http://example.com/repo_push'}),
67
62 68 ({'landing_rev': 'rev:tip'},
63 69 {'landing_rev': ['rev', 'tip']}),
64 70
65 71 ({'enable_statistics': True},
66 72 SAME_AS_UPDATES),
67 73
68 74 ({'enable_locking': True},
69 75 SAME_AS_UPDATES),
70 76
71 77 ({'enable_downloads': True},
72 78 SAME_AS_UPDATES),
73 79
74 80 ({'repo_name': 'new_repo_name'},
75 81 {
76 82 'repo_name': 'new_repo_name',
77 83 'url': 'http://{}/new_repo_name'.format(http_host_only_stub())
78 84 }),
79 85
80 86 ({'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
81 87 '_group': 'test_group_for_update'},
82 88 {
83 89 'repo_name': 'test_group_for_update/{}'.format(UPDATE_REPO_NAME),
84 90 'url': 'http://{}/test_group_for_update/{}'.format(
85 91 http_host_only_stub(), UPDATE_REPO_NAME)
86 92 }),
87 93 ])
88 94 def test_api_update_repo(self, updates, expected, backend):
89 95 repo_name = UPDATE_REPO_NAME
90 96 repo = fixture.create_repo(repo_name, repo_type=backend.alias)
91 97 if updates.get('_group'):
92 98 fixture.create_repo_group(updates['_group'])
93 99
94 100 expected_api_data = repo.get_api_data(include_secrets=True)
95 101 if expected is SAME_AS_UPDATES:
96 102 expected_api_data.update(updates)
97 103 else:
98 104 expected_api_data.update(expected)
99 105
100 106 id_, params = build_data(
101 107 self.apikey, 'update_repo', repoid=repo_name, **updates)
102 108
103 109 with mock.patch('rhodecode.model.validation_schema.validators.url_validator'):
104 110 response = api_call(self.app, params)
105 111
106 112 if updates.get('repo_name'):
107 113 repo_name = updates['repo_name']
108 114
109 115 try:
110 116 expected = {
111 117 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo_name),
112 118 'repository': jsonify(expected_api_data)
113 119 }
114 120 assert_ok(id_, expected, given=response.body)
115 121 finally:
116 122 fixture.destroy_repo(repo_name)
117 123 if updates.get('_group'):
118 124 fixture.destroy_repo_group(updates['_group'])
119 125
120 126 def test_api_update_repo_fork_of_field(self, backend):
121 127 master_repo = backend.create_repo()
122 128 repo = backend.create_repo()
123 129 updates = {
124 130 'fork_of': master_repo.repo_name,
125 131 'fork_of_id': master_repo.repo_id
126 132 }
127 133 expected_api_data = repo.get_api_data(include_secrets=True)
128 134 expected_api_data.update(updates)
129 135
130 136 id_, params = build_data(
131 137 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
132 138 response = api_call(self.app, params)
133 139 expected = {
134 140 'msg': 'updated repo ID:%s %s' % (repo.repo_id, repo.repo_name),
135 141 'repository': jsonify(expected_api_data)
136 142 }
137 143 assert_ok(id_, expected, given=response.body)
138 144 result = response.json['result']['repository']
139 145 assert result['fork_of'] == master_repo.repo_name
140 146 assert result['fork_of_id'] == master_repo.repo_id
141 147
142 148 def test_api_update_repo_fork_of_not_found(self, backend):
143 149 master_repo_name = 'fake-parent-repo'
144 150 repo = backend.create_repo()
145 151 updates = {
146 152 'fork_of': master_repo_name
147 153 }
148 154 id_, params = build_data(
149 155 self.apikey, 'update_repo', repoid=repo.repo_name, **updates)
150 156 response = api_call(self.app, params)
151 157 expected = {
152 158 'repo_fork_of': 'Fork with id `{}` does not exists'.format(
153 159 master_repo_name)}
154 160 assert_error(id_, expected, given=response.body)
155 161
156 162 def test_api_update_repo_with_repo_group_not_existing(self):
157 163 repo_name = 'admin_owned'
158 164 fake_repo_group = 'test_group_for_update'
159 165 fixture.create_repo(repo_name)
160 166 updates = {'repo_name': '{}/{}'.format(fake_repo_group, repo_name)}
161 167 id_, params = build_data(
162 168 self.apikey, 'update_repo', repoid=repo_name, **updates)
163 169 response = api_call(self.app, params)
164 170 try:
165 171 expected = {
166 172 'repo_group': 'Repository group `{}` does not exist'.format(fake_repo_group)
167 173 }
168 174 assert_error(id_, expected, given=response.body)
169 175 finally:
170 176 fixture.destroy_repo(repo_name)
171 177
172 178 def test_api_update_repo_regular_user_not_allowed(self):
173 179 repo_name = 'admin_owned'
174 180 fixture.create_repo(repo_name)
175 181 updates = {'active': False}
176 182 id_, params = build_data(
177 183 self.apikey_regular, 'update_repo', repoid=repo_name, **updates)
178 184 response = api_call(self.app, params)
179 185 try:
180 186 expected = 'repository `%s` does not exist' % (repo_name,)
181 187 assert_error(id_, expected, given=response.body)
182 188 finally:
183 189 fixture.destroy_repo(repo_name)
184 190
185 191 @mock.patch.object(RepoModel, 'update', crash)
186 192 def test_api_update_repo_exception_occurred(self, backend):
187 193 repo_name = UPDATE_REPO_NAME
188 194 fixture.create_repo(repo_name, repo_type=backend.alias)
189 195 id_, params = build_data(
190 196 self.apikey, 'update_repo', repoid=repo_name,
191 197 owner=TEST_USER_ADMIN_LOGIN,)
192 198 response = api_call(self.app, params)
193 199 try:
194 200 expected = 'failed to update repo `%s`' % (repo_name,)
195 201 assert_error(id_, expected, given=response.body)
196 202 finally:
197 203 fixture.destroy_repo(repo_name)
@@ -1,831 +1,833 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2018 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Scm model for RhodeCode
23 23 """
24 24
25 25 import os.path
26 26 import traceback
27 27 import logging
28 28 import cStringIO
29 29
30 30 from sqlalchemy import func
31 31 from zope.cachedescriptors.property import Lazy as LazyProperty
32 32
33 33 import rhodecode
34 34 from rhodecode.lib.vcs import get_backend
35 35 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
36 36 from rhodecode.lib.vcs.nodes import FileNode
37 37 from rhodecode.lib.vcs.backends.base import EmptyCommit
38 38 from rhodecode.lib import helpers as h, rc_cache
39 39 from rhodecode.lib.auth import (
40 40 HasRepoPermissionAny, HasRepoGroupPermissionAny,
41 41 HasUserGroupPermissionAny)
42 42 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
43 43 from rhodecode.lib import hooks_utils
44 44 from rhodecode.lib.utils import (
45 45 get_filesystem_repos, make_db_config)
46 46 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
47 47 from rhodecode.lib.system_info import get_system_info
48 48 from rhodecode.model import BaseModel
49 49 from rhodecode.model.db import (
50 50 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
51 51 PullRequest)
52 52 from rhodecode.model.settings import VcsSettingsModel
53 53 from rhodecode.model.validation_schema.validators import url_validator, InvalidCloneUrl
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 class UserTemp(object):
59 59 def __init__(self, user_id):
60 60 self.user_id = user_id
61 61
62 62 def __repr__(self):
63 63 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
64 64
65 65
66 66 class RepoTemp(object):
67 67 def __init__(self, repo_id):
68 68 self.repo_id = repo_id
69 69
70 70 def __repr__(self):
71 71 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
72 72
73 73
74 74 class SimpleCachedRepoList(object):
75 75 """
76 76 Lighter version of iteration of repos without the scm initialisation,
77 77 and with cache usage
78 78 """
79 79 def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
80 80 self.db_repo_list = db_repo_list
81 81 self.repos_path = repos_path
82 82 self.order_by = order_by
83 83 self.reversed = (order_by or '').startswith('-')
84 84 if not perm_set:
85 85 perm_set = ['repository.read', 'repository.write',
86 86 'repository.admin']
87 87 self.perm_set = perm_set
88 88
89 89 def __len__(self):
90 90 return len(self.db_repo_list)
91 91
92 92 def __repr__(self):
93 93 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
94 94
95 95 def __iter__(self):
96 96 for dbr in self.db_repo_list:
97 97 # check permission at this level
98 98 has_perm = HasRepoPermissionAny(*self.perm_set)(
99 99 dbr.repo_name, 'SimpleCachedRepoList check')
100 100 if not has_perm:
101 101 continue
102 102
103 103 tmp_d = {
104 104 'name': dbr.repo_name,
105 105 'dbrepo': dbr.get_dict(),
106 106 'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
107 107 }
108 108 yield tmp_d
109 109
110 110
111 111 class _PermCheckIterator(object):
112 112
113 113 def __init__(
114 114 self, obj_list, obj_attr, perm_set, perm_checker,
115 115 extra_kwargs=None):
116 116 """
117 117 Creates iterator from given list of objects, additionally
118 118 checking permission for them from perm_set var
119 119
120 120 :param obj_list: list of db objects
121 121 :param obj_attr: attribute of object to pass into perm_checker
122 122 :param perm_set: list of permissions to check
123 123 :param perm_checker: callable to check permissions against
124 124 """
125 125 self.obj_list = obj_list
126 126 self.obj_attr = obj_attr
127 127 self.perm_set = perm_set
128 128 self.perm_checker = perm_checker
129 129 self.extra_kwargs = extra_kwargs or {}
130 130
131 131 def __len__(self):
132 132 return len(self.obj_list)
133 133
134 134 def __repr__(self):
135 135 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
136 136
137 137 def __iter__(self):
138 138 checker = self.perm_checker(*self.perm_set)
139 139 for db_obj in self.obj_list:
140 140 # check permission at this level
141 141 name = getattr(db_obj, self.obj_attr, None)
142 142 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
143 143 continue
144 144
145 145 yield db_obj
146 146
147 147
148 148 class RepoList(_PermCheckIterator):
149 149
150 150 def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
151 151 if not perm_set:
152 152 perm_set = [
153 153 'repository.read', 'repository.write', 'repository.admin']
154 154
155 155 super(RepoList, self).__init__(
156 156 obj_list=db_repo_list,
157 157 obj_attr='repo_name', perm_set=perm_set,
158 158 perm_checker=HasRepoPermissionAny,
159 159 extra_kwargs=extra_kwargs)
160 160
161 161
162 162 class RepoGroupList(_PermCheckIterator):
163 163
164 164 def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
165 165 if not perm_set:
166 166 perm_set = ['group.read', 'group.write', 'group.admin']
167 167
168 168 super(RepoGroupList, self).__init__(
169 169 obj_list=db_repo_group_list,
170 170 obj_attr='group_name', perm_set=perm_set,
171 171 perm_checker=HasRepoGroupPermissionAny,
172 172 extra_kwargs=extra_kwargs)
173 173
174 174
175 175 class UserGroupList(_PermCheckIterator):
176 176
177 177 def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
178 178 if not perm_set:
179 179 perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']
180 180
181 181 super(UserGroupList, self).__init__(
182 182 obj_list=db_user_group_list,
183 183 obj_attr='users_group_name', perm_set=perm_set,
184 184 perm_checker=HasUserGroupPermissionAny,
185 185 extra_kwargs=extra_kwargs)
186 186
187 187
188 188 class ScmModel(BaseModel):
189 189 """
190 190 Generic Scm Model
191 191 """
192 192
193 193 @LazyProperty
194 194 def repos_path(self):
195 195 """
196 196 Gets the repositories root path from database
197 197 """
198 198
199 199 settings_model = VcsSettingsModel(sa=self.sa)
200 200 return settings_model.get_repos_location()
201 201
202 202 def repo_scan(self, repos_path=None):
203 203 """
204 204 Listing of repositories in given path. This path should not be a
205 205 repository itself. Return a dictionary of repository objects
206 206
207 207 :param repos_path: path to directory containing repositories
208 208 """
209 209
210 210 if repos_path is None:
211 211 repos_path = self.repos_path
212 212
213 213 log.info('scanning for repositories in %s', repos_path)
214 214
215 215 config = make_db_config()
216 216 config.set('extensions', 'largefiles', '')
217 217 repos = {}
218 218
219 219 for name, path in get_filesystem_repos(repos_path, recursive=True):
220 220 # name need to be decomposed and put back together using the /
221 221 # since this is internal storage separator for rhodecode
222 222 name = Repository.normalize_repo_name(name)
223 223
224 224 try:
225 225 if name in repos:
226 226 raise RepositoryError('Duplicate repository name %s '
227 227 'found in %s' % (name, path))
228 228 elif path[0] in rhodecode.BACKENDS:
229 229 klass = get_backend(path[0])
230 230 repos[name] = klass(path[1], config=config)
231 231 except OSError:
232 232 continue
233 233 log.debug('found %s paths with repositories', len(repos))
234 234 return repos
235 235
236 236 def get_repos(self, all_repos=None, sort_key=None):
237 237 """
238 238 Get all repositories from db and for each repo create it's
239 239 backend instance and fill that backed with information from database
240 240
241 241 :param all_repos: list of repository names as strings
242 242 give specific repositories list, good for filtering
243 243
244 244 :param sort_key: initial sorting of repositories
245 245 """
246 246 if all_repos is None:
247 247 all_repos = self.sa.query(Repository)\
248 248 .filter(Repository.group_id == None)\
249 249 .order_by(func.lower(Repository.repo_name)).all()
250 250 repo_iter = SimpleCachedRepoList(
251 251 all_repos, repos_path=self.repos_path, order_by=sort_key)
252 252 return repo_iter
253 253
254 254 def get_repo_groups(self, all_groups=None):
255 255 if all_groups is None:
256 256 all_groups = RepoGroup.query()\
257 257 .filter(RepoGroup.group_parent_id == None).all()
258 258 return [x for x in RepoGroupList(all_groups)]
259 259
260 260 def mark_for_invalidation(self, repo_name, delete=False):
261 261 """
262 262 Mark caches of this repo invalid in the database. `delete` flag
263 263 removes the cache entries
264 264
265 265 :param repo_name: the repo_name for which caches should be marked
266 266 invalid, or deleted
267 267 :param delete: delete the entry keys instead of setting bool
268 268 flag on them, and also purge caches used by the dogpile
269 269 """
270 270 repo = Repository.get_by_repo_name(repo_name)
271 271
272 272 if repo:
273 273 invalidation_namespace = CacheKey.REPO_INVALIDATION_NAMESPACE.format(
274 274 repo_id=repo.repo_id)
275 275 CacheKey.set_invalidate(invalidation_namespace, delete=delete)
276 276
277 277 repo_id = repo.repo_id
278 278 config = repo._config
279 279 config.set('extensions', 'largefiles', '')
280 280 repo.update_commit_cache(config=config, cs_cache=None)
281 281 if delete:
282 282 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
283 283 rc_cache.clear_cache_namespace('cache_repo', cache_namespace_uid)
284 284
285 285 def toggle_following_repo(self, follow_repo_id, user_id):
286 286
287 287 f = self.sa.query(UserFollowing)\
288 288 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
289 289 .filter(UserFollowing.user_id == user_id).scalar()
290 290
291 291 if f is not None:
292 292 try:
293 293 self.sa.delete(f)
294 294 return
295 295 except Exception:
296 296 log.error(traceback.format_exc())
297 297 raise
298 298
299 299 try:
300 300 f = UserFollowing()
301 301 f.user_id = user_id
302 302 f.follows_repo_id = follow_repo_id
303 303 self.sa.add(f)
304 304 except Exception:
305 305 log.error(traceback.format_exc())
306 306 raise
307 307
308 308 def toggle_following_user(self, follow_user_id, user_id):
309 309 f = self.sa.query(UserFollowing)\
310 310 .filter(UserFollowing.follows_user_id == follow_user_id)\
311 311 .filter(UserFollowing.user_id == user_id).scalar()
312 312
313 313 if f is not None:
314 314 try:
315 315 self.sa.delete(f)
316 316 return
317 317 except Exception:
318 318 log.error(traceback.format_exc())
319 319 raise
320 320
321 321 try:
322 322 f = UserFollowing()
323 323 f.user_id = user_id
324 324 f.follows_user_id = follow_user_id
325 325 self.sa.add(f)
326 326 except Exception:
327 327 log.error(traceback.format_exc())
328 328 raise
329 329
330 330 def is_following_repo(self, repo_name, user_id, cache=False):
331 331 r = self.sa.query(Repository)\
332 332 .filter(Repository.repo_name == repo_name).scalar()
333 333
334 334 f = self.sa.query(UserFollowing)\
335 335 .filter(UserFollowing.follows_repository == r)\
336 336 .filter(UserFollowing.user_id == user_id).scalar()
337 337
338 338 return f is not None
339 339
340 340 def is_following_user(self, username, user_id, cache=False):
341 341 u = User.get_by_username(username)
342 342
343 343 f = self.sa.query(UserFollowing)\
344 344 .filter(UserFollowing.follows_user == u)\
345 345 .filter(UserFollowing.user_id == user_id).scalar()
346 346
347 347 return f is not None
348 348
349 349 def get_followers(self, repo):
350 350 repo = self._get_repo(repo)
351 351
352 352 return self.sa.query(UserFollowing)\
353 353 .filter(UserFollowing.follows_repository == repo).count()
354 354
355 355 def get_forks(self, repo):
356 356 repo = self._get_repo(repo)
357 357 return self.sa.query(Repository)\
358 358 .filter(Repository.fork == repo).count()
359 359
360 360 def get_pull_requests(self, repo):
361 361 repo = self._get_repo(repo)
362 362 return self.sa.query(PullRequest)\
363 363 .filter(PullRequest.target_repo == repo)\
364 364 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
365 365
366 366 def mark_as_fork(self, repo, fork, user):
367 367 repo = self._get_repo(repo)
368 368 fork = self._get_repo(fork)
369 369 if fork and repo.repo_id == fork.repo_id:
370 370 raise Exception("Cannot set repository as fork of itself")
371 371
372 372 if fork and repo.repo_type != fork.repo_type:
373 373 raise RepositoryError(
374 374 "Cannot set repository as fork of repository with other type")
375 375
376 376 repo.fork = fork
377 377 self.sa.add(repo)
378 378 return repo
379 379
380 def pull_changes(self, repo, username, remote_uri=None):
380 def pull_changes(self, repo, username, remote_uri=None, validate_uri=True):
381 381 dbrepo = self._get_repo(repo)
382 382 remote_uri = remote_uri or dbrepo.clone_uri
383 383 if not remote_uri:
384 384 raise Exception("This repository doesn't have a clone uri")
385 385
386 386 repo = dbrepo.scm_instance(cache=False)
387 387 repo.config.clear_section('hooks')
388 388
389 389 try:
390 390 # NOTE(marcink): add extra validation so we skip invalid urls
391 391 # this is due this tasks can be executed via scheduler without
392 392 # proper validation of remote_uri
393 config = make_db_config(clear_session=False)
394 url_validator(remote_uri, dbrepo.repo_type, config)
393 if validate_uri:
394 config = make_db_config(clear_session=False)
395 url_validator(remote_uri, dbrepo.repo_type, config)
395 396 except InvalidCloneUrl:
396 397 raise
397 398
398 399 repo_name = dbrepo.repo_name
399 400 try:
400 401 # TODO: we need to make sure those operations call proper hooks !
401 402 repo.pull(remote_uri)
402 403
403 404 self.mark_for_invalidation(repo_name)
404 405 except Exception:
405 406 log.error(traceback.format_exc())
406 407 raise
407 408
408 def push_changes(self, repo, username, remote_uri=None):
409 def push_changes(self, repo, username, remote_uri=None, validate_uri=True):
409 410 dbrepo = self._get_repo(repo)
410 411 remote_uri = remote_uri or dbrepo.push_uri
411 412 if not remote_uri:
412 413 raise Exception("This repository doesn't have a clone uri")
413 414
414 415 repo = dbrepo.scm_instance(cache=False)
415 416 repo.config.clear_section('hooks')
416 417
417 418 try:
418 419 # NOTE(marcink): add extra validation so we skip invalid urls
419 420 # this is due this tasks can be executed via scheduler without
420 421 # proper validation of remote_uri
421 config = make_db_config(clear_session=False)
422 url_validator(remote_uri, dbrepo.repo_type, config)
422 if validate_uri:
423 config = make_db_config(clear_session=False)
424 url_validator(remote_uri, dbrepo.repo_type, config)
423 425 except InvalidCloneUrl:
424 426 raise
425 427
426 428 try:
427 429 repo.push(remote_uri)
428 430 except Exception:
429 431 log.error(traceback.format_exc())
430 432 raise
431 433
432 434 def commit_change(self, repo, repo_name, commit, user, author, message,
433 435 content, f_path):
434 436 """
435 437 Commits changes
436 438
437 439 :param repo: SCM instance
438 440
439 441 """
440 442 user = self._get_user(user)
441 443
442 444 # decoding here will force that we have proper encoded values
443 445 # in any other case this will throw exceptions and deny commit
444 446 content = safe_str(content)
445 447 path = safe_str(f_path)
446 448 # message and author needs to be unicode
447 449 # proper backend should then translate that into required type
448 450 message = safe_unicode(message)
449 451 author = safe_unicode(author)
450 452 imc = repo.in_memory_commit
451 453 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
452 454 try:
453 455 # TODO: handle pre-push action !
454 456 tip = imc.commit(
455 457 message=message, author=author, parents=[commit],
456 458 branch=commit.branch)
457 459 except Exception as e:
458 460 log.error(traceback.format_exc())
459 461 raise IMCCommitError(str(e))
460 462 finally:
461 463 # always clear caches, if commit fails we want fresh object also
462 464 self.mark_for_invalidation(repo_name)
463 465
464 466 # We trigger the post-push action
465 467 hooks_utils.trigger_post_push_hook(
466 468 username=user.username, action='push_local', repo_name=repo_name,
467 469 repo_alias=repo.alias, commit_ids=[tip.raw_id])
468 470 return tip
469 471
470 472 def _sanitize_path(self, f_path):
471 473 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
472 474 raise NonRelativePathError('%s is not an relative path' % f_path)
473 475 if f_path:
474 476 f_path = os.path.normpath(f_path)
475 477 return f_path
476 478
477 479 def get_dirnode_metadata(self, request, commit, dir_node):
478 480 if not dir_node.is_dir():
479 481 return []
480 482
481 483 data = []
482 484 for node in dir_node:
483 485 if not node.is_file():
484 486 # we skip file-nodes
485 487 continue
486 488
487 489 last_commit = node.last_commit
488 490 last_commit_date = last_commit.date
489 491 data.append({
490 492 'name': node.name,
491 493 'size': h.format_byte_size_binary(node.size),
492 494 'modified_at': h.format_date(last_commit_date),
493 495 'modified_ts': last_commit_date.isoformat(),
494 496 'revision': last_commit.revision,
495 497 'short_id': last_commit.short_id,
496 498 'message': h.escape(last_commit.message),
497 499 'author': h.escape(last_commit.author),
498 500 'user_profile': h.gravatar_with_user(
499 501 request, last_commit.author),
500 502 })
501 503
502 504 return data
503 505
504 506 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
505 507 extended_info=False, content=False, max_file_bytes=None):
506 508 """
507 509 recursive walk in root dir and return a set of all path in that dir
508 510 based on repository walk function
509 511
510 512 :param repo_name: name of repository
511 513 :param commit_id: commit id for which to list nodes
512 514 :param root_path: root path to list
513 515 :param flat: return as a list, if False returns a dict with description
514 516 :param max_file_bytes: will not return file contents over this limit
515 517
516 518 """
517 519 _files = list()
518 520 _dirs = list()
519 521 try:
520 522 _repo = self._get_repo(repo_name)
521 523 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
522 524 root_path = root_path.lstrip('/')
523 525 for __, dirs, files in commit.walk(root_path):
524 526 for f in files:
525 527 _content = None
526 528 _data = f.unicode_path
527 529 over_size_limit = (max_file_bytes is not None
528 530 and f.size > max_file_bytes)
529 531
530 532 if not flat:
531 533 _data = {
532 534 "name": h.escape(f.unicode_path),
533 535 "type": "file",
534 536 }
535 537 if extended_info:
536 538 _data.update({
537 539 "md5": f.md5,
538 540 "binary": f.is_binary,
539 541 "size": f.size,
540 542 "extension": f.extension,
541 543 "mimetype": f.mimetype,
542 544 "lines": f.lines()[0]
543 545 })
544 546
545 547 if content:
546 548 full_content = None
547 549 if not f.is_binary and not over_size_limit:
548 550 full_content = safe_str(f.content)
549 551
550 552 _data.update({
551 553 "content": full_content,
552 554 })
553 555 _files.append(_data)
554 556 for d in dirs:
555 557 _data = d.unicode_path
556 558 if not flat:
557 559 _data = {
558 560 "name": h.escape(d.unicode_path),
559 561 "type": "dir",
560 562 }
561 563 if extended_info:
562 564 _data.update({
563 565 "md5": None,
564 566 "binary": None,
565 567 "size": None,
566 568 "extension": None,
567 569 })
568 570 if content:
569 571 _data.update({
570 572 "content": None
571 573 })
572 574 _dirs.append(_data)
573 575 except RepositoryError:
574 576 log.debug("Exception in get_nodes", exc_info=True)
575 577 raise
576 578
577 579 return _dirs, _files
578 580
579 581 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
580 582 author=None, trigger_push_hook=True):
581 583 """
582 584 Commits given multiple nodes into repo
583 585
584 586 :param user: RhodeCode User object or user_id, the commiter
585 587 :param repo: RhodeCode Repository object
586 588 :param message: commit message
587 589 :param nodes: mapping {filename:{'content':content},...}
588 590 :param parent_commit: parent commit; can be empty, then it's the
589 591 initial commit
590 592 :param author: author of commit, can be different than the committer,
591 593 only for git
592 594 :param trigger_push_hook: trigger push hooks
593 595
594 596 :returns: new commited commit
595 597 """
596 598
597 599 user = self._get_user(user)
598 600 scm_instance = repo.scm_instance(cache=False)
599 601
600 602 processed_nodes = []
601 603 for f_path in nodes:
602 604 f_path = self._sanitize_path(f_path)
603 605 content = nodes[f_path]['content']
604 606 f_path = safe_str(f_path)
605 607 # decoding here will force that we have proper encoded values
606 608 # in any other case this will throw exceptions and deny commit
607 609 if isinstance(content, (basestring,)):
608 610 content = safe_str(content)
609 611 elif isinstance(content, (file, cStringIO.OutputType,)):
610 612 content = content.read()
611 613 else:
612 614 raise Exception('Content is of unrecognized type %s' % (
613 615 type(content)
614 616 ))
615 617 processed_nodes.append((f_path, content))
616 618
617 619 message = safe_unicode(message)
618 620 commiter = user.full_contact
619 621 author = safe_unicode(author) if author else commiter
620 622
621 623 imc = scm_instance.in_memory_commit
622 624
623 625 if not parent_commit:
624 626 parent_commit = EmptyCommit(alias=scm_instance.alias)
625 627
626 628 if isinstance(parent_commit, EmptyCommit):
627 629 # EmptyCommit means we we're editing empty repository
628 630 parents = None
629 631 else:
630 632 parents = [parent_commit]
631 633 # add multiple nodes
632 634 for path, content in processed_nodes:
633 635 imc.add(FileNode(path, content=content))
634 636 # TODO: handle pre push scenario
635 637 tip = imc.commit(message=message,
636 638 author=author,
637 639 parents=parents,
638 640 branch=parent_commit.branch)
639 641
640 642 self.mark_for_invalidation(repo.repo_name)
641 643 if trigger_push_hook:
642 644 hooks_utils.trigger_post_push_hook(
643 645 username=user.username, action='push_local',
644 646 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
645 647 commit_ids=[tip.raw_id])
646 648 return tip
647 649
648 650 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
649 651 author=None, trigger_push_hook=True):
650 652 user = self._get_user(user)
651 653 scm_instance = repo.scm_instance(cache=False)
652 654
653 655 message = safe_unicode(message)
654 656 commiter = user.full_contact
655 657 author = safe_unicode(author) if author else commiter
656 658
657 659 imc = scm_instance.in_memory_commit
658 660
659 661 if not parent_commit:
660 662 parent_commit = EmptyCommit(alias=scm_instance.alias)
661 663
662 664 if isinstance(parent_commit, EmptyCommit):
663 665 # EmptyCommit means we we're editing empty repository
664 666 parents = None
665 667 else:
666 668 parents = [parent_commit]
667 669
668 670 # add multiple nodes
669 671 for _filename, data in nodes.items():
670 672 # new filename, can be renamed from the old one, also sanitaze
671 673 # the path for any hack around relative paths like ../../ etc.
672 674 filename = self._sanitize_path(data['filename'])
673 675 old_filename = self._sanitize_path(_filename)
674 676 content = data['content']
675 677
676 678 filenode = FileNode(old_filename, content=content)
677 679 op = data['op']
678 680 if op == 'add':
679 681 imc.add(filenode)
680 682 elif op == 'del':
681 683 imc.remove(filenode)
682 684 elif op == 'mod':
683 685 if filename != old_filename:
684 686 # TODO: handle renames more efficient, needs vcs lib
685 687 # changes
686 688 imc.remove(filenode)
687 689 imc.add(FileNode(filename, content=content))
688 690 else:
689 691 imc.change(filenode)
690 692
691 693 try:
692 694 # TODO: handle pre push scenario
693 695 # commit changes
694 696 tip = imc.commit(message=message,
695 697 author=author,
696 698 parents=parents,
697 699 branch=parent_commit.branch)
698 700 except NodeNotChangedError:
699 701 raise
700 702 except Exception as e:
701 703 log.exception("Unexpected exception during call to imc.commit")
702 704 raise IMCCommitError(str(e))
703 705 finally:
704 706 # always clear caches, if commit fails we want fresh object also
705 707 self.mark_for_invalidation(repo.repo_name)
706 708
707 709 if trigger_push_hook:
708 710 hooks_utils.trigger_post_push_hook(
709 711 username=user.username, action='push_local',
710 712 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
711 713 commit_ids=[tip.raw_id])
712 714
713 715 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
714 716 author=None, trigger_push_hook=True):
715 717 """
716 718 Deletes given multiple nodes into `repo`
717 719
718 720 :param user: RhodeCode User object or user_id, the committer
719 721 :param repo: RhodeCode Repository object
720 722 :param message: commit message
721 723 :param nodes: mapping {filename:{'content':content},...}
722 724 :param parent_commit: parent commit, can be empty than it's initial
723 725 commit
724 726 :param author: author of commit, cna be different that commiter only
725 727 for git
726 728 :param trigger_push_hook: trigger push hooks
727 729
728 730 :returns: new commit after deletion
729 731 """
730 732
731 733 user = self._get_user(user)
732 734 scm_instance = repo.scm_instance(cache=False)
733 735
734 736 processed_nodes = []
735 737 for f_path in nodes:
736 738 f_path = self._sanitize_path(f_path)
737 739 # content can be empty but for compatabilty it allows same dicts
738 740 # structure as add_nodes
739 741 content = nodes[f_path].get('content')
740 742 processed_nodes.append((f_path, content))
741 743
742 744 message = safe_unicode(message)
743 745 commiter = user.full_contact
744 746 author = safe_unicode(author) if author else commiter
745 747
746 748 imc = scm_instance.in_memory_commit
747 749
748 750 if not parent_commit:
749 751 parent_commit = EmptyCommit(alias=scm_instance.alias)
750 752
751 753 if isinstance(parent_commit, EmptyCommit):
752 754 # EmptyCommit means we we're editing empty repository
753 755 parents = None
754 756 else:
755 757 parents = [parent_commit]
756 758 # add multiple nodes
757 759 for path, content in processed_nodes:
758 760 imc.remove(FileNode(path, content=content))
759 761
760 762 # TODO: handle pre push scenario
761 763 tip = imc.commit(message=message,
762 764 author=author,
763 765 parents=parents,
764 766 branch=parent_commit.branch)
765 767
766 768 self.mark_for_invalidation(repo.repo_name)
767 769 if trigger_push_hook:
768 770 hooks_utils.trigger_post_push_hook(
769 771 username=user.username, action='push_local',
770 772 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
771 773 commit_ids=[tip.raw_id])
772 774 return tip
773 775
774 776 def strip(self, repo, commit_id, branch):
775 777 scm_instance = repo.scm_instance(cache=False)
776 778 scm_instance.config.clear_section('hooks')
777 779 scm_instance.strip(commit_id, branch)
778 780 self.mark_for_invalidation(repo.repo_name)
779 781
780 782 def get_unread_journal(self):
781 783 return self.sa.query(UserLog).count()
782 784
783 785 def get_repo_landing_revs(self, translator, repo=None):
784 786 """
785 787 Generates select option with tags branches and bookmarks (for hg only)
786 788 grouped by type
787 789
788 790 :param repo:
789 791 """
790 792 _ = translator
791 793 repo = self._get_repo(repo)
792 794
793 795 hist_l = [
794 796 ['rev:tip', _('latest tip')]
795 797 ]
796 798 choices = [
797 799 'rev:tip'
798 800 ]
799 801
800 802 if not repo:
801 803 return choices, hist_l
802 804
803 805 repo = repo.scm_instance()
804 806
805 807 branches_group = (
806 808 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
807 809 for b in repo.branches],
808 810 _("Branches"))
809 811 hist_l.append(branches_group)
810 812 choices.extend([x[0] for x in branches_group[0]])
811 813
812 814 if repo.alias == 'hg':
813 815 bookmarks_group = (
814 816 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
815 817 for b in repo.bookmarks],
816 818 _("Bookmarks"))
817 819 hist_l.append(bookmarks_group)
818 820 choices.extend([x[0] for x in bookmarks_group[0]])
819 821
820 822 tags_group = (
821 823 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
822 824 for t in repo.tags],
823 825 _("Tags"))
824 826 hist_l.append(tags_group)
825 827 choices.extend([x[0] for x in tags_group[0]])
826 828
827 829 return choices, hist_l
828 830
829 831 def get_server_info(self, environ=None):
830 832 server_info = get_system_info(environ)
831 833 return server_info
General Comments 0
You need to be logged in to leave comments. Login now