scm-model: fix the reference to the proper default url used for pushing
marcink
r2561:d072c29a default
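Note on the change: when no explicit remote_uri is passed, push_changes now falls back to the repository's push_uri instead of its clone_uri, while pull_changes keeps defaulting to clone_uri. A minimal, hypothetical sketch of that fallback is shown here (dbrepo stands in for the Repository object used in the methods below; the authoritative code is in the diff):

    # Hedged sketch only -- not RhodeCode's API beyond the attribute names
    # clone_uri/push_uri that appear in the diff below.
    def resolve_pull_uri(dbrepo, remote_uri=None):
        # pulling still defaults to the configured clone URL
        return remote_uri or dbrepo.clone_uri

    def resolve_push_uri(dbrepo, remote_uri=None):
        # pushing now defaults to the configured push URL (previously clone_uri)
        return remote_uri or dbrepo.push_uri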
@@ -1,922 +1,922 @@
# -*- coding: utf-8 -*-

# Copyright (C) 2010-2018 RhodeCode GmbH
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License, version 3
# (only), as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This program is dual-licensed. If you wish to learn more about the
# RhodeCode Enterprise Edition, including its added features, Support services,
# and proprietary license terms, please see https://rhodecode.com/licenses/

"""
Scm model for RhodeCode
"""

import os.path
import re
import sys
import traceback
import logging
import cStringIO
import pkg_resources

from sqlalchemy import func
from zope.cachedescriptors.property import Lazy as LazyProperty

import rhodecode
from rhodecode.lib.vcs import get_backend
from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
from rhodecode.lib.vcs.nodes import FileNode
from rhodecode.lib.vcs.backends.base import EmptyCommit
from rhodecode.lib import helpers as h
from rhodecode.lib.auth import (
    HasRepoPermissionAny, HasRepoGroupPermissionAny,
    HasUserGroupPermissionAny)
from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
from rhodecode.lib import hooks_utils, caches
from rhodecode.lib.utils import (
    get_filesystem_repos, make_db_config)
from rhodecode.lib.utils2 import (safe_str, safe_unicode)
from rhodecode.lib.system_info import get_system_info
from rhodecode.model import BaseModel
from rhodecode.model.db import (
    Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
    PullRequest)
from rhodecode.model.settings import VcsSettingsModel

log = logging.getLogger(__name__)


class UserTemp(object):
    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)


class RepoTemp(object):
    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)


class SimpleCachedRepoList(object):
    """
    Lighter version of of iteration of repos without the scm initialisation,
    and with cache usage
    """
    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        self.reversed = (order_by or '').startswith('-')
        if not perm_set:
            perm_set = ['repository.read', 'repository.write',
                        'repository.admin']
        self.perm_set = perm_set

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for dbr in self.db_repo_list:
            # check permission at this level
            has_perm = HasRepoPermissionAny(*self.perm_set)(
                dbr.repo_name, 'SimpleCachedRepoList check')
            if not has_perm:
                continue

            tmp_d = {
                'name': dbr.repo_name,
                'dbrepo': dbr.get_dict(),
                'dbrepo_fork': dbr.fork.get_dict() if dbr.fork else {}
            }
            yield tmp_d


class _PermCheckIterator(object):

    def __init__(
            self, obj_list, obj_attr, perm_set, perm_checker,
            extra_kwargs=None):
        """
        Creates iterator from given list of objects, additionally
        checking permission for them from perm_set var

        :param obj_list: list of db objects
        :param obj_attr: attribute of object to pass into perm_checker
        :param perm_set: list of permissions to check
        :param perm_checker: callable to check permissions against
        """
        self.obj_list = obj_list
        self.obj_attr = obj_attr
        self.perm_set = perm_set
        self.perm_checker = perm_checker
        self.extra_kwargs = extra_kwargs or {}

    def __len__(self):
        return len(self.obj_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        checker = self.perm_checker(*self.perm_set)
        for db_obj in self.obj_list:
            # check permission at this level
            name = getattr(db_obj, self.obj_attr, None)
            if not checker(name, self.__class__.__name__, **self.extra_kwargs):
                continue

            yield db_obj


class RepoList(_PermCheckIterator):

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = [
                'repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)


class RepoGroupList(_PermCheckIterator):

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class UserGroupList(_PermCheckIterator):

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        if not perm_set:
            perm_set = ['usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)


class ScmModel(BaseModel):
    """
    Generic Scm Model
    """

    @LazyProperty
    def repos_path(self):
        """
        Gets the repositories root path from database
        """

        settings_model = VcsSettingsModel(sa=self.sa)
        return settings_model.get_repos_location()

    def repo_scan(self, repos_path=None):
        """
        Listing of repositories in given path. This path should not be a
        repository itself. Return a dictionary of repository objects

        :param repos_path: path to directory containing repositories
        """

        if repos_path is None:
            repos_path = self.repos_path

        log.info('scanning for repositories in %s', repos_path)

        config = make_db_config()
        config.set('extensions', 'largefiles', '')
        repos = {}

        for name, path in get_filesystem_repos(repos_path, recursive=True):
            # name need to be decomposed and put back together using the /
            # since this is internal storage separator for rhodecode
            name = Repository.normalize_repo_name(name)

            try:
                if name in repos:
                    raise RepositoryError('Duplicate repository name %s '
                                          'found in %s' % (name, path))
                elif path[0] in rhodecode.BACKENDS:
                    klass = get_backend(path[0])
                    repos[name] = klass(path[1], config=config)
            except OSError:
                continue
        log.debug('found %s paths with repositories', len(repos))
        return repos

    def get_repos(self, all_repos=None, sort_key=None):
        """
        Get all repositories from db and for each repo create it's
        backend instance and fill that backed with information from database

        :param all_repos: list of repository names as strings
            give specific repositories list, good for filtering

        :param sort_key: initial sorting of repositories
        """
        if all_repos is None:
            all_repos = self.sa.query(Repository)\
                .filter(Repository.group_id == None)\
                .order_by(func.lower(Repository.repo_name)).all()
        repo_iter = SimpleCachedRepoList(
            all_repos, repos_path=self.repos_path, order_by=sort_key)
        return repo_iter

    def get_repo_groups(self, all_groups=None):
        if all_groups is None:
            all_groups = RepoGroup.query()\
                .filter(RepoGroup.group_parent_id == None).all()
        return [x for x in RepoGroupList(all_groups)]

    def mark_for_invalidation(self, repo_name, delete=False):
        """
        Mark caches of this repo invalid in the database. `delete` flag
        removes the cache entries

        :param repo_name: the repo_name for which caches should be marked
            invalid, or deleted
        :param delete: delete the entry keys instead of setting bool
            flag on them
        """
        CacheKey.set_invalidate(repo_name, delete=delete)
        repo = Repository.get_by_repo_name(repo_name)

        if repo:
            config = repo._config
            config.set('extensions', 'largefiles', '')
            repo.update_commit_cache(config=config, cs_cache=None)
            caches.clear_repo_caches(repo_name)

    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def toggle_following_user(self, follow_user_id, user_id):
        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user_id == follow_user_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_user_id = follow_user_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def is_following_repo(self, repo_name, user_id, cache=False):
        r = self.sa.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == r)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def is_following_user(self, username, user_id, cache=False):
        u = User.get_by_username(username)

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_user == u)\
            .filter(UserFollowing.user_id == user_id).scalar()

        return f is not None

    def get_followers(self, repo):
        repo = self._get_repo(repo)

        return self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repository == repo).count()

    def get_forks(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(Repository)\
            .filter(Repository.fork == repo).count()

    def get_pull_requests(self, repo):
        repo = self._get_repo(repo)
        return self.sa.query(PullRequest)\
            .filter(PullRequest.target_repo == repo)\
            .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()

    def mark_as_fork(self, repo, fork, user):
        repo = self._get_repo(repo)
        fork = self._get_repo(fork)
        if fork and repo.repo_id == fork.repo_id:
            raise Exception("Cannot set repository as fork of itself")

        if fork and repo.repo_type != fork.repo_type:
            raise RepositoryError(
                "Cannot set repository as fork of repository with other type")

        repo.fork = fork
        self.sa.add(repo)
        return repo

    def pull_changes(self, repo, username, remote_uri=None):
        dbrepo = self._get_repo(repo)
        remote_uri = remote_uri or dbrepo.clone_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        # TODO: marcink fix this an re-enable since we need common logic
        # for hg/git remove hooks so we don't trigger them on fetching
        # commits from remote
        repo.config.clear_section('hooks')

        repo_name = dbrepo.repo_name
        try:
            # TODO: we need to make sure those operations call proper hooks !
            repo.pull(remote_uri)

            self.mark_for_invalidation(repo_name)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def push_changes(self, repo, username, remote_uri=None):
        dbrepo = self._get_repo(repo)
-        remote_uri = remote_uri or dbrepo.clone_uri
+        remote_uri = remote_uri or dbrepo.push_uri
        if not remote_uri:
            raise Exception("This repository doesn't have a clone uri")

        repo = dbrepo.scm_instance(cache=False)
        repo.config.clear_section('hooks')

        try:
            repo.push(remote_uri)
        except Exception:
            log.error(traceback.format_exc())
            raise

    def commit_change(self, repo, repo_name, commit, user, author, message,
                      content, f_path):
        """
        Commits changes

        :param repo: SCM instance

        """
        user = self._get_user(user)

        # decoding here will force that we have proper encoded values
        # in any other case this will throw exceptions and deny commit
        content = safe_str(content)
        path = safe_str(f_path)
        # message and author needs to be unicode
        # proper backend should then translate that into required type
        message = safe_unicode(message)
        author = safe_unicode(author)
        imc = repo.in_memory_commit
        imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
        try:
            # TODO: handle pre-push action !
            tip = imc.commit(
                message=message, author=author, parents=[commit],
                branch=commit.branch)
        except Exception as e:
            log.error(traceback.format_exc())
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo_name)

        # We trigger the post-push action
        hooks_utils.trigger_post_push_hook(
            username=user.username, action='push_local', repo_name=repo_name,
            repo_alias=repo.alias, commit_ids=[tip.raw_id])
        return tip

    def _sanitize_path(self, f_path):
        if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
            raise NonRelativePathError('%s is not an relative path' % f_path)
        if f_path:
            f_path = os.path.normpath(f_path)
        return f_path

    def get_dirnode_metadata(self, request, commit, dir_node):
        if not dir_node.is_dir():
            return []

        data = []
        for node in dir_node:
            if not node.is_file():
                # we skip file-nodes
                continue

            last_commit = node.last_commit
            last_commit_date = last_commit.date
            data.append({
                'name': node.name,
                'size': h.format_byte_size_binary(node.size),
                'modified_at': h.format_date(last_commit_date),
                'modified_ts': last_commit_date.isoformat(),
                'revision': last_commit.revision,
                'short_id': last_commit.short_id,
                'message': h.escape(last_commit.message),
                'author': h.escape(last_commit.author),
                'user_profile': h.gravatar_with_user(
                    request, last_commit.author),
            })

        return data

    def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
                  extended_info=False, content=False, max_file_bytes=None):
        """
        recursive walk in root dir and return a set of all path in that dir
        based on repository walk function

        :param repo_name: name of repository
        :param commit_id: commit id for which to list nodes
        :param root_path: root path to list
        :param flat: return as a list, if False returns a dict with description
        :param max_file_bytes: will not return file contents over this limit

        """
        _files = list()
        _dirs = list()
        try:
            _repo = self._get_repo(repo_name)
            commit = _repo.scm_instance().get_commit(commit_id=commit_id)
            root_path = root_path.lstrip('/')
            for __, dirs, files in commit.walk(root_path):
                for f in files:
                    _content = None
                    _data = f.unicode_path
                    over_size_limit = (max_file_bytes is not None
                                       and f.size > max_file_bytes)

                    if not flat:
                        _data = {
                            "name": h.escape(f.unicode_path),
                            "type": "file",
                        }
                        if extended_info:
                            _data.update({
                                "md5": f.md5,
                                "binary": f.is_binary,
                                "size": f.size,
                                "extension": f.extension,
                                "mimetype": f.mimetype,
                                "lines": f.lines()[0]
                            })

                        if content:
                            full_content = None
                            if not f.is_binary and not over_size_limit:
                                full_content = safe_str(f.content)

                            _data.update({
                                "content": full_content,
                            })
                    _files.append(_data)
                for d in dirs:
                    _data = d.unicode_path
                    if not flat:
                        _data = {
                            "name": h.escape(d.unicode_path),
                            "type": "dir",
                        }
                    if extended_info:
                        _data.update({
                            "md5": None,
                            "binary": None,
                            "size": None,
                            "extension": None,
                        })
                    if content:
                        _data.update({
                            "content": None
                        })
                    _dirs.append(_data)
        except RepositoryError:
            log.debug("Exception in get_nodes", exc_info=True)
            raise

        return _dirs, _files

    def create_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Commits given multiple nodes into repo

        :param user: RhodeCode User object or user_id, the commiter
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's
            initial commit
        :param author: author of commit, cna be different that commiter
            only for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commited commit
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            content = nodes[f_path]['content']
            f_path = safe_str(f_path)
            # decoding here will force that we have proper encoded values
            # in any other case this will throw exceptions and deny commit
            if isinstance(content, (basestring,)):
                content = safe_str(content)
            elif isinstance(content, (file, cStringIO.OutputType,)):
                content = content.read()
            else:
                raise Exception('Content is of unrecognized type %s' % (
                    type(content)
                ))
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.add(FileNode(path, content=content))
        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

    def update_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]

        # add multiple nodes
        for _filename, data in nodes.items():
            # new filename, can be renamed from the old one, also sanitaze
            # the path for any hack around relative paths like ../../ etc.
            filename = self._sanitize_path(data['filename'])
            old_filename = self._sanitize_path(_filename)
            content = data['content']

            filenode = FileNode(old_filename, content=content)
            op = data['op']
            if op == 'add':
                imc.add(filenode)
            elif op == 'del':
                imc.remove(filenode)
            elif op == 'mod':
                if filename != old_filename:
                    # TODO: handle renames more efficient, needs vcs lib
                    # changes
                    imc.remove(filenode)
                    imc.add(FileNode(filename, content=content))
                else:
                    imc.change(filenode)

        try:
            # TODO: handle pre push scenario
            # commit changes
            tip = imc.commit(message=message,
                             author=author,
                             parents=parents,
                             branch=parent_commit.branch)
        except NodeNotChangedError:
            raise
        except Exception as e:
            log.exception("Unexpected exception during call to imc.commit")
            raise IMCCommitError(str(e))
        finally:
            # always clear caches, if commit fails we want fresh object also
            self.mark_for_invalidation(repo.repo_name)

        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])

    def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
                     author=None, trigger_push_hook=True):
        """
        Deletes given multiple nodes into `repo`

        :param user: RhodeCode User object or user_id, the committer
        :param repo: RhodeCode Repository object
        :param message: commit message
        :param nodes: mapping {filename:{'content':content},...}
        :param parent_commit: parent commit, can be empty than it's initial
            commit
        :param author: author of commit, cna be different that commiter only
            for git
        :param trigger_push_hook: trigger push hooks

        :returns: new commit after deletion
        """

        user = self._get_user(user)
        scm_instance = repo.scm_instance(cache=False)

        processed_nodes = []
        for f_path in nodes:
            f_path = self._sanitize_path(f_path)
            # content can be empty but for compatabilty it allows same dicts
            # structure as add_nodes
            content = nodes[f_path].get('content')
            processed_nodes.append((f_path, content))

        message = safe_unicode(message)
        commiter = user.full_contact
        author = safe_unicode(author) if author else commiter

        imc = scm_instance.in_memory_commit

        if not parent_commit:
            parent_commit = EmptyCommit(alias=scm_instance.alias)

        if isinstance(parent_commit, EmptyCommit):
            # EmptyCommit means we we're editing empty repository
            parents = None
        else:
            parents = [parent_commit]
        # add multiple nodes
        for path, content in processed_nodes:
            imc.remove(FileNode(path, content=content))

        # TODO: handle pre push scenario
        tip = imc.commit(message=message,
                         author=author,
                         parents=parents,
                         branch=parent_commit.branch)

        self.mark_for_invalidation(repo.repo_name)
        if trigger_push_hook:
            hooks_utils.trigger_post_push_hook(
                username=user.username, action='push_local',
                repo_name=repo.repo_name, repo_alias=scm_instance.alias,
                commit_ids=[tip.raw_id])
        return tip

    def strip(self, repo, commit_id, branch):
        scm_instance = repo.scm_instance(cache=False)
        scm_instance.config.clear_section('hooks')
        scm_instance.strip(commit_id, branch)
        self.mark_for_invalidation(repo.repo_name)

    def get_unread_journal(self):
        return self.sa.query(UserLog).count()

    def get_repo_landing_revs(self, translator, repo=None):
        """
        Generates select option with tags branches and bookmarks (for hg only)
        grouped by type

        :param repo:
        """
        _ = translator
        repo = self._get_repo(repo)

        hist_l = [
            ['rev:tip', _('latest tip')]
        ]
        choices = [
            'rev:tip'
        ]

        if not repo:
            return choices, hist_l

        repo = repo.scm_instance()

        branches_group = (
            [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
             for b in repo.branches],
            _("Branches"))
        hist_l.append(branches_group)
        choices.extend([x[0] for x in branches_group[0]])

        if repo.alias == 'hg':
            bookmarks_group = (
                [(u'book:%s' % safe_unicode(b), safe_unicode(b))
                 for b in repo.bookmarks],
                _("Bookmarks"))
            hist_l.append(bookmarks_group)
            choices.extend([x[0] for x in bookmarks_group[0]])

        tags_group = (
            [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
             for t in repo.tags],
            _("Tags"))
        hist_l.append(tags_group)
        choices.extend([x[0] for x in tags_group[0]])

        return choices, hist_l

    def install_git_hook(self, repo, force_create=False):
        """
        Creates a rhodecode hook inside a git repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """

        loc = os.path.join(repo.path, 'hooks')
        if not repo.bare:
            loc = os.path.join(repo.path, '.git', 'hooks')
        if not os.path.isdir(loc):
            os.makedirs(loc, mode=0777)

        tmpl_post = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
        tmpl_pre = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))

        for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
            _hook_file = os.path.join(loc, '%s-receive' % h_type)
            log.debug('Installing git hook in repo %s', repo)
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)

            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', h_type)
                try:
                    with open(_hook_file, 'wb') as f:
                        tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
                        tmpl = tmpl.replace('_ENV_', sys.executable)
                        f.write(tmpl)
                    os.chmod(_hook_file, 0755)
                except IOError:
                    log.exception('error writing hook file %s', _hook_file)
            else:
                log.debug('skipping writing hook file')

    def install_svn_hooks(self, repo, force_create=False):
        """
        Creates rhodecode hooks inside a svn repository

        :param repo: Instance of VCS repo
        :param force_create: Create even if same name hook exists
        """
        hooks_path = os.path.join(repo.path, 'hooks')
        if not os.path.isdir(hooks_path):
            os.makedirs(hooks_path)
        post_commit_tmpl = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
        pre_commit_template = pkg_resources.resource_string(
            'rhodecode', '/'.join(
                ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
        templates = {
            'post-commit': post_commit_tmpl,
            'pre-commit': pre_commit_template
        }
        for filename in templates:
            _hook_file = os.path.join(hooks_path, filename)
            _rhodecode_hook = _check_rhodecode_hook(_hook_file)
            if _rhodecode_hook or force_create:
                log.debug('writing %s hook file !', filename)
                template = templates[filename]
                try:
                    with open(_hook_file, 'wb') as f:
                        template = template.replace(
                            '_TMPL_', rhodecode.__version__)
                        template = template.replace('_ENV_', sys.executable)
                        f.write(template)
                    os.chmod(_hook_file, 0755)
                except IOError:
                    log.exception('error writing hook file %s', filename)
            else:
                log.debug('skipping writing hook file')

    def install_hooks(self, repo, repo_type):
        if repo_type == 'git':
            self.install_git_hook(repo)
        elif repo_type == 'svn':
            self.install_svn_hooks(repo)

    def get_server_info(self, environ=None):
        server_info = get_system_info(environ)
        return server_info


def _check_rhodecode_hook(hook_path):
    """
    Check if the hook was created by RhodeCode
    """
    if not os.path.exists(hook_path):
        return True

    log.debug('hook exists, checking if it is from rhodecode')
    hook_content = _read_hook(hook_path)
    matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
    if matches:
        try:
            version = matches.groups()[0]
            log.debug('got %s, it is rhodecode', version)
            return True
        except Exception:
            log.exception("Exception while reading the hook version.")

    return False


def _read_hook(hook_path):
    with open(hook_path, 'rb') as f:
        content = f.read()
    return content