##// END OF EJS Templates
security: escape the returned paths of files and directories to prevent stored XSS via crafted file or directory names.
r1827:9e60361c default
parent child Browse files
Show More
@@ -1,908 +1,908 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2017 RhodeCode GmbH
3 # Copyright (C) 2010-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Scm model for RhodeCode
22 Scm model for RhodeCode
23 """
23 """
24
24
25 import os.path
25 import os.path
26 import re
26 import re
27 import sys
27 import sys
28 import traceback
28 import traceback
29 import logging
29 import logging
30 import cStringIO
30 import cStringIO
31 import pkg_resources
31 import pkg_resources
32
32
33 from pylons.i18n.translation import _
33 from pylons.i18n.translation import _
34 from sqlalchemy import func
34 from sqlalchemy import func
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 import rhodecode
37 import rhodecode
38 from rhodecode.lib.vcs import get_backend
38 from rhodecode.lib.vcs import get_backend
39 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
39 from rhodecode.lib.vcs.exceptions import RepositoryError, NodeNotChangedError
40 from rhodecode.lib.vcs.nodes import FileNode
40 from rhodecode.lib.vcs.nodes import FileNode
41 from rhodecode.lib.vcs.backends.base import EmptyCommit
41 from rhodecode.lib.vcs.backends.base import EmptyCommit
42 from rhodecode.lib import helpers as h
42 from rhodecode.lib import helpers as h
43
43
44 from rhodecode.lib.auth import (
44 from rhodecode.lib.auth import (
45 HasRepoPermissionAny, HasRepoGroupPermissionAny,
45 HasRepoPermissionAny, HasRepoGroupPermissionAny,
46 HasUserGroupPermissionAny)
46 HasUserGroupPermissionAny)
47 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
47 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
48 from rhodecode.lib import hooks_utils, caches
48 from rhodecode.lib import hooks_utils, caches
49 from rhodecode.lib.utils import (
49 from rhodecode.lib.utils import (
50 get_filesystem_repos, make_db_config)
50 get_filesystem_repos, make_db_config)
51 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
51 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
52 from rhodecode.lib.system_info import get_system_info
52 from rhodecode.lib.system_info import get_system_info
53 from rhodecode.model import BaseModel
53 from rhodecode.model import BaseModel
54 from rhodecode.model.db import (
54 from rhodecode.model.db import (
55 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
55 Repository, CacheKey, UserFollowing, UserLog, User, RepoGroup,
56 PullRequest)
56 PullRequest)
57 from rhodecode.model.settings import VcsSettingsModel
57 from rhodecode.model.settings import VcsSettingsModel
58
58
59 log = logging.getLogger(__name__)
59 log = logging.getLogger(__name__)
60
60
61
61
class UserTemp(object):
    """Lightweight stand-in object carrying only a user id (audit logging)."""

    def __init__(self, user_id):
        self.user_id = user_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.user_id)
68
68
69
69
class RepoTemp(object):
    """Lightweight stand-in object carrying only a repo id (audit logging)."""

    def __init__(self, repo_id):
        self.repo_id = repo_id

    def __repr__(self):
        return "<{}('id:{}')>".format(self.__class__.__name__, self.repo_id)
76
76
77
77
class SimpleCachedRepoList(object):
    """
    Lighter-weight iteration over repositories that skips full scm
    initialisation and relies on cached db data instead.
    """

    def __init__(self, db_repo_list, repos_path, order_by=None, perm_set=None):
        self.db_repo_list = db_repo_list
        self.repos_path = repos_path
        self.order_by = order_by
        # a leading '-' on the sort key means descending order
        self.reversed = (order_by or '').startswith('-')
        self.perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

    def __len__(self):
        return len(self.db_repo_list)

    def __repr__(self):
        return '<%s (%s)>' % (self.__class__.__name__, self.__len__())

    def __iter__(self):
        for db_repo in self.db_repo_list:
            # only yield repositories the current user is permitted to see
            if not HasRepoPermissionAny(*self.perm_set)(
                    db_repo.repo_name, 'SimpleCachedRepoList check'):
                continue

            yield {
                'name': db_repo.repo_name,
                'dbrepo': db_repo.get_dict(),
                'dbrepo_fork': db_repo.fork.get_dict() if db_repo.fork else {},
            }
113
113
114
114
115 class _PermCheckIterator(object):
115 class _PermCheckIterator(object):
116
116
117 def __init__(
117 def __init__(
118 self, obj_list, obj_attr, perm_set, perm_checker,
118 self, obj_list, obj_attr, perm_set, perm_checker,
119 extra_kwargs=None):
119 extra_kwargs=None):
120 """
120 """
121 Creates iterator from given list of objects, additionally
121 Creates iterator from given list of objects, additionally
122 checking permission for them from perm_set var
122 checking permission for them from perm_set var
123
123
124 :param obj_list: list of db objects
124 :param obj_list: list of db objects
125 :param obj_attr: attribute of object to pass into perm_checker
125 :param obj_attr: attribute of object to pass into perm_checker
126 :param perm_set: list of permissions to check
126 :param perm_set: list of permissions to check
127 :param perm_checker: callable to check permissions against
127 :param perm_checker: callable to check permissions against
128 """
128 """
129 self.obj_list = obj_list
129 self.obj_list = obj_list
130 self.obj_attr = obj_attr
130 self.obj_attr = obj_attr
131 self.perm_set = perm_set
131 self.perm_set = perm_set
132 self.perm_checker = perm_checker
132 self.perm_checker = perm_checker
133 self.extra_kwargs = extra_kwargs or {}
133 self.extra_kwargs = extra_kwargs or {}
134
134
135 def __len__(self):
135 def __len__(self):
136 return len(self.obj_list)
136 return len(self.obj_list)
137
137
138 def __repr__(self):
138 def __repr__(self):
139 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
139 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
140
140
141 def __iter__(self):
141 def __iter__(self):
142 checker = self.perm_checker(*self.perm_set)
142 checker = self.perm_checker(*self.perm_set)
143 for db_obj in self.obj_list:
143 for db_obj in self.obj_list:
144 # check permission at this level
144 # check permission at this level
145 name = getattr(db_obj, self.obj_attr, None)
145 name = getattr(db_obj, self.obj_attr, None)
146 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
146 if not checker(name, self.__class__.__name__, **self.extra_kwargs):
147 continue
147 continue
148
148
149 yield db_obj
149 yield db_obj
150
150
151
151
class RepoList(_PermCheckIterator):
    """Permission-filtered iterator over Repository db objects."""

    def __init__(self, db_repo_list, perm_set=None, extra_kwargs=None):
        # default to any read-or-better repository permission
        perm_set = perm_set or [
            'repository.read', 'repository.write', 'repository.admin']

        super(RepoList, self).__init__(
            obj_list=db_repo_list,
            obj_attr='repo_name', perm_set=perm_set,
            perm_checker=HasRepoPermissionAny,
            extra_kwargs=extra_kwargs)
164
164
165
165
class RepoGroupList(_PermCheckIterator):
    """Permission-filtered iterator over RepoGroup db objects."""

    def __init__(self, db_repo_group_list, perm_set=None, extra_kwargs=None):
        # default to any read-or-better group permission
        perm_set = perm_set or ['group.read', 'group.write', 'group.admin']

        super(RepoGroupList, self).__init__(
            obj_list=db_repo_group_list,
            obj_attr='group_name', perm_set=perm_set,
            perm_checker=HasRepoGroupPermissionAny,
            extra_kwargs=extra_kwargs)
177
177
178
178
class UserGroupList(_PermCheckIterator):
    """Permission-filtered iterator over UserGroup db objects."""

    def __init__(self, db_user_group_list, perm_set=None, extra_kwargs=None):
        # default to any read-or-better user-group permission
        perm_set = perm_set or [
            'usergroup.read', 'usergroup.write', 'usergroup.admin']

        super(UserGroupList, self).__init__(
            obj_list=db_user_group_list,
            obj_attr='users_group_name', perm_set=perm_set,
            perm_checker=HasUserGroupPermissionAny,
            extra_kwargs=extra_kwargs)
190
190
191
191
192 class ScmModel(BaseModel):
192 class ScmModel(BaseModel):
193 """
193 """
194 Generic Scm Model
194 Generic Scm Model
195 """
195 """
196
196
197 @LazyProperty
197 @LazyProperty
198 def repos_path(self):
198 def repos_path(self):
199 """
199 """
200 Gets the repositories root path from database
200 Gets the repositories root path from database
201 """
201 """
202
202
203 settings_model = VcsSettingsModel(sa=self.sa)
203 settings_model = VcsSettingsModel(sa=self.sa)
204 return settings_model.get_repos_location()
204 return settings_model.get_repos_location()
205
205
206 def repo_scan(self, repos_path=None):
206 def repo_scan(self, repos_path=None):
207 """
207 """
208 Listing of repositories in given path. This path should not be a
208 Listing of repositories in given path. This path should not be a
209 repository itself. Return a dictionary of repository objects
209 repository itself. Return a dictionary of repository objects
210
210
211 :param repos_path: path to directory containing repositories
211 :param repos_path: path to directory containing repositories
212 """
212 """
213
213
214 if repos_path is None:
214 if repos_path is None:
215 repos_path = self.repos_path
215 repos_path = self.repos_path
216
216
217 log.info('scanning for repositories in %s', repos_path)
217 log.info('scanning for repositories in %s', repos_path)
218
218
219 config = make_db_config()
219 config = make_db_config()
220 config.set('extensions', 'largefiles', '')
220 config.set('extensions', 'largefiles', '')
221 repos = {}
221 repos = {}
222
222
223 for name, path in get_filesystem_repos(repos_path, recursive=True):
223 for name, path in get_filesystem_repos(repos_path, recursive=True):
224 # name need to be decomposed and put back together using the /
224 # name need to be decomposed and put back together using the /
225 # since this is internal storage separator for rhodecode
225 # since this is internal storage separator for rhodecode
226 name = Repository.normalize_repo_name(name)
226 name = Repository.normalize_repo_name(name)
227
227
228 try:
228 try:
229 if name in repos:
229 if name in repos:
230 raise RepositoryError('Duplicate repository name %s '
230 raise RepositoryError('Duplicate repository name %s '
231 'found in %s' % (name, path))
231 'found in %s' % (name, path))
232 elif path[0] in rhodecode.BACKENDS:
232 elif path[0] in rhodecode.BACKENDS:
233 klass = get_backend(path[0])
233 klass = get_backend(path[0])
234 repos[name] = klass(path[1], config=config)
234 repos[name] = klass(path[1], config=config)
235 except OSError:
235 except OSError:
236 continue
236 continue
237 log.debug('found %s paths with repositories', len(repos))
237 log.debug('found %s paths with repositories', len(repos))
238 return repos
238 return repos
239
239
240 def get_repos(self, all_repos=None, sort_key=None):
240 def get_repos(self, all_repos=None, sort_key=None):
241 """
241 """
242 Get all repositories from db and for each repo create it's
242 Get all repositories from db and for each repo create it's
243 backend instance and fill that backed with information from database
243 backend instance and fill that backed with information from database
244
244
245 :param all_repos: list of repository names as strings
245 :param all_repos: list of repository names as strings
246 give specific repositories list, good for filtering
246 give specific repositories list, good for filtering
247
247
248 :param sort_key: initial sorting of repositories
248 :param sort_key: initial sorting of repositories
249 """
249 """
250 if all_repos is None:
250 if all_repos is None:
251 all_repos = self.sa.query(Repository)\
251 all_repos = self.sa.query(Repository)\
252 .filter(Repository.group_id == None)\
252 .filter(Repository.group_id == None)\
253 .order_by(func.lower(Repository.repo_name)).all()
253 .order_by(func.lower(Repository.repo_name)).all()
254 repo_iter = SimpleCachedRepoList(
254 repo_iter = SimpleCachedRepoList(
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
255 all_repos, repos_path=self.repos_path, order_by=sort_key)
256 return repo_iter
256 return repo_iter
257
257
258 def get_repo_groups(self, all_groups=None):
258 def get_repo_groups(self, all_groups=None):
259 if all_groups is None:
259 if all_groups is None:
260 all_groups = RepoGroup.query()\
260 all_groups = RepoGroup.query()\
261 .filter(RepoGroup.group_parent_id == None).all()
261 .filter(RepoGroup.group_parent_id == None).all()
262 return [x for x in RepoGroupList(all_groups)]
262 return [x for x in RepoGroupList(all_groups)]
263
263
264 def mark_for_invalidation(self, repo_name, delete=False):
264 def mark_for_invalidation(self, repo_name, delete=False):
265 """
265 """
266 Mark caches of this repo invalid in the database. `delete` flag
266 Mark caches of this repo invalid in the database. `delete` flag
267 removes the cache entries
267 removes the cache entries
268
268
269 :param repo_name: the repo_name for which caches should be marked
269 :param repo_name: the repo_name for which caches should be marked
270 invalid, or deleted
270 invalid, or deleted
271 :param delete: delete the entry keys instead of setting bool
271 :param delete: delete the entry keys instead of setting bool
272 flag on them
272 flag on them
273 """
273 """
274 CacheKey.set_invalidate(repo_name, delete=delete)
274 CacheKey.set_invalidate(repo_name, delete=delete)
275 repo = Repository.get_by_repo_name(repo_name)
275 repo = Repository.get_by_repo_name(repo_name)
276
276
277 if repo:
277 if repo:
278 config = repo._config
278 config = repo._config
279 config.set('extensions', 'largefiles', '')
279 config.set('extensions', 'largefiles', '')
280 repo.update_commit_cache(config=config, cs_cache=None)
280 repo.update_commit_cache(config=config, cs_cache=None)
281 caches.clear_repo_caches(repo_name)
281 caches.clear_repo_caches(repo_name)
282
282
283 def toggle_following_repo(self, follow_repo_id, user_id):
283 def toggle_following_repo(self, follow_repo_id, user_id):
284
284
285 f = self.sa.query(UserFollowing)\
285 f = self.sa.query(UserFollowing)\
286 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
286 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
287 .filter(UserFollowing.user_id == user_id).scalar()
287 .filter(UserFollowing.user_id == user_id).scalar()
288
288
289 if f is not None:
289 if f is not None:
290 try:
290 try:
291 self.sa.delete(f)
291 self.sa.delete(f)
292 return
292 return
293 except Exception:
293 except Exception:
294 log.error(traceback.format_exc())
294 log.error(traceback.format_exc())
295 raise
295 raise
296
296
297 try:
297 try:
298 f = UserFollowing()
298 f = UserFollowing()
299 f.user_id = user_id
299 f.user_id = user_id
300 f.follows_repo_id = follow_repo_id
300 f.follows_repo_id = follow_repo_id
301 self.sa.add(f)
301 self.sa.add(f)
302 except Exception:
302 except Exception:
303 log.error(traceback.format_exc())
303 log.error(traceback.format_exc())
304 raise
304 raise
305
305
306 def toggle_following_user(self, follow_user_id, user_id):
306 def toggle_following_user(self, follow_user_id, user_id):
307 f = self.sa.query(UserFollowing)\
307 f = self.sa.query(UserFollowing)\
308 .filter(UserFollowing.follows_user_id == follow_user_id)\
308 .filter(UserFollowing.follows_user_id == follow_user_id)\
309 .filter(UserFollowing.user_id == user_id).scalar()
309 .filter(UserFollowing.user_id == user_id).scalar()
310
310
311 if f is not None:
311 if f is not None:
312 try:
312 try:
313 self.sa.delete(f)
313 self.sa.delete(f)
314 return
314 return
315 except Exception:
315 except Exception:
316 log.error(traceback.format_exc())
316 log.error(traceback.format_exc())
317 raise
317 raise
318
318
319 try:
319 try:
320 f = UserFollowing()
320 f = UserFollowing()
321 f.user_id = user_id
321 f.user_id = user_id
322 f.follows_user_id = follow_user_id
322 f.follows_user_id = follow_user_id
323 self.sa.add(f)
323 self.sa.add(f)
324 except Exception:
324 except Exception:
325 log.error(traceback.format_exc())
325 log.error(traceback.format_exc())
326 raise
326 raise
327
327
328 def is_following_repo(self, repo_name, user_id, cache=False):
328 def is_following_repo(self, repo_name, user_id, cache=False):
329 r = self.sa.query(Repository)\
329 r = self.sa.query(Repository)\
330 .filter(Repository.repo_name == repo_name).scalar()
330 .filter(Repository.repo_name == repo_name).scalar()
331
331
332 f = self.sa.query(UserFollowing)\
332 f = self.sa.query(UserFollowing)\
333 .filter(UserFollowing.follows_repository == r)\
333 .filter(UserFollowing.follows_repository == r)\
334 .filter(UserFollowing.user_id == user_id).scalar()
334 .filter(UserFollowing.user_id == user_id).scalar()
335
335
336 return f is not None
336 return f is not None
337
337
338 def is_following_user(self, username, user_id, cache=False):
338 def is_following_user(self, username, user_id, cache=False):
339 u = User.get_by_username(username)
339 u = User.get_by_username(username)
340
340
341 f = self.sa.query(UserFollowing)\
341 f = self.sa.query(UserFollowing)\
342 .filter(UserFollowing.follows_user == u)\
342 .filter(UserFollowing.follows_user == u)\
343 .filter(UserFollowing.user_id == user_id).scalar()
343 .filter(UserFollowing.user_id == user_id).scalar()
344
344
345 return f is not None
345 return f is not None
346
346
347 def get_followers(self, repo):
347 def get_followers(self, repo):
348 repo = self._get_repo(repo)
348 repo = self._get_repo(repo)
349
349
350 return self.sa.query(UserFollowing)\
350 return self.sa.query(UserFollowing)\
351 .filter(UserFollowing.follows_repository == repo).count()
351 .filter(UserFollowing.follows_repository == repo).count()
352
352
353 def get_forks(self, repo):
353 def get_forks(self, repo):
354 repo = self._get_repo(repo)
354 repo = self._get_repo(repo)
355 return self.sa.query(Repository)\
355 return self.sa.query(Repository)\
356 .filter(Repository.fork == repo).count()
356 .filter(Repository.fork == repo).count()
357
357
358 def get_pull_requests(self, repo):
358 def get_pull_requests(self, repo):
359 repo = self._get_repo(repo)
359 repo = self._get_repo(repo)
360 return self.sa.query(PullRequest)\
360 return self.sa.query(PullRequest)\
361 .filter(PullRequest.target_repo == repo)\
361 .filter(PullRequest.target_repo == repo)\
362 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
362 .filter(PullRequest.status != PullRequest.STATUS_CLOSED).count()
363
363
364 def mark_as_fork(self, repo, fork, user):
364 def mark_as_fork(self, repo, fork, user):
365 repo = self._get_repo(repo)
365 repo = self._get_repo(repo)
366 fork = self._get_repo(fork)
366 fork = self._get_repo(fork)
367 if fork and repo.repo_id == fork.repo_id:
367 if fork and repo.repo_id == fork.repo_id:
368 raise Exception("Cannot set repository as fork of itself")
368 raise Exception("Cannot set repository as fork of itself")
369
369
370 if fork and repo.repo_type != fork.repo_type:
370 if fork and repo.repo_type != fork.repo_type:
371 raise RepositoryError(
371 raise RepositoryError(
372 "Cannot set repository as fork of repository with other type")
372 "Cannot set repository as fork of repository with other type")
373
373
374 repo.fork = fork
374 repo.fork = fork
375 self.sa.add(repo)
375 self.sa.add(repo)
376 return repo
376 return repo
377
377
378 def pull_changes(self, repo, username):
378 def pull_changes(self, repo, username):
379 dbrepo = self._get_repo(repo)
379 dbrepo = self._get_repo(repo)
380 clone_uri = dbrepo.clone_uri
380 clone_uri = dbrepo.clone_uri
381 if not clone_uri:
381 if not clone_uri:
382 raise Exception("This repository doesn't have a clone uri")
382 raise Exception("This repository doesn't have a clone uri")
383
383
384 repo = dbrepo.scm_instance(cache=False)
384 repo = dbrepo.scm_instance(cache=False)
385 # TODO: marcink fix this an re-enable since we need common logic
385 # TODO: marcink fix this an re-enable since we need common logic
386 # for hg/git remove hooks so we don't trigger them on fetching
386 # for hg/git remove hooks so we don't trigger them on fetching
387 # commits from remote
387 # commits from remote
388 repo.config.clear_section('hooks')
388 repo.config.clear_section('hooks')
389
389
390 repo_name = dbrepo.repo_name
390 repo_name = dbrepo.repo_name
391 try:
391 try:
392 # TODO: we need to make sure those operations call proper hooks !
392 # TODO: we need to make sure those operations call proper hooks !
393 repo.pull(clone_uri)
393 repo.pull(clone_uri)
394
394
395 self.mark_for_invalidation(repo_name)
395 self.mark_for_invalidation(repo_name)
396 except Exception:
396 except Exception:
397 log.error(traceback.format_exc())
397 log.error(traceback.format_exc())
398 raise
398 raise
399
399
400 def commit_change(self, repo, repo_name, commit, user, author, message,
400 def commit_change(self, repo, repo_name, commit, user, author, message,
401 content, f_path):
401 content, f_path):
402 """
402 """
403 Commits changes
403 Commits changes
404
404
405 :param repo: SCM instance
405 :param repo: SCM instance
406
406
407 """
407 """
408 user = self._get_user(user)
408 user = self._get_user(user)
409
409
410 # decoding here will force that we have proper encoded values
410 # decoding here will force that we have proper encoded values
411 # in any other case this will throw exceptions and deny commit
411 # in any other case this will throw exceptions and deny commit
412 content = safe_str(content)
412 content = safe_str(content)
413 path = safe_str(f_path)
413 path = safe_str(f_path)
414 # message and author needs to be unicode
414 # message and author needs to be unicode
415 # proper backend should then translate that into required type
415 # proper backend should then translate that into required type
416 message = safe_unicode(message)
416 message = safe_unicode(message)
417 author = safe_unicode(author)
417 author = safe_unicode(author)
418 imc = repo.in_memory_commit
418 imc = repo.in_memory_commit
419 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
419 imc.change(FileNode(path, content, mode=commit.get_file_mode(f_path)))
420 try:
420 try:
421 # TODO: handle pre-push action !
421 # TODO: handle pre-push action !
422 tip = imc.commit(
422 tip = imc.commit(
423 message=message, author=author, parents=[commit],
423 message=message, author=author, parents=[commit],
424 branch=commit.branch)
424 branch=commit.branch)
425 except Exception as e:
425 except Exception as e:
426 log.error(traceback.format_exc())
426 log.error(traceback.format_exc())
427 raise IMCCommitError(str(e))
427 raise IMCCommitError(str(e))
428 finally:
428 finally:
429 # always clear caches, if commit fails we want fresh object also
429 # always clear caches, if commit fails we want fresh object also
430 self.mark_for_invalidation(repo_name)
430 self.mark_for_invalidation(repo_name)
431
431
432 # We trigger the post-push action
432 # We trigger the post-push action
433 hooks_utils.trigger_post_push_hook(
433 hooks_utils.trigger_post_push_hook(
434 username=user.username, action='push_local', repo_name=repo_name,
434 username=user.username, action='push_local', repo_name=repo_name,
435 repo_alias=repo.alias, commit_ids=[tip.raw_id])
435 repo_alias=repo.alias, commit_ids=[tip.raw_id])
436 return tip
436 return tip
437
437
438 def _sanitize_path(self, f_path):
438 def _sanitize_path(self, f_path):
439 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
439 if f_path.startswith('/') or f_path.startswith('./') or '../' in f_path:
440 raise NonRelativePathError('%s is not an relative path' % f_path)
440 raise NonRelativePathError('%s is not an relative path' % f_path)
441 if f_path:
441 if f_path:
442 f_path = os.path.normpath(f_path)
442 f_path = os.path.normpath(f_path)
443 return f_path
443 return f_path
444
444
445 def get_dirnode_metadata(self, commit, dir_node):
445 def get_dirnode_metadata(self, commit, dir_node):
446 if not dir_node.is_dir():
446 if not dir_node.is_dir():
447 return []
447 return []
448
448
449 data = []
449 data = []
450 for node in dir_node:
450 for node in dir_node:
451 if not node.is_file():
451 if not node.is_file():
452 # we skip file-nodes
452 # we skip file-nodes
453 continue
453 continue
454
454
455 last_commit = node.last_commit
455 last_commit = node.last_commit
456 last_commit_date = last_commit.date
456 last_commit_date = last_commit.date
457 data.append({
457 data.append({
458 'name': node.name,
458 'name': node.name,
459 'size': h.format_byte_size_binary(node.size),
459 'size': h.format_byte_size_binary(node.size),
460 'modified_at': h.format_date(last_commit_date),
460 'modified_at': h.format_date(last_commit_date),
461 'modified_ts': last_commit_date.isoformat(),
461 'modified_ts': last_commit_date.isoformat(),
462 'revision': last_commit.revision,
462 'revision': last_commit.revision,
463 'short_id': last_commit.short_id,
463 'short_id': last_commit.short_id,
464 'message': h.escape(last_commit.message),
464 'message': h.escape(last_commit.message),
465 'author': h.escape(last_commit.author),
465 'author': h.escape(last_commit.author),
466 'user_profile': h.gravatar_with_user(last_commit.author),
466 'user_profile': h.gravatar_with_user(last_commit.author),
467 })
467 })
468
468
469 return data
469 return data
470
470
471 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
471 def get_nodes(self, repo_name, commit_id, root_path='/', flat=True,
472 extended_info=False, content=False, max_file_bytes=None):
472 extended_info=False, content=False, max_file_bytes=None):
473 """
473 """
474 recursive walk in root dir and return a set of all path in that dir
474 recursive walk in root dir and return a set of all path in that dir
475 based on repository walk function
475 based on repository walk function
476
476
477 :param repo_name: name of repository
477 :param repo_name: name of repository
478 :param commit_id: commit id for which to list nodes
478 :param commit_id: commit id for which to list nodes
479 :param root_path: root path to list
479 :param root_path: root path to list
480 :param flat: return as a list, if False returns a dict with description
480 :param flat: return as a list, if False returns a dict with description
481 :param max_file_bytes: will not return file contents over this limit
481 :param max_file_bytes: will not return file contents over this limit
482
482
483 """
483 """
484 _files = list()
484 _files = list()
485 _dirs = list()
485 _dirs = list()
486 try:
486 try:
487 _repo = self._get_repo(repo_name)
487 _repo = self._get_repo(repo_name)
488 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
488 commit = _repo.scm_instance().get_commit(commit_id=commit_id)
489 root_path = root_path.lstrip('/')
489 root_path = root_path.lstrip('/')
490 for __, dirs, files in commit.walk(root_path):
490 for __, dirs, files in commit.walk(root_path):
491 for f in files:
491 for f in files:
492 _content = None
492 _content = None
493 _data = f.unicode_path
493 _data = f.unicode_path
494 over_size_limit = (max_file_bytes is not None
494 over_size_limit = (max_file_bytes is not None
495 and f.size > max_file_bytes)
495 and f.size > max_file_bytes)
496
496
497 if not flat:
497 if not flat:
498 _data = {
498 _data = {
499 "name": f.unicode_path,
499 "name": h.escape(f.unicode_path),
500 "type": "file",
500 "type": "file",
501 }
501 }
502 if extended_info:
502 if extended_info:
503 _data.update({
503 _data.update({
504 "md5": f.md5,
504 "md5": f.md5,
505 "binary": f.is_binary,
505 "binary": f.is_binary,
506 "size": f.size,
506 "size": f.size,
507 "extension": f.extension,
507 "extension": f.extension,
508 "mimetype": f.mimetype,
508 "mimetype": f.mimetype,
509 "lines": f.lines()[0]
509 "lines": f.lines()[0]
510 })
510 })
511
511
512 if content:
512 if content:
513 full_content = None
513 full_content = None
514 if not f.is_binary and not over_size_limit:
514 if not f.is_binary and not over_size_limit:
515 full_content = safe_str(f.content)
515 full_content = safe_str(f.content)
516
516
517 _data.update({
517 _data.update({
518 "content": full_content,
518 "content": full_content,
519 })
519 })
520 _files.append(_data)
520 _files.append(_data)
521 for d in dirs:
521 for d in dirs:
522 _data = d.unicode_path
522 _data = d.unicode_path
523 if not flat:
523 if not flat:
524 _data = {
524 _data = {
525 "name": d.unicode_path,
525 "name": h.escape(d.unicode_path),
526 "type": "dir",
526 "type": "dir",
527 }
527 }
528 if extended_info:
528 if extended_info:
529 _data.update({
529 _data.update({
530 "md5": None,
530 "md5": None,
531 "binary": None,
531 "binary": None,
532 "size": None,
532 "size": None,
533 "extension": None,
533 "extension": None,
534 })
534 })
535 if content:
535 if content:
536 _data.update({
536 _data.update({
537 "content": None
537 "content": None
538 })
538 })
539 _dirs.append(_data)
539 _dirs.append(_data)
540 except RepositoryError:
540 except RepositoryError:
541 log.debug("Exception in get_nodes", exc_info=True)
541 log.debug("Exception in get_nodes", exc_info=True)
542 raise
542 raise
543
543
544 return _dirs, _files
544 return _dirs, _files
545
545
546 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
546 def create_nodes(self, user, repo, message, nodes, parent_commit=None,
547 author=None, trigger_push_hook=True):
547 author=None, trigger_push_hook=True):
548 """
548 """
549 Commits given multiple nodes into repo
549 Commits given multiple nodes into repo
550
550
551 :param user: RhodeCode User object or user_id, the commiter
551 :param user: RhodeCode User object or user_id, the commiter
552 :param repo: RhodeCode Repository object
552 :param repo: RhodeCode Repository object
553 :param message: commit message
553 :param message: commit message
554 :param nodes: mapping {filename:{'content':content},...}
554 :param nodes: mapping {filename:{'content':content},...}
555 :param parent_commit: parent commit, can be empty than it's
555 :param parent_commit: parent commit, can be empty than it's
556 initial commit
556 initial commit
557 :param author: author of commit, cna be different that commiter
557 :param author: author of commit, cna be different that commiter
558 only for git
558 only for git
559 :param trigger_push_hook: trigger push hooks
559 :param trigger_push_hook: trigger push hooks
560
560
561 :returns: new commited commit
561 :returns: new commited commit
562 """
562 """
563
563
564 user = self._get_user(user)
564 user = self._get_user(user)
565 scm_instance = repo.scm_instance(cache=False)
565 scm_instance = repo.scm_instance(cache=False)
566
566
567 processed_nodes = []
567 processed_nodes = []
568 for f_path in nodes:
568 for f_path in nodes:
569 f_path = self._sanitize_path(f_path)
569 f_path = self._sanitize_path(f_path)
570 content = nodes[f_path]['content']
570 content = nodes[f_path]['content']
571 f_path = safe_str(f_path)
571 f_path = safe_str(f_path)
572 # decoding here will force that we have proper encoded values
572 # decoding here will force that we have proper encoded values
573 # in any other case this will throw exceptions and deny commit
573 # in any other case this will throw exceptions and deny commit
574 if isinstance(content, (basestring,)):
574 if isinstance(content, (basestring,)):
575 content = safe_str(content)
575 content = safe_str(content)
576 elif isinstance(content, (file, cStringIO.OutputType,)):
576 elif isinstance(content, (file, cStringIO.OutputType,)):
577 content = content.read()
577 content = content.read()
578 else:
578 else:
579 raise Exception('Content is of unrecognized type %s' % (
579 raise Exception('Content is of unrecognized type %s' % (
580 type(content)
580 type(content)
581 ))
581 ))
582 processed_nodes.append((f_path, content))
582 processed_nodes.append((f_path, content))
583
583
584 message = safe_unicode(message)
584 message = safe_unicode(message)
585 commiter = user.full_contact
585 commiter = user.full_contact
586 author = safe_unicode(author) if author else commiter
586 author = safe_unicode(author) if author else commiter
587
587
588 imc = scm_instance.in_memory_commit
588 imc = scm_instance.in_memory_commit
589
589
590 if not parent_commit:
590 if not parent_commit:
591 parent_commit = EmptyCommit(alias=scm_instance.alias)
591 parent_commit = EmptyCommit(alias=scm_instance.alias)
592
592
593 if isinstance(parent_commit, EmptyCommit):
593 if isinstance(parent_commit, EmptyCommit):
594 # EmptyCommit means we we're editing empty repository
594 # EmptyCommit means we we're editing empty repository
595 parents = None
595 parents = None
596 else:
596 else:
597 parents = [parent_commit]
597 parents = [parent_commit]
598 # add multiple nodes
598 # add multiple nodes
599 for path, content in processed_nodes:
599 for path, content in processed_nodes:
600 imc.add(FileNode(path, content=content))
600 imc.add(FileNode(path, content=content))
601 # TODO: handle pre push scenario
601 # TODO: handle pre push scenario
602 tip = imc.commit(message=message,
602 tip = imc.commit(message=message,
603 author=author,
603 author=author,
604 parents=parents,
604 parents=parents,
605 branch=parent_commit.branch)
605 branch=parent_commit.branch)
606
606
607 self.mark_for_invalidation(repo.repo_name)
607 self.mark_for_invalidation(repo.repo_name)
608 if trigger_push_hook:
608 if trigger_push_hook:
609 hooks_utils.trigger_post_push_hook(
609 hooks_utils.trigger_post_push_hook(
610 username=user.username, action='push_local',
610 username=user.username, action='push_local',
611 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
611 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
612 commit_ids=[tip.raw_id])
612 commit_ids=[tip.raw_id])
613 return tip
613 return tip
614
614
615 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
615 def update_nodes(self, user, repo, message, nodes, parent_commit=None,
616 author=None, trigger_push_hook=True):
616 author=None, trigger_push_hook=True):
617 user = self._get_user(user)
617 user = self._get_user(user)
618 scm_instance = repo.scm_instance(cache=False)
618 scm_instance = repo.scm_instance(cache=False)
619
619
620 message = safe_unicode(message)
620 message = safe_unicode(message)
621 commiter = user.full_contact
621 commiter = user.full_contact
622 author = safe_unicode(author) if author else commiter
622 author = safe_unicode(author) if author else commiter
623
623
624 imc = scm_instance.in_memory_commit
624 imc = scm_instance.in_memory_commit
625
625
626 if not parent_commit:
626 if not parent_commit:
627 parent_commit = EmptyCommit(alias=scm_instance.alias)
627 parent_commit = EmptyCommit(alias=scm_instance.alias)
628
628
629 if isinstance(parent_commit, EmptyCommit):
629 if isinstance(parent_commit, EmptyCommit):
630 # EmptyCommit means we we're editing empty repository
630 # EmptyCommit means we we're editing empty repository
631 parents = None
631 parents = None
632 else:
632 else:
633 parents = [parent_commit]
633 parents = [parent_commit]
634
634
635 # add multiple nodes
635 # add multiple nodes
636 for _filename, data in nodes.items():
636 for _filename, data in nodes.items():
637 # new filename, can be renamed from the old one, also sanitaze
637 # new filename, can be renamed from the old one, also sanitaze
638 # the path for any hack around relative paths like ../../ etc.
638 # the path for any hack around relative paths like ../../ etc.
639 filename = self._sanitize_path(data['filename'])
639 filename = self._sanitize_path(data['filename'])
640 old_filename = self._sanitize_path(_filename)
640 old_filename = self._sanitize_path(_filename)
641 content = data['content']
641 content = data['content']
642
642
643 filenode = FileNode(old_filename, content=content)
643 filenode = FileNode(old_filename, content=content)
644 op = data['op']
644 op = data['op']
645 if op == 'add':
645 if op == 'add':
646 imc.add(filenode)
646 imc.add(filenode)
647 elif op == 'del':
647 elif op == 'del':
648 imc.remove(filenode)
648 imc.remove(filenode)
649 elif op == 'mod':
649 elif op == 'mod':
650 if filename != old_filename:
650 if filename != old_filename:
651 # TODO: handle renames more efficient, needs vcs lib
651 # TODO: handle renames more efficient, needs vcs lib
652 # changes
652 # changes
653 imc.remove(filenode)
653 imc.remove(filenode)
654 imc.add(FileNode(filename, content=content))
654 imc.add(FileNode(filename, content=content))
655 else:
655 else:
656 imc.change(filenode)
656 imc.change(filenode)
657
657
658 try:
658 try:
659 # TODO: handle pre push scenario
659 # TODO: handle pre push scenario
660 # commit changes
660 # commit changes
661 tip = imc.commit(message=message,
661 tip = imc.commit(message=message,
662 author=author,
662 author=author,
663 parents=parents,
663 parents=parents,
664 branch=parent_commit.branch)
664 branch=parent_commit.branch)
665 except NodeNotChangedError:
665 except NodeNotChangedError:
666 raise
666 raise
667 except Exception as e:
667 except Exception as e:
668 log.exception("Unexpected exception during call to imc.commit")
668 log.exception("Unexpected exception during call to imc.commit")
669 raise IMCCommitError(str(e))
669 raise IMCCommitError(str(e))
670 finally:
670 finally:
671 # always clear caches, if commit fails we want fresh object also
671 # always clear caches, if commit fails we want fresh object also
672 self.mark_for_invalidation(repo.repo_name)
672 self.mark_for_invalidation(repo.repo_name)
673
673
674 if trigger_push_hook:
674 if trigger_push_hook:
675 hooks_utils.trigger_post_push_hook(
675 hooks_utils.trigger_post_push_hook(
676 username=user.username, action='push_local',
676 username=user.username, action='push_local',
677 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
677 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
678 commit_ids=[tip.raw_id])
678 commit_ids=[tip.raw_id])
679
679
680 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
680 def delete_nodes(self, user, repo, message, nodes, parent_commit=None,
681 author=None, trigger_push_hook=True):
681 author=None, trigger_push_hook=True):
682 """
682 """
683 Deletes given multiple nodes into `repo`
683 Deletes given multiple nodes into `repo`
684
684
685 :param user: RhodeCode User object or user_id, the committer
685 :param user: RhodeCode User object or user_id, the committer
686 :param repo: RhodeCode Repository object
686 :param repo: RhodeCode Repository object
687 :param message: commit message
687 :param message: commit message
688 :param nodes: mapping {filename:{'content':content},...}
688 :param nodes: mapping {filename:{'content':content},...}
689 :param parent_commit: parent commit, can be empty than it's initial
689 :param parent_commit: parent commit, can be empty than it's initial
690 commit
690 commit
691 :param author: author of commit, cna be different that commiter only
691 :param author: author of commit, cna be different that commiter only
692 for git
692 for git
693 :param trigger_push_hook: trigger push hooks
693 :param trigger_push_hook: trigger push hooks
694
694
695 :returns: new commit after deletion
695 :returns: new commit after deletion
696 """
696 """
697
697
698 user = self._get_user(user)
698 user = self._get_user(user)
699 scm_instance = repo.scm_instance(cache=False)
699 scm_instance = repo.scm_instance(cache=False)
700
700
701 processed_nodes = []
701 processed_nodes = []
702 for f_path in nodes:
702 for f_path in nodes:
703 f_path = self._sanitize_path(f_path)
703 f_path = self._sanitize_path(f_path)
704 # content can be empty but for compatabilty it allows same dicts
704 # content can be empty but for compatabilty it allows same dicts
705 # structure as add_nodes
705 # structure as add_nodes
706 content = nodes[f_path].get('content')
706 content = nodes[f_path].get('content')
707 processed_nodes.append((f_path, content))
707 processed_nodes.append((f_path, content))
708
708
709 message = safe_unicode(message)
709 message = safe_unicode(message)
710 commiter = user.full_contact
710 commiter = user.full_contact
711 author = safe_unicode(author) if author else commiter
711 author = safe_unicode(author) if author else commiter
712
712
713 imc = scm_instance.in_memory_commit
713 imc = scm_instance.in_memory_commit
714
714
715 if not parent_commit:
715 if not parent_commit:
716 parent_commit = EmptyCommit(alias=scm_instance.alias)
716 parent_commit = EmptyCommit(alias=scm_instance.alias)
717
717
718 if isinstance(parent_commit, EmptyCommit):
718 if isinstance(parent_commit, EmptyCommit):
719 # EmptyCommit means we we're editing empty repository
719 # EmptyCommit means we we're editing empty repository
720 parents = None
720 parents = None
721 else:
721 else:
722 parents = [parent_commit]
722 parents = [parent_commit]
723 # add multiple nodes
723 # add multiple nodes
724 for path, content in processed_nodes:
724 for path, content in processed_nodes:
725 imc.remove(FileNode(path, content=content))
725 imc.remove(FileNode(path, content=content))
726
726
727 # TODO: handle pre push scenario
727 # TODO: handle pre push scenario
728 tip = imc.commit(message=message,
728 tip = imc.commit(message=message,
729 author=author,
729 author=author,
730 parents=parents,
730 parents=parents,
731 branch=parent_commit.branch)
731 branch=parent_commit.branch)
732
732
733 self.mark_for_invalidation(repo.repo_name)
733 self.mark_for_invalidation(repo.repo_name)
734 if trigger_push_hook:
734 if trigger_push_hook:
735 hooks_utils.trigger_post_push_hook(
735 hooks_utils.trigger_post_push_hook(
736 username=user.username, action='push_local',
736 username=user.username, action='push_local',
737 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
737 repo_name=repo.repo_name, repo_alias=scm_instance.alias,
738 commit_ids=[tip.raw_id])
738 commit_ids=[tip.raw_id])
739 return tip
739 return tip
740
740
741 def strip(self, repo, commit_id, branch):
741 def strip(self, repo, commit_id, branch):
742 scm_instance = repo.scm_instance(cache=False)
742 scm_instance = repo.scm_instance(cache=False)
743 scm_instance.config.clear_section('hooks')
743 scm_instance.config.clear_section('hooks')
744 scm_instance.strip(commit_id, branch)
744 scm_instance.strip(commit_id, branch)
745 self.mark_for_invalidation(repo.repo_name)
745 self.mark_for_invalidation(repo.repo_name)
746
746
747 def get_unread_journal(self):
747 def get_unread_journal(self):
748 return self.sa.query(UserLog).count()
748 return self.sa.query(UserLog).count()
749
749
750 def get_repo_landing_revs(self, repo=None):
750 def get_repo_landing_revs(self, repo=None):
751 """
751 """
752 Generates select option with tags branches and bookmarks (for hg only)
752 Generates select option with tags branches and bookmarks (for hg only)
753 grouped by type
753 grouped by type
754
754
755 :param repo:
755 :param repo:
756 """
756 """
757
757
758 repo = self._get_repo(repo)
758 repo = self._get_repo(repo)
759
759
760 hist_l = [
760 hist_l = [
761 ['rev:tip', _('latest tip')]
761 ['rev:tip', _('latest tip')]
762 ]
762 ]
763 choices = [
763 choices = [
764 'rev:tip'
764 'rev:tip'
765 ]
765 ]
766
766
767 if not repo:
767 if not repo:
768 return choices, hist_l
768 return choices, hist_l
769
769
770 repo = repo.scm_instance()
770 repo = repo.scm_instance()
771
771
772 branches_group = (
772 branches_group = (
773 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
773 [(u'branch:%s' % safe_unicode(b), safe_unicode(b))
774 for b in repo.branches],
774 for b in repo.branches],
775 _("Branches"))
775 _("Branches"))
776 hist_l.append(branches_group)
776 hist_l.append(branches_group)
777 choices.extend([x[0] for x in branches_group[0]])
777 choices.extend([x[0] for x in branches_group[0]])
778
778
779 if repo.alias == 'hg':
779 if repo.alias == 'hg':
780 bookmarks_group = (
780 bookmarks_group = (
781 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
781 [(u'book:%s' % safe_unicode(b), safe_unicode(b))
782 for b in repo.bookmarks],
782 for b in repo.bookmarks],
783 _("Bookmarks"))
783 _("Bookmarks"))
784 hist_l.append(bookmarks_group)
784 hist_l.append(bookmarks_group)
785 choices.extend([x[0] for x in bookmarks_group[0]])
785 choices.extend([x[0] for x in bookmarks_group[0]])
786
786
787 tags_group = (
787 tags_group = (
788 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
788 [(u'tag:%s' % safe_unicode(t), safe_unicode(t))
789 for t in repo.tags],
789 for t in repo.tags],
790 _("Tags"))
790 _("Tags"))
791 hist_l.append(tags_group)
791 hist_l.append(tags_group)
792 choices.extend([x[0] for x in tags_group[0]])
792 choices.extend([x[0] for x in tags_group[0]])
793
793
794 return choices, hist_l
794 return choices, hist_l
795
795
796 def install_git_hook(self, repo, force_create=False):
796 def install_git_hook(self, repo, force_create=False):
797 """
797 """
798 Creates a rhodecode hook inside a git repository
798 Creates a rhodecode hook inside a git repository
799
799
800 :param repo: Instance of VCS repo
800 :param repo: Instance of VCS repo
801 :param force_create: Create even if same name hook exists
801 :param force_create: Create even if same name hook exists
802 """
802 """
803
803
804 loc = os.path.join(repo.path, 'hooks')
804 loc = os.path.join(repo.path, 'hooks')
805 if not repo.bare:
805 if not repo.bare:
806 loc = os.path.join(repo.path, '.git', 'hooks')
806 loc = os.path.join(repo.path, '.git', 'hooks')
807 if not os.path.isdir(loc):
807 if not os.path.isdir(loc):
808 os.makedirs(loc, mode=0777)
808 os.makedirs(loc, mode=0777)
809
809
810 tmpl_post = pkg_resources.resource_string(
810 tmpl_post = pkg_resources.resource_string(
811 'rhodecode', '/'.join(
811 'rhodecode', '/'.join(
812 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
812 ('config', 'hook_templates', 'git_post_receive.py.tmpl')))
813 tmpl_pre = pkg_resources.resource_string(
813 tmpl_pre = pkg_resources.resource_string(
814 'rhodecode', '/'.join(
814 'rhodecode', '/'.join(
815 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
815 ('config', 'hook_templates', 'git_pre_receive.py.tmpl')))
816
816
817 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
817 for h_type, tmpl in [('pre', tmpl_pre), ('post', tmpl_post)]:
818 _hook_file = os.path.join(loc, '%s-receive' % h_type)
818 _hook_file = os.path.join(loc, '%s-receive' % h_type)
819 log.debug('Installing git hook in repo %s', repo)
819 log.debug('Installing git hook in repo %s', repo)
820 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
820 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
821
821
822 if _rhodecode_hook or force_create:
822 if _rhodecode_hook or force_create:
823 log.debug('writing %s hook file !', h_type)
823 log.debug('writing %s hook file !', h_type)
824 try:
824 try:
825 with open(_hook_file, 'wb') as f:
825 with open(_hook_file, 'wb') as f:
826 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
826 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
827 tmpl = tmpl.replace('_ENV_', sys.executable)
827 tmpl = tmpl.replace('_ENV_', sys.executable)
828 f.write(tmpl)
828 f.write(tmpl)
829 os.chmod(_hook_file, 0755)
829 os.chmod(_hook_file, 0755)
830 except IOError:
830 except IOError:
831 log.exception('error writing hook file %s', _hook_file)
831 log.exception('error writing hook file %s', _hook_file)
832 else:
832 else:
833 log.debug('skipping writing hook file')
833 log.debug('skipping writing hook file')
834
834
835 def install_svn_hooks(self, repo, force_create=False):
835 def install_svn_hooks(self, repo, force_create=False):
836 """
836 """
837 Creates rhodecode hooks inside a svn repository
837 Creates rhodecode hooks inside a svn repository
838
838
839 :param repo: Instance of VCS repo
839 :param repo: Instance of VCS repo
840 :param force_create: Create even if same name hook exists
840 :param force_create: Create even if same name hook exists
841 """
841 """
842 hooks_path = os.path.join(repo.path, 'hooks')
842 hooks_path = os.path.join(repo.path, 'hooks')
843 if not os.path.isdir(hooks_path):
843 if not os.path.isdir(hooks_path):
844 os.makedirs(hooks_path)
844 os.makedirs(hooks_path)
845 post_commit_tmpl = pkg_resources.resource_string(
845 post_commit_tmpl = pkg_resources.resource_string(
846 'rhodecode', '/'.join(
846 'rhodecode', '/'.join(
847 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
847 ('config', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
848 pre_commit_template = pkg_resources.resource_string(
848 pre_commit_template = pkg_resources.resource_string(
849 'rhodecode', '/'.join(
849 'rhodecode', '/'.join(
850 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
850 ('config', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
851 templates = {
851 templates = {
852 'post-commit': post_commit_tmpl,
852 'post-commit': post_commit_tmpl,
853 'pre-commit': pre_commit_template
853 'pre-commit': pre_commit_template
854 }
854 }
855 for filename in templates:
855 for filename in templates:
856 _hook_file = os.path.join(hooks_path, filename)
856 _hook_file = os.path.join(hooks_path, filename)
857 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
857 _rhodecode_hook = _check_rhodecode_hook(_hook_file)
858 if _rhodecode_hook or force_create:
858 if _rhodecode_hook or force_create:
859 log.debug('writing %s hook file !', filename)
859 log.debug('writing %s hook file !', filename)
860 template = templates[filename]
860 template = templates[filename]
861 try:
861 try:
862 with open(_hook_file, 'wb') as f:
862 with open(_hook_file, 'wb') as f:
863 template = template.replace(
863 template = template.replace(
864 '_TMPL_', rhodecode.__version__)
864 '_TMPL_', rhodecode.__version__)
865 template = template.replace('_ENV_', sys.executable)
865 template = template.replace('_ENV_', sys.executable)
866 f.write(template)
866 f.write(template)
867 os.chmod(_hook_file, 0755)
867 os.chmod(_hook_file, 0755)
868 except IOError:
868 except IOError:
869 log.exception('error writing hook file %s', filename)
869 log.exception('error writing hook file %s', filename)
870 else:
870 else:
871 log.debug('skipping writing hook file')
871 log.debug('skipping writing hook file')
872
872
873 def install_hooks(self, repo, repo_type):
873 def install_hooks(self, repo, repo_type):
874 if repo_type == 'git':
874 if repo_type == 'git':
875 self.install_git_hook(repo)
875 self.install_git_hook(repo)
876 elif repo_type == 'svn':
876 elif repo_type == 'svn':
877 self.install_svn_hooks(repo)
877 self.install_svn_hooks(repo)
878
878
879 def get_server_info(self, environ=None):
879 def get_server_info(self, environ=None):
880 server_info = get_system_info(environ)
880 server_info = get_system_info(environ)
881 return server_info
881 return server_info
882
882
883
883
884 def _check_rhodecode_hook(hook_path):
884 def _check_rhodecode_hook(hook_path):
885 """
885 """
886 Check if the hook was created by RhodeCode
886 Check if the hook was created by RhodeCode
887 """
887 """
888 if not os.path.exists(hook_path):
888 if not os.path.exists(hook_path):
889 return True
889 return True
890
890
891 log.debug('hook exists, checking if it is from rhodecode')
891 log.debug('hook exists, checking if it is from rhodecode')
892 hook_content = _read_hook(hook_path)
892 hook_content = _read_hook(hook_path)
893 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
893 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
894 if matches:
894 if matches:
895 try:
895 try:
896 version = matches.groups()[0]
896 version = matches.groups()[0]
897 log.debug('got %s, it is rhodecode', version)
897 log.debug('got %s, it is rhodecode', version)
898 return True
898 return True
899 except Exception:
899 except Exception:
900 log.exception("Exception while reading the hook version.")
900 log.exception("Exception while reading the hook version.")
901
901
902 return False
902 return False
903
903
904
904
905 def _read_hook(hook_path):
905 def _read_hook(hook_path):
906 with open(hook_path, 'rb') as f:
906 with open(hook_path, 'rb') as f:
907 content = f.read()
907 content = f.read()
908 return content
908 return content
General Comments 0
You need to be logged in to leave comments. Login now