action_logger: removed the unnecessary start/stop repository-following actions...
marcink
r1804:4a92b08c default
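In practice the commit drops the two `action_logger` calls that were made whenever a user started or stopped following a repository (they recorded 'started_following_repo' / 'stopped_following_repo' entries), together with the now-unused `action_logger` import. The removed call sites, shown in full in the second hunk below, had this shape (comment added here for orientation):

action_logger(UserTemp(user_id),
              'started_following_repo',   # or 'stopped_following_repo'
              RepoTemp(follow_repo_id))

Following and unfollowing a repository itself still works exactly as before; only the logging of that action is gone.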
@@ -47,7 +47,7 @@
 from rhodecode.lib.exceptions import NonRelativePathError, IMCCommitError
 from rhodecode.lib import hooks_utils, caches
 from rhodecode.lib.utils import (
-    get_filesystem_repos, action_logger, make_db_config)
+    get_filesystem_repos, make_db_config)
 from rhodecode.lib.utils2 import (safe_str, safe_unicode)
 from rhodecode.lib.system_info import get_system_info
 from rhodecode.model import BaseModel
@@ -289,23 +289,16 @@ def toggle_following_repo(self, follow_repo_id, user_id):
         if f is not None:
             try:
                 self.sa.delete(f)
-                action_logger(UserTemp(user_id),
-                              'stopped_following_repo',
-                              RepoTemp(follow_repo_id))
                 return
             except Exception:
                 log.error(traceback.format_exc())
                 raise
 
         try:
             f = UserFollowing()
             f.user_id = user_id
             f.follows_repo_id = follow_repo_id
             self.sa.add(f)
-
-            action_logger(UserTemp(user_id),
-                          'started_following_repo',
-                          RepoTemp(follow_repo_id))
         except Exception:
             log.error(traceback.format_exc())
             raise
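For reference, this is how `ScmModel.toggle_following_repo` reads after the change, reassembled from the context lines in the hunk above (shown as it sits inside the `ScmModel` class; the comments are editorial and not part of the source):

    def toggle_following_repo(self, follow_repo_id, user_id):

        f = self.sa.query(UserFollowing)\
            .filter(UserFollowing.follows_repo_id == follow_repo_id)\
            .filter(UserFollowing.user_id == user_id).scalar()

        if f is not None:
            # an existing UserFollowing row means the user already follows
            # this repo -- delete it to unfollow
            try:
                self.sa.delete(f)
                return
            except Exception:
                log.error(traceback.format_exc())
                raise

        # no row yet -- create one to start following the repo
        try:
            f = UserFollowing()
            f.user_id = user_id
            f.follows_repo_id = follow_repo_id
            self.sa.add(f)
        except Exception:
            log.error(traceback.format_exc())
            raise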