use raw_id in push_local action
marcink
r2655:5a39eb37 beta
@@ -1,600 +1,595 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 """
2 """
3 rhodecode.model.scm
3 rhodecode.model.scm
4 ~~~~~~~~~~~~~~~~~~~
4 ~~~~~~~~~~~~~~~~~~~
5
5
6 Scm model for RhodeCode
6 Scm model for RhodeCode
7
7
8 :created_on: Apr 9, 2010
8 :created_on: Apr 9, 2010
9 :author: marcink
9 :author: marcink
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
10 :copyright: (C) 2010-2012 Marcin Kuzminski <marcin@python-works.com>
11 :license: GPLv3, see COPYING for more details.
11 :license: GPLv3, see COPYING for more details.
12 """
12 """
13 # This program is free software: you can redistribute it and/or modify
13 # This program is free software: you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation, either version 3 of the License, or
15 # the Free Software Foundation, either version 3 of the License, or
16 # (at your option) any later version.
16 # (at your option) any later version.
17 #
17 #
18 # This program is distributed in the hope that it will be useful,
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
21 # GNU General Public License for more details.
22 #
22 #
23 # You should have received a copy of the GNU General Public License
23 # You should have received a copy of the GNU General Public License
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
24 # along with this program. If not, see <http://www.gnu.org/licenses/>.
25 from __future__ import with_statement
25 from __future__ import with_statement
26 import os
26 import os
27 import re
27 import re
28 import time
28 import time
29 import traceback
29 import traceback
30 import logging
30 import logging
31 import cStringIO
31 import cStringIO
32 import pkg_resources
32 import pkg_resources
33 from os.path import dirname as dn, join as jn
33 from os.path import dirname as dn, join as jn
34
34
35 from sqlalchemy import func
35 from sqlalchemy import func
36 from pylons.i18n.translation import _
36 from pylons.i18n.translation import _
37
37
38 import rhodecode
38 import rhodecode
39 from rhodecode.lib.vcs import get_backend
39 from rhodecode.lib.vcs import get_backend
40 from rhodecode.lib.vcs.exceptions import RepositoryError
40 from rhodecode.lib.vcs.exceptions import RepositoryError
41 from rhodecode.lib.vcs.utils.lazy import LazyProperty
41 from rhodecode.lib.vcs.utils.lazy import LazyProperty
42 from rhodecode.lib.vcs.nodes import FileNode
42 from rhodecode.lib.vcs.nodes import FileNode
43
43
44 from rhodecode import BACKENDS
44 from rhodecode import BACKENDS
45 from rhodecode.lib import helpers as h
45 from rhodecode.lib import helpers as h
46 from rhodecode.lib.utils2 import safe_str, safe_unicode
46 from rhodecode.lib.utils2 import safe_str, safe_unicode
47 from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny
47 from rhodecode.lib.auth import HasRepoPermissionAny, HasReposGroupPermissionAny
48 from rhodecode.lib.utils import get_repos as get_filesystem_repos, make_ui, \
48 from rhodecode.lib.utils import get_repos as get_filesystem_repos, make_ui, \
49 action_logger, EmptyChangeset, REMOVED_REPO_PAT
49 action_logger, EmptyChangeset, REMOVED_REPO_PAT
50 from rhodecode.model import BaseModel
50 from rhodecode.model import BaseModel
51 from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \
51 from rhodecode.model.db import Repository, RhodeCodeUi, CacheInvalidation, \
52 UserFollowing, UserLog, User, RepoGroup, PullRequest
52 UserFollowing, UserLog, User, RepoGroup, PullRequest
53
53
54 log = logging.getLogger(__name__)
54 log = logging.getLogger(__name__)
55
55
56
56
57 class UserTemp(object):
57 class UserTemp(object):
58 def __init__(self, user_id):
58 def __init__(self, user_id):
59 self.user_id = user_id
59 self.user_id = user_id
60
60
61 def __repr__(self):
61 def __repr__(self):
62 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
62 return "<%s('id:%s')>" % (self.__class__.__name__, self.user_id)
63
63
64
64
65 class RepoTemp(object):
65 class RepoTemp(object):
66 def __init__(self, repo_id):
66 def __init__(self, repo_id):
67 self.repo_id = repo_id
67 self.repo_id = repo_id
68
68
69 def __repr__(self):
69 def __repr__(self):
70 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
70 return "<%s('id:%s')>" % (self.__class__.__name__, self.repo_id)
71
71
72
72
73 class CachedRepoList(object):
73 class CachedRepoList(object):
74 """
74 """
75 Cached repo list; uses an in-memory cache after initialization, which is
75 Cached repo list; uses an in-memory cache after initialization, which is
76 super fast
76 super fast
77 """
77 """
78
78
79 def __init__(self, db_repo_list, repos_path, order_by=None):
79 def __init__(self, db_repo_list, repos_path, order_by=None):
80 self.db_repo_list = db_repo_list
80 self.db_repo_list = db_repo_list
81 self.repos_path = repos_path
81 self.repos_path = repos_path
82 self.order_by = order_by
82 self.order_by = order_by
83 self.reversed = (order_by or '').startswith('-')
83 self.reversed = (order_by or '').startswith('-')
84
84
85 def __len__(self):
85 def __len__(self):
86 return len(self.db_repo_list)
86 return len(self.db_repo_list)
87
87
88 def __repr__(self):
88 def __repr__(self):
89 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
89 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
90
90
91 def __iter__(self):
91 def __iter__(self):
92 # pre-populated cache_map to save executing select statements
92 # pre-populated cache_map to save executing select statements
93 # for each repo
93 # for each repo
94 cache_map = CacheInvalidation.get_cache_map()
94 cache_map = CacheInvalidation.get_cache_map()
95
95
96 for dbr in self.db_repo_list:
96 for dbr in self.db_repo_list:
97 scmr = dbr.scm_instance_cached(cache_map)
97 scmr = dbr.scm_instance_cached(cache_map)
98 # check permission at this level
98 # check permission at this level
99 if not HasRepoPermissionAny(
99 if not HasRepoPermissionAny(
100 'repository.read', 'repository.write', 'repository.admin'
100 'repository.read', 'repository.write', 'repository.admin'
101 )(dbr.repo_name, 'get repo check'):
101 )(dbr.repo_name, 'get repo check'):
102 continue
102 continue
103
103
104 if scmr is None:
104 if scmr is None:
105 log.error(
105 log.error(
106 '%s this repository is present in database but it '
106 '%s this repository is present in database but it '
107 'cannot be created as an scm instance' % dbr.repo_name
107 'cannot be created as an scm instance' % dbr.repo_name
108 )
108 )
109 continue
109 continue
110
110
111 last_change = scmr.last_change
111 last_change = scmr.last_change
112 tip = h.get_changeset_safe(scmr, 'tip')
112 tip = h.get_changeset_safe(scmr, 'tip')
113
113
114 tmp_d = {}
114 tmp_d = {}
115 tmp_d['name'] = dbr.repo_name
115 tmp_d['name'] = dbr.repo_name
116 tmp_d['name_sort'] = tmp_d['name'].lower()
116 tmp_d['name_sort'] = tmp_d['name'].lower()
117 tmp_d['description'] = dbr.description
117 tmp_d['description'] = dbr.description
118 tmp_d['description_sort'] = tmp_d['description'].lower()
118 tmp_d['description_sort'] = tmp_d['description'].lower()
119 tmp_d['last_change'] = last_change
119 tmp_d['last_change'] = last_change
120 tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
120 tmp_d['last_change_sort'] = time.mktime(last_change.timetuple())
121 tmp_d['tip'] = tip.raw_id
121 tmp_d['tip'] = tip.raw_id
122 tmp_d['tip_sort'] = tip.revision
122 tmp_d['tip_sort'] = tip.revision
123 tmp_d['rev'] = tip.revision
123 tmp_d['rev'] = tip.revision
124 tmp_d['contact'] = dbr.user.full_contact
124 tmp_d['contact'] = dbr.user.full_contact
125 tmp_d['contact_sort'] = tmp_d['contact']
125 tmp_d['contact_sort'] = tmp_d['contact']
126 tmp_d['owner_sort'] = tmp_d['contact']
126 tmp_d['owner_sort'] = tmp_d['contact']
127 tmp_d['repo_archives'] = list(scmr._get_archives())
127 tmp_d['repo_archives'] = list(scmr._get_archives())
128 tmp_d['last_msg'] = tip.message
128 tmp_d['last_msg'] = tip.message
129 tmp_d['author'] = tip.author
129 tmp_d['author'] = tip.author
130 tmp_d['dbrepo'] = dbr.get_dict()
130 tmp_d['dbrepo'] = dbr.get_dict()
131 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
131 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
132 yield tmp_d
132 yield tmp_d
133
133
134
134
135 class SimpleCachedRepoList(CachedRepoList):
135 class SimpleCachedRepoList(CachedRepoList):
136 """
136 """
137 Lighter version of CachedRepoList without the scm initialisation
137 Lighter version of CachedRepoList without the scm initialisation
138 """
138 """
139
139
140 def __iter__(self):
140 def __iter__(self):
141 for dbr in self.db_repo_list:
141 for dbr in self.db_repo_list:
142 # check permission at this level
142 # check permission at this level
143 if not HasRepoPermissionAny(
143 if not HasRepoPermissionAny(
144 'repository.read', 'repository.write', 'repository.admin'
144 'repository.read', 'repository.write', 'repository.admin'
145 )(dbr.repo_name, 'get repo check'):
145 )(dbr.repo_name, 'get repo check'):
146 continue
146 continue
147
147
148 tmp_d = {}
148 tmp_d = {}
149 tmp_d['name'] = dbr.repo_name
149 tmp_d['name'] = dbr.repo_name
150 tmp_d['name_sort'] = tmp_d['name'].lower()
150 tmp_d['name_sort'] = tmp_d['name'].lower()
151 tmp_d['description'] = dbr.description
151 tmp_d['description'] = dbr.description
152 tmp_d['description_sort'] = tmp_d['description'].lower()
152 tmp_d['description_sort'] = tmp_d['description'].lower()
153 tmp_d['dbrepo'] = dbr.get_dict()
153 tmp_d['dbrepo'] = dbr.get_dict()
154 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
154 tmp_d['dbrepo_fork'] = dbr.fork.get_dict() if dbr.fork else {}
155 yield tmp_d
155 yield tmp_d
156
156
157
157
158 class GroupList(object):
158 class GroupList(object):
159
159
160 def __init__(self, db_repo_group_list):
160 def __init__(self, db_repo_group_list):
161 self.db_repo_group_list = db_repo_group_list
161 self.db_repo_group_list = db_repo_group_list
162
162
163 def __len__(self):
163 def __len__(self):
164 return len(self.db_repo_group_list)
164 return len(self.db_repo_group_list)
165
165
166 def __repr__(self):
166 def __repr__(self):
167 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
167 return '<%s (%s)>' % (self.__class__.__name__, self.__len__())
168
168
169 def __iter__(self):
169 def __iter__(self):
170 for dbgr in self.db_repo_group_list:
170 for dbgr in self.db_repo_group_list:
171 # check permission at this level
171 # check permission at this level
172 if not HasReposGroupPermissionAny(
172 if not HasReposGroupPermissionAny(
173 'group.read', 'group.write', 'group.admin'
173 'group.read', 'group.write', 'group.admin'
174 )(dbgr.group_name, 'get group repo check'):
174 )(dbgr.group_name, 'get group repo check'):
175 continue
175 continue
176
176
177 yield dbgr
177 yield dbgr
178
178
179
179
180 class ScmModel(BaseModel):
180 class ScmModel(BaseModel):
181 """
181 """
182 Generic Scm Model
182 Generic Scm Model
183 """
183 """
184
184
185 def __get_repo(self, instance):
185 def __get_repo(self, instance):
186 cls = Repository
186 cls = Repository
187 if isinstance(instance, cls):
187 if isinstance(instance, cls):
188 return instance
188 return instance
189 elif isinstance(instance, int) or str(instance).isdigit():
189 elif isinstance(instance, int) or str(instance).isdigit():
190 return cls.get(instance)
190 return cls.get(instance)
191 elif isinstance(instance, basestring):
191 elif isinstance(instance, basestring):
192 return cls.get_by_repo_name(instance)
192 return cls.get_by_repo_name(instance)
193 elif instance:
193 elif instance:
194 raise Exception('given object must be int, basestr or Instance'
194 raise Exception('given object must be int, basestr or Instance'
195 ' of %s got %s' % (type(cls), type(instance)))
195 ' of %s got %s' % (type(cls), type(instance)))
196
196
197 @LazyProperty
197 @LazyProperty
198 def repos_path(self):
198 def repos_path(self):
199 """
199 """
200 Gets the repositories root path from the database
200 Gets the repositories root path from the database
201 """
201 """
202
202
203 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
203 q = self.sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
204
204
205 return q.ui_value
205 return q.ui_value
206
206
207 def repo_scan(self, repos_path=None):
207 def repo_scan(self, repos_path=None):
208 """
208 """
209 Listing of repositories in given path. This path should not be a
209 Listing of repositories in given path. This path should not be a
210 repository itself. Return a dictionary of repository objects
210 repository itself. Return a dictionary of repository objects
211
211
212 :param repos_path: path to directory containing repositories
212 :param repos_path: path to directory containing repositories
213 """
213 """
214
214
215 if repos_path is None:
215 if repos_path is None:
216 repos_path = self.repos_path
216 repos_path = self.repos_path
217
217
218 log.info('scanning for repositories in %s' % repos_path)
218 log.info('scanning for repositories in %s' % repos_path)
219
219
220 baseui = make_ui('db')
220 baseui = make_ui('db')
221 repos = {}
221 repos = {}
222
222
223 for name, path in get_filesystem_repos(repos_path, recursive=True):
223 for name, path in get_filesystem_repos(repos_path, recursive=True):
224 # skip removed repos
224 # skip removed repos
225 if REMOVED_REPO_PAT.match(name):
225 if REMOVED_REPO_PAT.match(name):
226 continue
226 continue
227
227
228 # name needs to be decomposed and put back together using the /
228 # name needs to be decomposed and put back together using the /
229 # since this is the internal storage separator for rhodecode
229 # since this is the internal storage separator for rhodecode
230 name = Repository.url_sep().join(name.split(os.sep))
230 name = Repository.url_sep().join(name.split(os.sep))
231
231
232 try:
232 try:
233 if name in repos:
233 if name in repos:
234 raise RepositoryError('Duplicate repository name %s '
234 raise RepositoryError('Duplicate repository name %s '
235 'found in %s' % (name, path))
235 'found in %s' % (name, path))
236 else:
236 else:
237
237
238 klass = get_backend(path[0])
238 klass = get_backend(path[0])
239
239
240 if path[0] == 'hg' and path[0] in BACKENDS.keys():
240 if path[0] == 'hg' and path[0] in BACKENDS.keys():
241 repos[name] = klass(safe_str(path[1]), baseui=baseui)
241 repos[name] = klass(safe_str(path[1]), baseui=baseui)
242
242
243 if path[0] == 'git' and path[0] in BACKENDS.keys():
243 if path[0] == 'git' and path[0] in BACKENDS.keys():
244 repos[name] = klass(path[1])
244 repos[name] = klass(path[1])
245 except OSError:
245 except OSError:
246 continue
246 continue
247
247
248 return repos
248 return repos
249
249
250 def get_repos(self, all_repos=None, sort_key=None, simple=False):
250 def get_repos(self, all_repos=None, sort_key=None, simple=False):
251 """
251 """
252 Get all repos from db and for each repo create its
252 Get all repos from db and for each repo create its
253 backend instance and fill that backend with information from the database
253 backend instance and fill that backend with information from the database
254
254
255 :param all_repos: list of repository names as strings
255 :param all_repos: list of repository names as strings
256 give a specific list of repositories, good for filtering
256 give a specific list of repositories, good for filtering
257
257
258 :param sort_key: initial sorting of repos
258 :param sort_key: initial sorting of repos
259 :param simple: use SimpleCachedList - one without the SCM info
259 :param simple: use SimpleCachedList - one without the SCM info
260 """
260 """
261 if all_repos is None:
261 if all_repos is None:
262 all_repos = self.sa.query(Repository)\
262 all_repos = self.sa.query(Repository)\
263 .filter(Repository.group_id == None)\
263 .filter(Repository.group_id == None)\
264 .order_by(func.lower(Repository.repo_name)).all()
264 .order_by(func.lower(Repository.repo_name)).all()
265 if simple:
265 if simple:
266 repo_iter = SimpleCachedRepoList(all_repos,
266 repo_iter = SimpleCachedRepoList(all_repos,
267 repos_path=self.repos_path,
267 repos_path=self.repos_path,
268 order_by=sort_key)
268 order_by=sort_key)
269 else:
269 else:
270 repo_iter = CachedRepoList(all_repos,
270 repo_iter = CachedRepoList(all_repos,
271 repos_path=self.repos_path,
271 repos_path=self.repos_path,
272 order_by=sort_key)
272 order_by=sort_key)
273
273
274 return repo_iter
274 return repo_iter
275
275
276 def get_repos_groups(self, all_groups=None):
276 def get_repos_groups(self, all_groups=None):
277 if all_groups is None:
277 if all_groups is None:
278 all_groups = RepoGroup.query()\
278 all_groups = RepoGroup.query()\
279 .filter(RepoGroup.group_parent_id == None).all()
279 .filter(RepoGroup.group_parent_id == None).all()
280 group_iter = GroupList(all_groups)
280 group_iter = GroupList(all_groups)
281
281
282 return group_iter
282 return group_iter
283
283
284 def mark_for_invalidation(self, repo_name):
284 def mark_for_invalidation(self, repo_name):
285 """
285 """
286 Puts cache invalidation task into db for
286 Puts cache invalidation task into db for
287 further global cache invalidation
287 further global cache invalidation
288
288
289 :param repo_name: the repo for which invalidation should take place
289 :param repo_name: the repo for which invalidation should take place
290 """
290 """
291 CacheInvalidation.set_invalidate(repo_name)
291 CacheInvalidation.set_invalidate(repo_name)
292
292
293 def toggle_following_repo(self, follow_repo_id, user_id):
293 def toggle_following_repo(self, follow_repo_id, user_id):
294
294
295 f = self.sa.query(UserFollowing)\
295 f = self.sa.query(UserFollowing)\
296 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
296 .filter(UserFollowing.follows_repo_id == follow_repo_id)\
297 .filter(UserFollowing.user_id == user_id).scalar()
297 .filter(UserFollowing.user_id == user_id).scalar()
298
298
299 if f is not None:
299 if f is not None:
300 try:
300 try:
301 self.sa.delete(f)
301 self.sa.delete(f)
302 action_logger(UserTemp(user_id),
302 action_logger(UserTemp(user_id),
303 'stopped_following_repo',
303 'stopped_following_repo',
304 RepoTemp(follow_repo_id))
304 RepoTemp(follow_repo_id))
305 return
305 return
306 except:
306 except:
307 log.error(traceback.format_exc())
307 log.error(traceback.format_exc())
308 raise
308 raise
309
309
310 try:
310 try:
311 f = UserFollowing()
311 f = UserFollowing()
312 f.user_id = user_id
312 f.user_id = user_id
313 f.follows_repo_id = follow_repo_id
313 f.follows_repo_id = follow_repo_id
314 self.sa.add(f)
314 self.sa.add(f)
315
315
316 action_logger(UserTemp(user_id),
316 action_logger(UserTemp(user_id),
317 'started_following_repo',
317 'started_following_repo',
318 RepoTemp(follow_repo_id))
318 RepoTemp(follow_repo_id))
319 except:
319 except:
320 log.error(traceback.format_exc())
320 log.error(traceback.format_exc())
321 raise
321 raise
322
322
323 def toggle_following_user(self, follow_user_id, user_id):
323 def toggle_following_user(self, follow_user_id, user_id):
324 f = self.sa.query(UserFollowing)\
324 f = self.sa.query(UserFollowing)\
325 .filter(UserFollowing.follows_user_id == follow_user_id)\
325 .filter(UserFollowing.follows_user_id == follow_user_id)\
326 .filter(UserFollowing.user_id == user_id).scalar()
326 .filter(UserFollowing.user_id == user_id).scalar()
327
327
328 if f is not None:
328 if f is not None:
329 try:
329 try:
330 self.sa.delete(f)
330 self.sa.delete(f)
331 return
331 return
332 except:
332 except:
333 log.error(traceback.format_exc())
333 log.error(traceback.format_exc())
334 raise
334 raise
335
335
336 try:
336 try:
337 f = UserFollowing()
337 f = UserFollowing()
338 f.user_id = user_id
338 f.user_id = user_id
339 f.follows_user_id = follow_user_id
339 f.follows_user_id = follow_user_id
340 self.sa.add(f)
340 self.sa.add(f)
341 except:
341 except:
342 log.error(traceback.format_exc())
342 log.error(traceback.format_exc())
343 raise
343 raise
344
344
345 def is_following_repo(self, repo_name, user_id, cache=False):
345 def is_following_repo(self, repo_name, user_id, cache=False):
346 r = self.sa.query(Repository)\
346 r = self.sa.query(Repository)\
347 .filter(Repository.repo_name == repo_name).scalar()
347 .filter(Repository.repo_name == repo_name).scalar()
348
348
349 f = self.sa.query(UserFollowing)\
349 f = self.sa.query(UserFollowing)\
350 .filter(UserFollowing.follows_repository == r)\
350 .filter(UserFollowing.follows_repository == r)\
351 .filter(UserFollowing.user_id == user_id).scalar()
351 .filter(UserFollowing.user_id == user_id).scalar()
352
352
353 return f is not None
353 return f is not None
354
354
355 def is_following_user(self, username, user_id, cache=False):
355 def is_following_user(self, username, user_id, cache=False):
356 u = User.get_by_username(username)
356 u = User.get_by_username(username)
357
357
358 f = self.sa.query(UserFollowing)\
358 f = self.sa.query(UserFollowing)\
359 .filter(UserFollowing.follows_user == u)\
359 .filter(UserFollowing.follows_user == u)\
360 .filter(UserFollowing.user_id == user_id).scalar()
360 .filter(UserFollowing.user_id == user_id).scalar()
361
361
362 return f is not None
362 return f is not None
363
363
364 def get_followers(self, repo):
364 def get_followers(self, repo):
365 repo = self._get_repo(repo)
365 repo = self._get_repo(repo)
366
366
367 return self.sa.query(UserFollowing)\
367 return self.sa.query(UserFollowing)\
368 .filter(UserFollowing.follows_repository == repo).count()
368 .filter(UserFollowing.follows_repository == repo).count()
369
369
370 def get_forks(self, repo):
370 def get_forks(self, repo):
371 repo = self._get_repo(repo)
371 repo = self._get_repo(repo)
372 return self.sa.query(Repository)\
372 return self.sa.query(Repository)\
373 .filter(Repository.fork == repo).count()
373 .filter(Repository.fork == repo).count()
374
374
375 def get_pull_requests(self, repo):
375 def get_pull_requests(self, repo):
376 repo = self._get_repo(repo)
376 repo = self._get_repo(repo)
377 return self.sa.query(PullRequest)\
377 return self.sa.query(PullRequest)\
378 .filter(PullRequest.other_repo == repo).count()
378 .filter(PullRequest.other_repo == repo).count()
379
379
380 def mark_as_fork(self, repo, fork, user):
380 def mark_as_fork(self, repo, fork, user):
381 repo = self.__get_repo(repo)
381 repo = self.__get_repo(repo)
382 fork = self.__get_repo(fork)
382 fork = self.__get_repo(fork)
383 if fork and repo.repo_id == fork.repo_id:
383 if fork and repo.repo_id == fork.repo_id:
384 raise Exception("Cannot set repository as fork of itself")
384 raise Exception("Cannot set repository as fork of itself")
385 repo.fork = fork
385 repo.fork = fork
386 self.sa.add(repo)
386 self.sa.add(repo)
387 return repo
387 return repo
388
388
389 def pull_changes(self, repo, username):
389 def pull_changes(self, repo, username):
390 dbrepo = self.__get_repo(repo)
390 dbrepo = self.__get_repo(repo)
391 clone_uri = dbrepo.clone_uri
391 clone_uri = dbrepo.clone_uri
392 if not clone_uri:
392 if not clone_uri:
393 raise Exception("This repository doesn't have a clone uri")
393 raise Exception("This repository doesn't have a clone uri")
394
394
395 repo = dbrepo.scm_instance
395 repo = dbrepo.scm_instance
396 try:
396 try:
397 extras = {
397 extras = {
398 'ip': '',
398 'ip': '',
399 'username': username,
399 'username': username,
400 'action': 'push_remote',
400 'action': 'push_remote',
401 'repository': dbrepo.repo_name,
401 'repository': dbrepo.repo_name,
402 'scm': repo.alias,
402 'scm': repo.alias,
403 }
403 }
404 Repository.inject_ui(repo, extras=extras)
404 Repository.inject_ui(repo, extras=extras)
405
405
406 if repo.alias == 'git':
406 if repo.alias == 'git':
407 repo.fetch(clone_uri)
407 repo.fetch(clone_uri)
408 else:
408 else:
409 repo.pull(clone_uri)
409 repo.pull(clone_uri)
410 self.mark_for_invalidation(dbrepo.repo_name)
410 self.mark_for_invalidation(dbrepo.repo_name)
411 except:
411 except:
412 log.error(traceback.format_exc())
412 log.error(traceback.format_exc())
413 raise
413 raise
414
414
415 def commit_change(self, repo, repo_name, cs, user, author, message,
415 def commit_change(self, repo, repo_name, cs, user, author, message,
416 content, f_path):
416 content, f_path):
417
417
418 if repo.alias == 'hg':
418 if repo.alias == 'hg':
419 from rhodecode.lib.vcs.backends.hg import \
419 from rhodecode.lib.vcs.backends.hg import \
420 MercurialInMemoryChangeset as IMC
420 MercurialInMemoryChangeset as IMC
421 elif repo.alias == 'git':
421 elif repo.alias == 'git':
422 from rhodecode.lib.vcs.backends.git import \
422 from rhodecode.lib.vcs.backends.git import \
423 GitInMemoryChangeset as IMC
423 GitInMemoryChangeset as IMC
424
424
425 # decoding here will ensure that we have properly encoded values
425 # decoding here will ensure that we have properly encoded values
426 # in any other case this will throw exceptions and deny the commit
426 # in any other case this will throw exceptions and deny the commit
427 content = safe_str(content)
427 content = safe_str(content)
428 path = safe_str(f_path)
428 path = safe_str(f_path)
429 # message and author need to be unicode
429 # message and author need to be unicode
430 # proper backend should then translate that into required type
430 # proper backend should then translate that into required type
431 message = safe_unicode(message)
431 message = safe_unicode(message)
432 author = safe_unicode(author)
432 author = safe_unicode(author)
433 m = IMC(repo)
433 m = IMC(repo)
434 m.change(FileNode(path, content))
434 m.change(FileNode(path, content))
435 tip = m.commit(message=message,
435 tip = m.commit(message=message,
436 author=author,
436 author=author,
437 parents=[cs], branch=cs.branch)
437 parents=[cs], branch=cs.branch)
438
438
439 new_cs = tip.short_id
439 action = 'push_local:%s' % tip.raw_id
440 action = 'push_local:%s' % new_cs
441
442 action_logger(user, action, repo_name)
440 action_logger(user, action, repo_name)
443
444 self.mark_for_invalidation(repo_name)
441 self.mark_for_invalidation(repo_name)
445
442
446 def create_node(self, repo, repo_name, cs, user, author, message, content,
443 def create_node(self, repo, repo_name, cs, user, author, message, content,
447 f_path):
444 f_path):
448 if repo.alias == 'hg':
445 if repo.alias == 'hg':
449 from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC
446 from rhodecode.lib.vcs.backends.hg import MercurialInMemoryChangeset as IMC
450 elif repo.alias == 'git':
447 elif repo.alias == 'git':
451 from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC
448 from rhodecode.lib.vcs.backends.git import GitInMemoryChangeset as IMC
452 # decoding here will ensure that we have properly encoded values
449 # decoding here will ensure that we have properly encoded values
453 # in any other case this will throw exceptions and deny the commit
450 # in any other case this will throw exceptions and deny the commit
454
451
455 if isinstance(content, (basestring,)):
452 if isinstance(content, (basestring,)):
456 content = safe_str(content)
453 content = safe_str(content)
457 elif isinstance(content, (file, cStringIO.OutputType,)):
454 elif isinstance(content, (file, cStringIO.OutputType,)):
458 content = content.read()
455 content = content.read()
459 else:
456 else:
460 raise Exception('Content is of unrecognized type %s' % (
457 raise Exception('Content is of unrecognized type %s' % (
461 type(content)
458 type(content)
462 ))
459 ))
463
460
464 message = safe_unicode(message)
461 message = safe_unicode(message)
465 author = safe_unicode(author)
462 author = safe_unicode(author)
466 path = safe_str(f_path)
463 path = safe_str(f_path)
467 m = IMC(repo)
464 m = IMC(repo)
468
465
469 if isinstance(cs, EmptyChangeset):
466 if isinstance(cs, EmptyChangeset):
470 # EmptyChangeset means we're editing an empty repository
467 # EmptyChangeset means we're editing an empty repository
471 parents = None
468 parents = None
472 else:
469 else:
473 parents = [cs]
470 parents = [cs]
474
471
475 m.add(FileNode(path, content=content))
472 m.add(FileNode(path, content=content))
476 tip = m.commit(message=message,
473 tip = m.commit(message=message,
477 author=author,
474 author=author,
478 parents=parents, branch=cs.branch)
475 parents=parents, branch=cs.branch)
479 new_cs = tip.short_id
480 action = 'push_local:%s' % new_cs
481
476
477 action = 'push_local:%s' % tip.raw_id
482 action_logger(user, action, repo_name)
478 action_logger(user, action, repo_name)
483
484 self.mark_for_invalidation(repo_name)
479 self.mark_for_invalidation(repo_name)
485
480
486 def get_nodes(self, repo_name, revision, root_path='/', flat=True):
481 def get_nodes(self, repo_name, revision, root_path='/', flat=True):
487 """
482 """
488 recursive walk in root dir and return a set of all paths in that dir,
483 recursive walk in root dir and return a set of all paths in that dir,
489 based on the repository walk function
484 based on the repository walk function
490
485
491 :param repo_name: name of repository
486 :param repo_name: name of repository
492 :param revision: revision for which to list nodes
487 :param revision: revision for which to list nodes
493 :param root_path: root path to list
488 :param root_path: root path to list
494 :param flat: return as a list; if False, returns a dict with description
489 :param flat: return as a list; if False, returns a dict with description
495
490
496 """
491 """
497 _files = list()
492 _files = list()
498 _dirs = list()
493 _dirs = list()
499 try:
494 try:
500 _repo = self.__get_repo(repo_name)
495 _repo = self.__get_repo(repo_name)
501 changeset = _repo.scm_instance.get_changeset(revision)
496 changeset = _repo.scm_instance.get_changeset(revision)
502 root_path = root_path.lstrip('/')
497 root_path = root_path.lstrip('/')
503 for topnode, dirs, files in changeset.walk(root_path):
498 for topnode, dirs, files in changeset.walk(root_path):
504 for f in files:
499 for f in files:
505 _files.append(f.path if flat else {"name": f.path,
500 _files.append(f.path if flat else {"name": f.path,
506 "type": "file"})
501 "type": "file"})
507 for d in dirs:
502 for d in dirs:
508 _dirs.append(d.path if flat else {"name": d.path,
503 _dirs.append(d.path if flat else {"name": d.path,
509 "type": "dir"})
504 "type": "dir"})
510 except RepositoryError:
505 except RepositoryError:
511 log.debug(traceback.format_exc())
506 log.debug(traceback.format_exc())
512 raise
507 raise
513
508
514 return _dirs, _files
509 return _dirs, _files
515
510
516 def get_unread_journal(self):
511 def get_unread_journal(self):
517 return self.sa.query(UserLog).count()
512 return self.sa.query(UserLog).count()
518
513
519 def get_repo_landing_revs(self, repo=None):
514 def get_repo_landing_revs(self, repo=None):
520 """
515 """
521 Generates select options with tags, branches and bookmarks (for hg only),
516 Generates select options with tags, branches and bookmarks (for hg only),
522 grouped by type
517 grouped by type
523
518
524 :param repo:
519 :param repo:
525 :type repo:
520 :type repo:
526 """
521 """
527
522
528 hist_l = []
523 hist_l = []
529 choices = []
524 choices = []
530 repo = self.__get_repo(repo)
525 repo = self.__get_repo(repo)
531 hist_l.append(['tip', _('latest tip')])
526 hist_l.append(['tip', _('latest tip')])
532 choices.append('tip')
527 choices.append('tip')
533 if not repo:
528 if not repo:
534 return choices, hist_l
529 return choices, hist_l
535
530
536 repo = repo.scm_instance
531 repo = repo.scm_instance
537
532
538 branches_group = ([(k, k) for k, v in
533 branches_group = ([(k, k) for k, v in
539 repo.branches.iteritems()], _("Branches"))
534 repo.branches.iteritems()], _("Branches"))
540 hist_l.append(branches_group)
535 hist_l.append(branches_group)
541 choices.extend([x[0] for x in branches_group[0]])
536 choices.extend([x[0] for x in branches_group[0]])
542
537
543 if repo.alias == 'hg':
538 if repo.alias == 'hg':
544 bookmarks_group = ([(k, k) for k, v in
539 bookmarks_group = ([(k, k) for k, v in
545 repo.bookmarks.iteritems()], _("Bookmarks"))
540 repo.bookmarks.iteritems()], _("Bookmarks"))
546 hist_l.append(bookmarks_group)
541 hist_l.append(bookmarks_group)
547 choices.extend([x[0] for x in bookmarks_group[0]])
542 choices.extend([x[0] for x in bookmarks_group[0]])
548
543
549 tags_group = ([(k, k) for k, v in
544 tags_group = ([(k, k) for k, v in
550 repo.tags.iteritems()], _("Tags"))
545 repo.tags.iteritems()], _("Tags"))
551 hist_l.append(tags_group)
546 hist_l.append(tags_group)
552 choices.extend([x[0] for x in tags_group[0]])
547 choices.extend([x[0] for x in tags_group[0]])
553
548
554 return choices, hist_l
549 return choices, hist_l
555
550
556 def install_git_hook(self, repo, force_create=False):
551 def install_git_hook(self, repo, force_create=False):
557 """
552 """
558 Creates a rhodecode hook inside a git repository
553 Creates a rhodecode hook inside a git repository
559
554
560 :param repo: Instance of VCS repo
555 :param repo: Instance of VCS repo
561 :param force_create: Create even if same name hook exists
556 :param force_create: Create even if same name hook exists
562 """
557 """
563
558
564 loc = jn(repo.path, 'hooks')
559 loc = jn(repo.path, 'hooks')
565 if not repo.bare:
560 if not repo.bare:
566 loc = jn(repo.path, '.git', 'hooks')
561 loc = jn(repo.path, '.git', 'hooks')
567 if not os.path.isdir(loc):
562 if not os.path.isdir(loc):
568 os.makedirs(loc)
563 os.makedirs(loc)
569
564
570 tmpl = pkg_resources.resource_string(
565 tmpl = pkg_resources.resource_string(
571 'rhodecode', jn('config', 'post_receive_tmpl.py')
566 'rhodecode', jn('config', 'post_receive_tmpl.py')
572 )
567 )
573
568
574 _hook_file = jn(loc, 'post-receive')
569 _hook_file = jn(loc, 'post-receive')
575 _rhodecode_hook = False
570 _rhodecode_hook = False
576 log.debug('Installing git hook in repo %s' % repo)
571 log.debug('Installing git hook in repo %s' % repo)
577 if os.path.exists(_hook_file):
572 if os.path.exists(_hook_file):
578 # let's take a look at this hook, maybe it's rhodecode?
573 # let's take a look at this hook, maybe it's rhodecode?
579 log.debug('hook exists, checking if it is from rhodecode')
574 log.debug('hook exists, checking if it is from rhodecode')
580 _HOOK_VER_PAT = re.compile(r'^RC_HOOK_VER')
575 _HOOK_VER_PAT = re.compile(r'^RC_HOOK_VER')
581 with open(_hook_file, 'rb') as f:
576 with open(_hook_file, 'rb') as f:
582 data = f.read()
577 data = f.read()
583 matches = re.compile(r'(?:%s)\s*=\s*(.*)'
578 matches = re.compile(r'(?:%s)\s*=\s*(.*)'
584 % 'RC_HOOK_VER').search(data)
579 % 'RC_HOOK_VER').search(data)
585 if matches:
580 if matches:
586 try:
581 try:
587 ver = matches.groups()[0]
582 ver = matches.groups()[0]
588 log.debug('got %s it is rhodecode' % (ver))
583 log.debug('got %s it is rhodecode' % (ver))
589 _rhodecode_hook = True
584 _rhodecode_hook = True
590 except:
585 except:
591 log.error(traceback.format_exc())
586 log.error(traceback.format_exc())
592
587
593 if _rhodecode_hook or force_create:
588 if _rhodecode_hook or force_create:
594 log.debug('writing hook file !')
589 log.debug('writing hook file !')
595 with open(_hook_file, 'wb') as f:
590 with open(_hook_file, 'wb') as f:
596 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
591 tmpl = tmpl.replace('_TMPL_', rhodecode.__version__)
597 f.write(tmpl)
592 f.write(tmpl)
598 os.chmod(_hook_file, 0755)
593 os.chmod(_hook_file, 0755)
599 else:
594 else:
600 log.debug('skipping writing hook file')
\ No newline at end of file
595 log.debug('skipping writing hook file')