##// END OF EJS Templates
git: fixed readme searcher for libgit2
marcink -
r4345:dc3889b3 default
parent child Browse files
Show More
@@ -1,493 +1,494 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT commit module
23 23 """
24 24
25 25 import re
26 26 import stat
27 27 from itertools import chain
28 28 from StringIO import StringIO
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 33 from rhodecode.lib.utils import safe_unicode, safe_str
34 34 from rhodecode.lib.utils2 import safe_int
35 35 from rhodecode.lib.vcs.conf import settings
36 36 from rhodecode.lib.vcs.backends import base
37 37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 38 from rhodecode.lib.vcs.nodes import (
39 39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 41 RemovedFileNodesGenerator, LargeFileNode)
42 42 from rhodecode.lib.vcs.compat import configparser
43 43
44 44
class GitCommit(base.BaseCommit):
    """
    Represents state of the repository at single commit id.
    """

    # pre_load keys that must NOT be sent through the remote bulk call:
    # they are either computed locally or are mercurial-only concepts.
    _filter_pre_load = [
        # done through a more complex tree walk on parents
        "affected_files",
        # done through subprocess not remote call
        "children",
        # done through a more complex tree walk on parents
        "status",
        # mercurial specific property not supported here
        "_file_paths",
        # mercurial specific property not supported here
        'obsolete',
        # mercurial specific property not supported here
        'phase',
        # mercurial specific property not supported here
        'hidden'
    ]

    def __init__(self, repository, raw_id, idx, pre_load=None):
        self.repository = repository
        self._remote = repository._remote
        # TODO: johbo: Tweak of raw_id should not be necessary
        self.raw_id = safe_str(raw_id)
        self.idx = idx

        self._set_bulk_properties(pre_load)

        # caches
        self._stat_modes = {}  # stat info for paths
        self._paths = {}  # path processed with parse_tree
        self.nodes = {}
        self._submodules = None

    def _set_bulk_properties(self, pre_load):
        """
        Fetch several commit attributes in a single remote round-trip and
        store them directly in ``__dict__``, short-circuiting the
        corresponding lazy properties.
        """
        if not pre_load:
            return
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        result = self._remote.bulk_request(self.raw_id, pre_load)
        for attr, value in result.items():
            if attr in ["author", "message"]:
                if value:
                    value = safe_unicode(value)
            elif attr == "date":
                value = utcdate_fromtimestamp(*value)
            elif attr == "parents":
                value = self._make_commits(value)
            elif attr == "branch":
                # remote returns a list; commits can sit on several branches
                value = value[0] if value else None
            self.__dict__[attr] = value

    @LazyProperty
    def _commit(self):
        # raw commit object as exposed by the vcsserver remote
        return self._remote[self.raw_id]

    @LazyProperty
    def _tree_id(self):
        return self._remote[self._commit['tree']]['id']

    @LazyProperty
    def id(self):
        return self.raw_id

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def message(self):
        return safe_unicode(self._remote.message(self.id))

    @LazyProperty
    def committer(self):
        # NOTE(review): returns the author, not git's committer field --
        # presumably intentional here; confirm against the remote API
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def author(self):
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def date(self):
        unix_ts, tz = self._remote.date(self.raw_id)
        return utcdate_fromtimestamp(unix_ts, tz)

    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed, deleted files for current commit
        """
        return self.changed, self.added, self.removed

    @LazyProperty
    def tags(self):
        tags = [safe_unicode(name) for name,
                commit_id in self.repository.tags.iteritems()
                if commit_id == self.raw_id]
        return tags

    @LazyProperty
    def commit_branches(self):
        branches = []
        for name, commit_id in self.repository.branches.iteritems():
            if commit_id == self.raw_id:
                branches.append(name)
        return branches

    @LazyProperty
    def branch(self):
        branches = self._remote.branch(self.raw_id)

        if branches:
            # actually commit can have multiple branches in git
            return safe_unicode(branches[0])

    def _get_tree_id_for_path(self, path):
        """
        Return ``[tree_id, tree_type]`` for `path` inside this commit.

        Results and stat modes are cached per path; raises via
        ``no_node_at_path`` when the path does not exist.
        """
        path = safe_str(path)
        if path in self._paths:
            return self._paths[path]

        tree_id = self._tree_id

        path = path.strip('/')
        if path == '':
            data = [tree_id, "tree"]
            self._paths[''] = data
            return data

        tree_id, tree_type, tree_mode = \
            self._remote.tree_and_type_for_path(self.raw_id, path)
        if tree_id is None:
            raise self.no_node_at_path(path)

        self._paths[path] = [tree_id, tree_type]
        self._stat_modes[path] = tree_mode

        # NOTE: a redundant "path not in self._paths" re-check was removed
        # here -- the key was assigned two lines above, so it was dead code
        return self._paths[path]

    def _get_kind(self, path):
        """Map the tree entry type at `path` to a ``NodeKind``, or None."""
        tree_id, type_ = self._get_tree_id_for_path(path)
        if type_ == 'blob':
            return NodeKind.FILE
        elif type_ == 'tree':
            return NodeKind.DIR
        elif type_ == 'link':
            return NodeKind.SUBMODULE
        return None

    def _get_filectx(self, path):
        """Validate that `path` is a file in this commit; return fixed path."""
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise CommitError(
                "File does not exist for commit %s at '%s'" % (self.raw_id, path))
        return path

    def _get_file_nodes(self):
        # flatten the file nodes produced by a full tree walk
        return chain(*(t[2] for t in self.walk()))

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parent_ids = self._remote.parents(self.id)
        return self._make_commits(parent_ids)

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """

        children = self._remote.children(self.raw_id)
        return self._make_commits(children)

    def _make_commits(self, commit_ids):
        """Resolve a list of commit ids into ``GitCommit`` instances."""
        # FIX: commit_maker previously ignored its `_commit_id` parameter and
        # read the leaked comprehension variable instead; bind the parameter.
        def commit_maker(_commit_id):
            return self.repository.get_commit(commit_id=_commit_id)

        return [commit_maker(commit_id) for commit_id in commit_ids]

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at the given `path`.
        """
        path = safe_str(path)
        # ensure path is traversed
        self._get_tree_id_for_path(path)
        return self._stat_modes[path]

    def is_link(self, path):
        """True when the entry at `path` is a symbolic link."""
        return stat.S_ISLNK(self.get_file_mode(path))

    def is_node_binary(self, path):
        """True when the blob at `path` is detected as binary by the remote."""
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.is_binary(tree_id)

    def get_file_content(self, path):
        """
        Returns content of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_as_pretty_string(tree_id)

    def get_file_content_streamed(self, path):
        """Return a streaming reader for the file content at `path`."""
        tree_id, _ = self._get_tree_id_for_path(path)
        stream_method = getattr(self._remote, 'stream:blob_as_pretty_string')
        return stream_method(tree_id)

    def get_file_size(self, path):
        """
        Returns size of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_raw_length(tree_id)

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of `GitCommit` objects for
        which file at given `path` has been modified.
        """

        path = self._get_filectx(path)
        hist = self._remote.node_history(self.raw_id, path, limit)
        return [
            self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in hist]

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, commit_id, commit lazy loader and line
        """

        result = self._remote.node_annotate(self.raw_id, path)

        for ln_no, commit_id, content in result:
            # FIX: bind commit_id as a default argument -- a plain closure
            # would late-bind and every loader would resolve to the last
            # commit_id of the loop once iteration has advanced
            yield (
                ln_no, commit_id,
                lambda commit_id=commit_id: self.repository.get_commit(
                    commit_id=commit_id, pre_load=pre_load),
                content)

    def get_nodes(self, path):
        """
        Return sorted Dir/File/SubModule nodes directly under `path`.

        :raises CommitError: when `path` is not a directory in this commit
        """

        if self._get_kind(path) != NodeKind.DIR:
            raise CommitError(
                "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
        path = self._fix_path(path)

        tree_id, _ = self._get_tree_id_for_path(path)

        dirnodes = []
        filenodes = []

        # extracted tree ID gives us our files...
        bytes_path = safe_str(path)  # libgit operates on bytes
        for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
            if type_ == 'link':
                url = self._get_submodule_url('/'.join((bytes_path, name)))
                dirnodes.append(SubModuleNode(
                    name, url=url, commit=id_, alias=self.repository.alias))
                continue

            if bytes_path != '':
                obj_path = '/'.join((bytes_path, name))
            else:
                obj_path = name
            if obj_path not in self._stat_modes:
                self._stat_modes[obj_path] = stat_

            if type_ == 'tree':
                dirnodes.append(DirNode(obj_path, commit=self))
            elif type_ == 'blob':
                filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
            else:
                raise CommitError(
                    "Requested object should be Tree or Blob, is %s", type_)

        nodes = dirnodes + filenodes
        for node in nodes:
            if node.path not in self.nodes:
                self.nodes[node.path] = node
        nodes.sort()
        return nodes

    def get_node(self, path, pre_load=None):
        """Return the single node (Root/Dir/File/SubModule) at `path`."""
        if isinstance(path, unicode):
            path = path.encode('utf-8')
        path = self._fix_path(path)
        if path not in self.nodes:
            try:
                tree_id, type_ = self._get_tree_id_for_path(path)
            except CommitError:
                raise NodeDoesNotExistError(
                    "Cannot find one of parents' directories for a given "
                    "path: %s" % path)

            if type_ in ['link', 'commit']:
                url = self._get_submodule_url(path)
                node = SubModuleNode(path, url=url, commit=tree_id,
                                     alias=self.repository.alias)
            elif type_ == 'tree':
                if path == '':
                    node = RootNode(commit=self)
                else:
                    node = DirNode(path, commit=self)
            elif type_ == 'blob':
                node = FileNode(path, commit=self, pre_load=pre_load)
                self._stat_modes[path] = node.mode
            else:
                raise self.no_node_at_path(path)

            # cache node
            self.nodes[path] = node

        return self.nodes[path]

    def get_largefile_node(self, path):
        """
        Return a ``LargeFileNode`` for `path` when it is an LFS pointer
        already present in the largefiles store, otherwise ``None``.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        pointer_spec = self._remote.is_large_file(tree_id)

        if pointer_spec:
            # content of that file regular FileNode is the hash of largefile
            file_id = pointer_spec.get('oid_hash')
            if self._remote.in_largefiles_store(file_id):
                lf_path = self._remote.store_path(file_id)
                return LargeFileNode(lf_path, commit=self, org_path=path)

    @LazyProperty
    def affected_files(self):
        """
        Gets a fast accessible file changes for given commit
        """
        added, modified, deleted = self._changes_cache
        return list(added.union(modified).union(deleted))

    @LazyProperty
    def _changes_cache(self):
        """
        Compute ``(added, modified, deleted)`` path sets by diffing the
        commit tree against each parent (or the empty tree for roots).
        """
        added = set()
        modified = set()
        deleted = set()
        _r = self._remote

        parents = self.parents
        if not self.parents:
            parents = [base.EmptyCommit()]
        for parent in parents:
            if isinstance(parent, base.EmptyCommit):
                oid = None
            else:
                oid = parent.raw_id
            changes = _r.tree_changes(oid, self.raw_id)
            for (oldpath, newpath), (_, _), (_, _) in changes:
                if newpath and oldpath:
                    modified.add(newpath)
                elif newpath and not oldpath:
                    added.add(newpath)
                elif not newpath and oldpath:
                    deleted.add(oldpath)
        return added, modified, deleted

    def _get_paths_for_status(self, status):
        """
        Returns sorted list of paths for given ``status``.

        :param status: one of: *added*, *modified* or *deleted*
        """
        added, modified, deleted = self._changes_cache
        return sorted({
            'added': list(added),
            'modified': list(modified),
            'deleted': list(deleted)}[status]
        )

    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        if not self.parents:
            # root commit: everything in the tree counts as added
            return list(self._get_file_nodes())
        return AddedFileNodesGenerator(self.added_paths, self)

    @LazyProperty
    def added_paths(self):
        return [n for n in self._get_paths_for_status('added')]

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return ChangedFileNodesGenerator(self.changed_paths, self)

    @LazyProperty
    def changed_paths(self):
        return [n for n in self._get_paths_for_status('modified')]

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return RemovedFileNodesGenerator(self.removed_paths, self)

    @LazyProperty
    def removed_paths(self):
        return [n for n in self._get_paths_for_status('deleted')]

    def _get_submodule_url(self, submodule_path):
        """
        Resolve `submodule_path` to its URL via the commit's ``.gitmodules``,
        parsing and caching all submodule entries on first use.
        """
        git_modules_path = '.gitmodules'

        if self._submodules is None:
            self._submodules = {}

            try:
                submodules_node = self.get_node(git_modules_path)
            except NodeDoesNotExistError:
                return None

            # ConfigParser fails if there are whitespaces, also it needs an iterable
            # file like content
            def iter_content(_content):
                for line in _content.splitlines():
                    yield line

            parser = configparser.RawConfigParser()
            parser.read_file(iter_content(submodules_node.content))

            for section in parser.sections():
                path = parser.get(section, 'path')
                url = parser.get(section, 'url')
                if path and url:
                    self._submodules[path.strip('/')] = url

        return self._submodules.get(submodule_path.strip('/'))
@@ -1,1172 +1,1172 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import os
22 22 import re
23 23 import shutil
24 24 import time
25 25 import logging
26 26 import traceback
27 27 import datetime
28 28
29 29 from pyramid.threadlocal import get_current_request
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode import events
33 33 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 34 from rhodecode.lib.caching_query import FromCache
35 35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
36 36 from rhodecode.lib.hooks_base import log_delete_repository
37 37 from rhodecode.lib.user_log_filter import user_log_filter
38 38 from rhodecode.lib.utils import make_db_config
39 39 from rhodecode.lib.utils2 import (
40 40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 41 get_current_rhodecode_user, safe_int, action_logger_generic)
42 42 from rhodecode.lib.vcs.backends import get_backend
43 43 from rhodecode.model import BaseModel
44 44 from rhodecode.model.db import (
45 45 _hash_key, func, case, joinedload, or_, in_filter_generator,
46 46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 49 from rhodecode.model.settings import VcsSettingsModel
50 50
51 51 log = logging.getLogger(__name__)
52 52
53 53
54 54 class RepoModel(BaseModel):
55 55
56 56 cls = Repository
57 57
58 58 def _get_user_group(self, users_group):
59 59 return self._get_instance(UserGroup, users_group,
60 60 callback=UserGroup.get_by_group_name)
61 61
62 62 def _get_repo_group(self, repo_group):
63 63 return self._get_instance(RepoGroup, repo_group,
64 64 callback=RepoGroup.get_by_group_name)
65 65
66 66 def _create_default_perms(self, repository, private):
67 67 # create default permission
68 68 default = 'repository.read'
69 69 def_user = User.get_default_user()
70 70 for p in def_user.user_perms:
71 71 if p.permission.permission_name.startswith('repository.'):
72 72 default = p.permission.permission_name
73 73 break
74 74
75 75 default_perm = 'repository.none' if private else default
76 76
77 77 repo_to_perm = UserRepoToPerm()
78 78 repo_to_perm.permission = Permission.get_by_key(default_perm)
79 79
80 80 repo_to_perm.repository = repository
81 81 repo_to_perm.user_id = def_user.user_id
82 82
83 83 return repo_to_perm
84 84
85 85 @LazyProperty
86 86 def repos_path(self):
87 87 """
88 88 Gets the repositories root path from database
89 89 """
90 90 settings_model = VcsSettingsModel(sa=self.sa)
91 91 return settings_model.get_repos_location()
92 92
93 93 def get(self, repo_id):
94 94 repo = self.sa.query(Repository) \
95 95 .filter(Repository.repo_id == repo_id)
96 96
97 97 return repo.scalar()
98 98
    def get_repo(self, repository):
        # thin public alias over BaseModel._get_repo; accepts id, name
        # or an already-resolved instance
        return self._get_repo(repository)
101 101
102 102 def get_by_repo_name(self, repo_name, cache=False):
103 103 repo = self.sa.query(Repository) \
104 104 .filter(Repository.repo_name == repo_name)
105 105
106 106 if cache:
107 107 name_key = _hash_key(repo_name)
108 108 repo = repo.options(
109 109 FromCache("sql_cache_short", "get_repo_%s" % name_key))
110 110 return repo.scalar()
111 111
112 112 def _extract_id_from_repo_name(self, repo_name):
113 113 if repo_name.startswith('/'):
114 114 repo_name = repo_name.lstrip('/')
115 115 by_id_match = re.match(r'^_(\d{1,})', repo_name)
116 116 if by_id_match:
117 117 return by_id_match.groups()[0]
118 118
119 119 def get_repo_by_id(self, repo_name):
120 120 """
121 121 Extracts repo_name by id from special urls.
122 122 Example url is _11/repo_name
123 123
124 124 :param repo_name:
125 125 :return: repo object if matched else None
126 126 """
127 127
128 128 try:
129 129 _repo_id = self._extract_id_from_repo_name(repo_name)
130 130 if _repo_id:
131 131 return self.get(_repo_id)
132 132 except Exception:
133 133 log.exception('Failed to extract repo_name from URL')
134 134
135 135 return None
136 136
137 137 def get_repos_for_root(self, root, traverse=False):
138 138 if traverse:
139 139 like_expression = u'{}%'.format(safe_unicode(root))
140 140 repos = Repository.query().filter(
141 141 Repository.repo_name.like(like_expression)).all()
142 142 else:
143 143 if root and not isinstance(root, RepoGroup):
144 144 raise ValueError(
145 145 'Root must be an instance '
146 146 'of RepoGroup, got:{} instead'.format(type(root)))
147 147 repos = Repository.query().filter(Repository.group == root).all()
148 148 return repos
149 149
150 150 def get_url(self, repo, request=None, permalink=False):
151 151 if not request:
152 152 request = get_current_request()
153 153
154 154 if not request:
155 155 return
156 156
157 157 if permalink:
158 158 return request.route_url(
159 159 'repo_summary', repo_name='_{}'.format(safe_str(repo.repo_id)))
160 160 else:
161 161 return request.route_url(
162 162 'repo_summary', repo_name=safe_str(repo.repo_name))
163 163
164 164 def get_commit_url(self, repo, commit_id, request=None, permalink=False):
165 165 if not request:
166 166 request = get_current_request()
167 167
168 168 if not request:
169 169 return
170 170
171 171 if permalink:
172 172 return request.route_url(
173 173 'repo_commit', repo_name=safe_str(repo.repo_id),
174 174 commit_id=commit_id)
175 175
176 176 else:
177 177 return request.route_url(
178 178 'repo_commit', repo_name=safe_str(repo.repo_name),
179 179 commit_id=commit_id)
180 180
181 181 def get_repo_log(self, repo, filter_term):
182 182 repo_log = UserLog.query()\
183 183 .filter(or_(UserLog.repository_id == repo.repo_id,
184 184 UserLog.repository_name == repo.repo_name))\
185 185 .options(joinedload(UserLog.user))\
186 186 .options(joinedload(UserLog.repository))\
187 187 .order_by(UserLog.action_date.desc())
188 188
189 189 repo_log = user_log_filter(repo_log, filter_term)
190 190 return repo_log
191 191
192 192 @classmethod
193 193 def update_commit_cache(cls, repositories=None):
194 194 if not repositories:
195 195 repositories = Repository.getAll()
196 196 for repo in repositories:
197 197 repo.update_commit_cache()
198 198
    def get_repos_as_dict(self, repo_list=None, admin=False,
                          super_user_actions=False, short_name=None):
        """
        Render *repo_list* into a list of row dicts for the repositories
        data-table, using the shared ``_dt_elements.mako`` partials.

        :param admin: adds the "action" column and shifts naive last-change
            timestamps to server-local time
        :param super_user_actions: passed through to the actions renderer
        :param short_name: force short/long repo-name rendering; when None
            it defaults to ``not admin``
        """

        _render = get_current_request().get_partial_renderer(
            'rhodecode:templates/data_table/_dt_elements.mako')
        c = _render.get_call_context()
        h = _render.get_helpers()

        # each closure renders one cell via a named mako def
        def quick_menu(repo_name):
            return _render('quick_menu', repo_name)

        def repo_lnk(name, rtype, rstate, private, archived, fork_of):
            if short_name is not None:
                short_name_var = short_name
            else:
                short_name_var = not admin
            return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
                           short_name=short_name_var, admin=False)

        def last_change(last_change):
            # naive datetimes are shifted by the current UTC offset for the
            # admin view -- assumes stored values are UTC (TODO confirm)
            if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
                ts = time.time()
                utc_offset = (datetime.datetime.fromtimestamp(ts)
                              - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
                last_change = last_change + datetime.timedelta(seconds=utc_offset)

            return _render("last_change", last_change)

        def rss_lnk(repo_name):
            return _render("rss", repo_name)

        def atom_lnk(repo_name):
            return _render("atom", repo_name)

        def last_rev(repo_name, cs_cache):
            return _render('revision', repo_name, cs_cache.get('revision'),
                           cs_cache.get('raw_id'), cs_cache.get('author'),
                           cs_cache.get('message'), cs_cache.get('date'))

        def desc(desc):
            return _render('repo_desc', desc, c.visual.stylify_metatags)

        def state(repo_state):
            return _render("repo_state", repo_state)

        def repo_actions(repo_name):
            return _render('repo_actions', repo_name, super_user_actions)

        def user_profile(username):
            return _render('user_profile', username)

        repos_data = []
        for repo in repo_list:
            # NOTE(marcink): because we use only raw column we need to load it like that
            changeset_cache = Repository._load_changeset_cache(
                repo.repo_id, repo._changeset_cache)

            row = {
                "menu": quick_menu(repo.repo_name),

                "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
                                 repo.private, repo.archived, repo.fork),

                "desc": desc(h.escape(repo.description)),

                "last_change": last_change(repo.updated_on),

                "last_changeset": last_rev(repo.repo_name, changeset_cache),
                "last_changeset_raw": changeset_cache.get('revision'),

                # NOTE(review): repo.User comes from the joined query row in
                # get_repos_data_table -- plain Repository objects expose
                # ``user`` instead; verify callers
                "owner": user_profile(repo.User.username),

                "state": state(repo.repo_state),
                "rss": rss_lnk(repo.repo_name),
                "atom": atom_lnk(repo.repo_name),
            }
            if admin:
                row.update({
                    "action": repo_actions(repo.repo_name),
                })
            repos_data.append(row)

        return repos_data
282 282
    def get_repos_data_table(
            self, draw, start, limit,
            search_q, order_by, order_dir,
            auth_user, repo_group_id):
        """
        Server-side DataTables endpoint for the repositories of one group:
        permission-filters, counts, sorts and paginates, then renders rows
        via :meth:`get_repos_as_dict`.
        """
        # local import to avoid a circular dependency with the scm model
        from rhodecode.model.scm import RepoList

        _perms = ['repository.read', 'repository.write', 'repository.admin']

        # restrict to repos the requesting user may actually see
        repos = Repository.query() \
            .filter(Repository.group_id == repo_group_id) \
            .all()
        auth_repo_list = RepoList(
            repos, perm_set=_perms,
            extra_kwargs=dict(user=auth_user))

        # -1 sentinel keeps the IN() clause valid when nothing is allowed
        allowed_ids = [-1]
        for repo in auth_repo_list:
            allowed_ids.append(repo.repo_id)

        repos_data_total_count = Repository.query() \
            .filter(Repository.group_id == repo_group_id) \
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(Repository.repo_id, allowed_ids))
            ) \
            .count()

        base_q = Session.query(
            Repository.repo_id,
            Repository.repo_name,
            Repository.description,
            Repository.repo_type,
            Repository.repo_state,
            Repository.private,
            Repository.archived,
            Repository.fork,
            Repository.updated_on,
            Repository._changeset_cache,
            User,
            ) \
            .filter(Repository.group_id == repo_group_id) \
            .filter(or_(
                # generate multiple IN to fix limitation problems
                *in_filter_generator(Repository.repo_id, allowed_ids))
            ) \
            .join(User, User.user_id == Repository.user_id) \
            .group_by(Repository, User)

        repos_data_total_filtered_count = base_q.count()

        # map the client-supplied column name to a sortable expression
        sort_defined = False
        if order_by == 'repo_name':
            sort_col = func.lower(Repository.repo_name)
            sort_defined = True
        elif order_by == 'user_username':
            sort_col = User.username
        else:
            sort_col = getattr(Repository, order_by, None)

        if sort_defined or sort_col:
            if order_dir == 'asc':
                sort_col = sort_col.asc()
            else:
                sort_col = sort_col.desc()

        # NOTE(review): when order_by is unknown, sort_col stays None and
        # order_by(None) resets ordering -- presumably intentional; confirm
        base_q = base_q.order_by(sort_col)
        base_q = base_q.offset(start).limit(limit)

        repos_list = base_q.all()

        repos_data = RepoModel().get_repos_as_dict(
            repo_list=repos_list, admin=False)

        data = ({
            'draw': draw,
            'data': repos_data,
            'recordsTotal': repos_data_total_count,
            'recordsFiltered': repos_data_total_filtered_count,
        })
        return data
363 363
    def _get_defaults(self, repo_name):
        """
        Gets information about repository, and returns a dict for
        usage in forms

        :param repo_name:
        """

        repo_info = Repository.get_by_repo_name(repo_name)

        if repo_info is None:
            return None

        defaults = repo_info.get_dict()
        defaults['repo_name'] = repo_info.just_name

        groups = repo_info.groups_with_parents
        parent_group = groups[-1] if groups else None

        # we use -1 as this is how in HTML, we mark an empty group
        defaults['repo_group'] = getattr(parent_group, 'group_id', -1)

        # form fields use the repo_-prefixed name; 'strip' marks which ones
        # map to an unprefixed DB attribute
        keys_to_process = (
            {'k': 'repo_type', 'strip': False},
            {'k': 'repo_enable_downloads', 'strip': True},
            {'k': 'repo_description', 'strip': True},
            {'k': 'repo_enable_locking', 'strip': True},
            {'k': 'repo_landing_rev', 'strip': True},
            {'k': 'clone_uri', 'strip': False},
            {'k': 'push_uri', 'strip': False},
            {'k': 'repo_private', 'strip': True},
            {'k': 'repo_enable_statistics', 'strip': True}
        )

        for item in keys_to_process:
            attr = item['k']
            if item['strip']:
                attr = remove_prefix(item['k'], 'repo_')

            val = defaults[attr]
            if item['k'] == 'repo_landing_rev':
                # landing rev is stored as a (type, ref) pair; forms expect
                # the joined "type:ref" string
                val = ':'.join(defaults[attr])
            defaults[item['k']] = val
            if item['k'] == 'clone_uri':
                defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
            if item['k'] == 'push_uri':
                defaults['push_uri_hidden'] = repo_info.push_uri_hidden

        # fill owner
        if repo_info.user:
            defaults.update({'user': repo_info.user.username})
        else:
            # orphaned repo: fall back to the first super-admin as owner
            replacement_user = User.get_first_super_admin().username
            defaults.update({'user': replacement_user})

        return defaults
420 420
421 421 def update(self, repo, **kwargs):
422 422 try:
423 423 cur_repo = self._get_repo(repo)
424 424 source_repo_name = cur_repo.repo_name
425 425 if 'user' in kwargs:
426 426 cur_repo.user = User.get_by_username(kwargs['user'])
427 427
428 428 if 'repo_group' in kwargs:
429 429 cur_repo.group = RepoGroup.get(kwargs['repo_group'])
430 430 log.debug('Updating repo %s with params:%s', cur_repo, kwargs)
431 431
432 432 update_keys = [
433 433 (1, 'repo_description'),
434 434 (1, 'repo_landing_rev'),
435 435 (1, 'repo_private'),
436 436 (1, 'repo_enable_downloads'),
437 437 (1, 'repo_enable_locking'),
438 438 (1, 'repo_enable_statistics'),
439 439 (0, 'clone_uri'),
440 440 (0, 'push_uri'),
441 441 (0, 'fork_id')
442 442 ]
443 443 for strip, k in update_keys:
444 444 if k in kwargs:
445 445 val = kwargs[k]
446 446 if strip:
447 447 k = remove_prefix(k, 'repo_')
448 448
449 449 setattr(cur_repo, k, val)
450 450
451 451 new_name = cur_repo.get_new_name(kwargs['repo_name'])
452 452 cur_repo.repo_name = new_name
453 453
454 454 # if private flag is set, reset default permission to NONE
455 455 if kwargs.get('repo_private'):
456 456 EMPTY_PERM = 'repository.none'
457 457 RepoModel().grant_user_permission(
458 458 repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
459 459 )
460 460
461 461 # handle extra fields
462 462 for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
463 463 k = RepositoryField.un_prefix_key(field)
464 464 ex_field = RepositoryField.get_by_key_name(
465 465 key=k, repo=cur_repo)
466 466 if ex_field:
467 467 ex_field.field_value = kwargs[field]
468 468 self.sa.add(ex_field)
469 469
470 470 self.sa.add(cur_repo)
471 471
472 472 if source_repo_name != new_name:
473 473 # rename repository
474 474 self._rename_filesystem_repo(
475 475 old=source_repo_name, new=new_name)
476 476
477 477 return cur_repo
478 478 except Exception:
479 479 log.error(traceback.format_exc())
480 480 raise
481 481
    def _create_repo(self, repo_name, repo_type, description, owner,
                     private=False, clone_uri=None, repo_group=None,
                     landing_rev='rev:tip', fork_of=None,
                     copy_fork_permissions=False, enable_statistics=False,
                     enable_locking=False, enable_downloads=False,
                     copy_group_permissions=False,
                     state=Repository.STATE_PENDING):
        """
        Create repository inside database with PENDING state, this should be
        only executed by create() repo. With exception of importing existing
        repos

        :param repo_name: full repository name (may contain group separators)
        :param repo_type: backend alias, e.g. 'hg' or 'git'
        :param description: repository description; falls back to repo name
        :param owner: User instance, user_id or username of the owner
        :param private: when True, default user permission is reset to
            'repository.none' (after any permission copying)
        :param clone_uri: optional remote URI the repo is created from
        :param repo_group: RepoGroup (or id) to place the repository in
        :param landing_rev: default landing revision spec
        :param fork_of: Repository (or id/name) this repo is a fork of
        :param copy_fork_permissions: copy user/group perms from fork parent
        :param enable_statistics: feature flag on the new repo
        :param enable_locking: feature flag; overridden by the group's setting
        :param enable_downloads: feature flag on the new repo
        :param copy_group_permissions: copy perms from the parent repo group
        :param state: initial Repository.repo_state, PENDING by default
        :returns: the new (flushed but not committed) Repository instance
        """
        from rhodecode.model.scm import ScmModel

        owner = self._get_user(owner)
        fork_of = self._get_repo(fork_of)
        repo_group = self._get_repo_group(safe_int(repo_group))

        try:
            repo_name = safe_unicode(repo_name)
            description = safe_unicode(description)
            # repo name is just a name of repository
            # while repo_name_full is a full qualified name that is combined
            # with name and path of group
            repo_name_full = repo_name
            repo_name = repo_name.split(Repository.NAME_SEP)[-1]

            new_repo = Repository()
            new_repo.repo_state = state
            # NOTE(review): this assignment is overwritten a few lines below
            # by `enable_statistics` — confirm it is dead before removing
            new_repo.enable_statistics = False
            new_repo.repo_name = repo_name_full
            new_repo.repo_type = repo_type
            new_repo.user = owner
            new_repo.group = repo_group
            new_repo.description = description or repo_name
            new_repo.private = private
            new_repo.archived = False
            new_repo.clone_uri = clone_uri
            new_repo.landing_rev = landing_rev

            new_repo.enable_statistics = enable_statistics
            new_repo.enable_locking = enable_locking
            new_repo.enable_downloads = enable_downloads

            if repo_group:
                # repos placed in a group inherit the group's locking setting
                new_repo.enable_locking = repo_group.enable_locking

            if fork_of:
                parent_repo = fork_of
                new_repo.fork = parent_repo

            events.trigger(events.RepoPreCreateEvent(new_repo))

            self.sa.add(new_repo)

            EMPTY_PERM = 'repository.none'
            if fork_of and copy_fork_permissions:
                repo = fork_of
                user_perms = UserRepoToPerm.query() \
                    .filter(UserRepoToPerm.repository == repo).all()
                group_perms = UserGroupRepoToPerm.query() \
                    .filter(UserGroupRepoToPerm.repository == repo).all()

                for perm in user_perms:
                    UserRepoToPerm.create(
                        perm.user, new_repo, perm.permission)

                for perm in group_perms:
                    UserGroupRepoToPerm.create(
                        perm.users_group, new_repo, perm.permission)
                # in case we copy permissions and also set this repo to private
                # override the default user permission to make it a private repo
                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            elif repo_group and copy_group_permissions:
                user_perms = UserRepoGroupToPerm.query() \
                    .filter(UserRepoGroupToPerm.group == repo_group).all()

                group_perms = UserGroupRepoGroupToPerm.query() \
                    .filter(UserGroupRepoGroupToPerm.group == repo_group).all()

                # map group-level permission names onto repo-level ones
                for perm in user_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserRepoToPerm.create(perm.user, new_repo, perm_obj)

                for perm in group_perms:
                    perm_name = perm.permission.permission_name.replace(
                        'group.', 'repository.')
                    perm_obj = Permission.get_by_key(perm_name)
                    UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)

                if private:
                    RepoModel(self.sa).grant_user_permission(
                        repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)

            else:
                perm_obj = self._create_default_perms(new_repo, private)
                self.sa.add(perm_obj)

            # now automatically start following this repository as owner
            ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)

            # we need to flush here, in order to check if database won't
            # throw any exceptions, create filesystem dirs at the very end
            self.sa.flush()
            events.trigger(events.RepoCreateEvent(new_repo))
            return new_repo

        except Exception:
            log.error(traceback.format_exc())
            raise
597 597
598 598 def create(self, form_data, cur_user):
599 599 """
600 600 Create repository using celery tasks
601 601
602 602 :param form_data:
603 603 :param cur_user:
604 604 """
605 605 from rhodecode.lib.celerylib import tasks, run_task
606 606 return run_task(tasks.create_repo, form_data, cur_user)
607 607
    def update_permissions(self, repo, perm_additions=None, perm_updates=None,
                           perm_deletions=None, check_perms=True,
                           cur_user=None):
        """
        Apply a batch of permission changes (updates, additions, deletions)
        to a repository.

        :param repo: Repository instance, repository_id or repository name
        :param perm_updates: list of (member_id, perm, member_type) to change
        :param perm_additions: list of (member_id, perm, member_type) to add
        :param perm_deletions: list of (member_id, perm, member_type) to drop;
            member_type is 'user' or 'user_group'
        :param check_perms: when True, changes that touch a user group require
            the calling user to hold read/write/admin on that group
        :param cur_user: the user performing the change (for permission checks)
        :returns: dict with 'added'/'updated'/'deleted' change descriptions
            and a 'default_user_changed' flag
        :raises ValueError: on an unknown member_type
        """
        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': [],
            'default_user_changed': None
        }

        repo = self._get_repo(repo)

        # update permissions
        for member_id, perm, member_type in perm_updates:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                if member_name == User.DEFAULT_USER:
                    # NOTE(dan): detect if we changed permissions for default user
                    perm_obj = self.sa.query(UserRepoToPerm) \
                        .filter(UserRepoToPerm.user_id == member_id) \
                        .filter(UserRepoToPerm.repository == repo) \
                        .scalar()
                    if perm_obj and perm_obj.permission.permission_name != perm:
                        changes['default_user_changed'] = True

                # this updates also current one if found
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))
            changes['updated'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})

        # set new permissions
        for member_id, perm, member_type in perm_additions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))

            changes['added'].append({'type': member_type, 'id': member_id,
                                     'name': member_name, 'new_perm': perm})
        # delete permissions
        for member_id, perm, member_type in perm_deletions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.revoke_user_permission(repo=repo, user=member_id)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.revoke_user_group_permission(
                        repo=repo, group_name=member_id)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))

            changes['deleted'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})
        return changes
699 699
700 700 def create_fork(self, form_data, cur_user):
701 701 """
702 702 Simple wrapper into executing celery task for fork creation
703 703
704 704 :param form_data:
705 705 :param cur_user:
706 706 """
707 707 from rhodecode.lib.celerylib import tasks, run_task
708 708 return run_task(tasks.create_repo_fork, form_data, cur_user)
709 709
710 710 def archive(self, repo):
711 711 """
712 712 Archive given repository. Set archive flag.
713 713
714 714 :param repo:
715 715 """
716 716 repo = self._get_repo(repo)
717 717 if repo:
718 718
719 719 try:
720 720 repo.archived = True
721 721 self.sa.add(repo)
722 722 self.sa.commit()
723 723 except Exception:
724 724 log.error(traceback.format_exc())
725 725 raise
726 726
727 727 def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
728 728 """
729 729 Delete given repository, forks parameter defines what do do with
730 730 attached forks. Throws AttachedForksError if deleted repo has attached
731 731 forks
732 732
733 733 :param repo:
734 734 :param forks: str 'delete' or 'detach'
735 735 :param pull_requests: str 'delete' or None
736 736 :param fs_remove: remove(archive) repo from filesystem
737 737 """
738 738 if not cur_user:
739 739 cur_user = getattr(get_current_rhodecode_user(), 'username', None)
740 740 repo = self._get_repo(repo)
741 741 if repo:
742 742 if forks == 'detach':
743 743 for r in repo.forks:
744 744 r.fork = None
745 745 self.sa.add(r)
746 746 elif forks == 'delete':
747 747 for r in repo.forks:
748 748 self.delete(r, forks='delete')
749 749 elif [f for f in repo.forks]:
750 750 raise AttachedForksError()
751 751
752 752 # check for pull requests
753 753 pr_sources = repo.pull_requests_source
754 754 pr_targets = repo.pull_requests_target
755 755 if pull_requests != 'delete' and (pr_sources or pr_targets):
756 756 raise AttachedPullRequestsError()
757 757
758 758 old_repo_dict = repo.get_dict()
759 759 events.trigger(events.RepoPreDeleteEvent(repo))
760 760 try:
761 761 self.sa.delete(repo)
762 762 if fs_remove:
763 763 self._delete_filesystem_repo(repo)
764 764 else:
765 765 log.debug('skipping removal from filesystem')
766 766 old_repo_dict.update({
767 767 'deleted_by': cur_user,
768 768 'deleted_on': time.time(),
769 769 })
770 770 log_delete_repository(**old_repo_dict)
771 771 events.trigger(events.RepoDeleteEvent(repo))
772 772 except Exception:
773 773 log.error(traceback.format_exc())
774 774 raise
775 775
    def grant_user_permission(self, repo, user, perm):
        """
        Grant permission for user on given repository, or update existing one
        if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param user: Instance of User, user_id or username
        :param perm: Instance of Permission, or permission_name
        :returns: the created or updated UserRepoToPerm row
        """
        user = self._get_user(user)
        repo = self._get_repo(repo)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserRepoToPerm) \
            .filter(UserRepoToPerm.user == user) \
            .filter(UserRepoToPerm.repository == repo) \
            .scalar()
        if obj is None:
            # create new !
            obj = UserRepoToPerm()
        # assignments run for both new and existing rows, so an existing
        # grant gets its permission updated in place
        obj.repository = repo
        obj.user = user
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, user, repo)
        action_logger_generic(
            'granted permission: {} to user: {} on repo: {}'.format(
                perm, user, repo), namespace='security.repo')
        return obj
806 806
807 807 def revoke_user_permission(self, repo, user):
808 808 """
809 809 Revoke permission for user on given repository
810 810
811 811 :param repo: Instance of Repository, repository_id, or repository name
812 812 :param user: Instance of User, user_id or username
813 813 """
814 814
815 815 user = self._get_user(user)
816 816 repo = self._get_repo(repo)
817 817
818 818 obj = self.sa.query(UserRepoToPerm) \
819 819 .filter(UserRepoToPerm.repository == repo) \
820 820 .filter(UserRepoToPerm.user == user) \
821 821 .scalar()
822 822 if obj:
823 823 self.sa.delete(obj)
824 824 log.debug('Revoked perm on %s on %s', repo, user)
825 825 action_logger_generic(
826 826 'revoked permission from user: {} on repo: {}'.format(
827 827 user, repo), namespace='security.repo')
828 828
    def grant_user_group_permission(self, repo, group_name, perm):
        """
        Grant permission for user group on given repository, or update
        existing one if found

        :param repo: Instance of Repository, repository_id, or repository name
        :param group_name: Instance of UserGroup, users_group_id,
            or user group name
        :param perm: Instance of Permission, or permission_name
        :returns: the created or updated UserGroupRepoToPerm row
        """
        repo = self._get_repo(repo)
        group_name = self._get_user_group(group_name)
        permission = self._get_perm(perm)

        # check if we have that permission already
        obj = self.sa.query(UserGroupRepoToPerm) \
            .filter(UserGroupRepoToPerm.users_group == group_name) \
            .filter(UserGroupRepoToPerm.repository == repo) \
            .scalar()

        if obj is None:
            # create new
            obj = UserGroupRepoToPerm()

        # assignments run for both new and existing rows, so an existing
        # grant gets its permission updated in place
        obj.repository = repo
        obj.users_group = group_name
        obj.permission = permission
        self.sa.add(obj)
        log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
        action_logger_generic(
            'granted permission: {} to usergroup: {} on repo: {}'.format(
                perm, group_name, repo), namespace='security.repo')

        return obj
863 863
864 864 def revoke_user_group_permission(self, repo, group_name):
865 865 """
866 866 Revoke permission for user group on given repository
867 867
868 868 :param repo: Instance of Repository, repository_id, or repository name
869 869 :param group_name: Instance of UserGroup, users_group_id,
870 870 or user group name
871 871 """
872 872 repo = self._get_repo(repo)
873 873 group_name = self._get_user_group(group_name)
874 874
875 875 obj = self.sa.query(UserGroupRepoToPerm) \
876 876 .filter(UserGroupRepoToPerm.repository == repo) \
877 877 .filter(UserGroupRepoToPerm.users_group == group_name) \
878 878 .scalar()
879 879 if obj:
880 880 self.sa.delete(obj)
881 881 log.debug('Revoked perm to %s on %s', repo, group_name)
882 882 action_logger_generic(
883 883 'revoked permission from usergroup: {} on repo: {}'.format(
884 884 group_name, repo), namespace='security.repo')
885 885
886 886 def delete_stats(self, repo_name):
887 887 """
888 888 removes stats for given repo
889 889
890 890 :param repo_name:
891 891 """
892 892 repo = self._get_repo(repo_name)
893 893 try:
894 894 obj = self.sa.query(Statistics) \
895 895 .filter(Statistics.repository == repo).scalar()
896 896 if obj:
897 897 self.sa.delete(obj)
898 898 except Exception:
899 899 log.error(traceback.format_exc())
900 900 raise
901 901
902 902 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
903 903 field_type='str', field_desc=''):
904 904
905 905 repo = self._get_repo(repo_name)
906 906
907 907 new_field = RepositoryField()
908 908 new_field.repository = repo
909 909 new_field.field_key = field_key
910 910 new_field.field_type = field_type # python type
911 911 new_field.field_value = field_value
912 912 new_field.field_desc = field_desc
913 913 new_field.field_label = field_label
914 914 self.sa.add(new_field)
915 915 return new_field
916 916
917 917 def delete_repo_field(self, repo_name, field_key):
918 918 repo = self._get_repo(repo_name)
919 919 field = RepositoryField.get_by_key_name(field_key, repo)
920 920 if field:
921 921 self.sa.delete(field)
922 922
    def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
                                clone_uri=None, repo_store_location=None,
                                use_global_config=False, install_hooks=True):
        """
        makes repository on filesystem. It's group aware means it'll create
        a repository within a group, and alter the paths accordingly of
        group location

        :param repo_name: plain repo name; must NOT contain group separators
        :param repo_type: backend alias used to pick the vcs backend
        :param repo_group: RepoGroup instance or group path prefix string
        :param clone_uri: optional source URL to clone from on creation
        :param repo_store_location: override base path for the new repo
        :param use_global_config: when True, use global (non repo-scoped)
            vcs config instead of one keyed to the new repo
        :param install_hooks: install rhodecode hooks into the new repo
        :returns: the created vcs backend repository object
        :raises ValueError: if repo_name contains group separators
        :raises Exception: if the target path is already a repo or a group
        """
        from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
        # NOTE(review): ScmModel appears unused in this method — confirm
        # before removing the import
        from rhodecode.model.scm import ScmModel

        if Repository.NAME_SEP in repo_name:
            raise ValueError(
                'repo_name must not contain groups got `%s`' % repo_name)

        if isinstance(repo_group, RepoGroup):
            new_parent_path = os.sep.join(repo_group.full_path_splitted)
        else:
            new_parent_path = repo_group or ''

        if repo_store_location:
            _paths = [repo_store_location]
        else:
            _paths = [self.repos_path, new_parent_path, repo_name]
        # we need to make it str for mercurial
        repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))

        # check if this path is not a repository
        if is_valid_repo(repo_path, self.repos_path):
            raise Exception('This path %s is a valid repository' % repo_path)

        # check if this path is a group
        if is_valid_repo_group(repo_path, self.repos_path):
            raise Exception('This path %s is a valid group' % repo_path)

        log.info('creating repo %s in %s from url: `%s`',
                 repo_name, safe_unicode(repo_path),
                 obfuscate_url_pw(clone_uri))

        backend = get_backend(repo_type)

        # scope the vcs config to the new repo (including its group prefix)
        # unless the caller asked for the global config
        config_repo = None if use_global_config else repo_name
        if config_repo and new_parent_path:
            config_repo = Repository.NAME_SEP.join(
                (new_parent_path, config_repo))
        config = make_db_config(clear_session=False, repo=config_repo)
        config.set('extensions', 'largefiles', '')

        # patch and reset hooks section of UI config to not run any
        # hooks on creating remote repo
        config.clear_section('hooks')

        # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
        if repo_type == 'git':
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri, bare=True,
                with_wire={"cache": False})
        else:
            repo = backend(
                repo_path, config=config, create=True, src_url=clone_uri,
                with_wire={"cache": False})

        if install_hooks:
            repo.install_hooks()

        log.debug('Created repo %s with %s backend',
                  safe_unicode(repo_name), safe_unicode(repo_type))
        return repo
997 997
998 998 def _rename_filesystem_repo(self, old, new):
999 999 """
1000 1000 renames repository on filesystem
1001 1001
1002 1002 :param old: old name
1003 1003 :param new: new name
1004 1004 """
1005 1005 log.info('renaming repo from %s to %s', old, new)
1006 1006
1007 1007 old_path = os.path.join(self.repos_path, old)
1008 1008 new_path = os.path.join(self.repos_path, new)
1009 1009 if os.path.isdir(new_path):
1010 1010 raise Exception(
1011 1011 'Was trying to rename to already existing dir %s' % new_path
1012 1012 )
1013 1013 shutil.move(old_path, new_path)
1014 1014
    def _delete_filesystem_repo(self, repo):
        """
        removes repo from filesystem, the removal is actually made by
        adding an rm__ prefix to the dir, and renaming the internal
        .hg/.git dir, so this repository is no longer valid for rhodecode;
        it can be undeleted later on by reverting the renames

        :param repo: repo object
        """
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable hg/git internal that it doesn't get detected as repo
        alias = repo.repo_type

        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        # build a unique trash name: rm__<timestamp_with_microseconds>__<name>
        _now = datetime.datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))

        # finally cleanup diff-cache if it exists
        cached_diffs_dir = repo.cached_diffs_dir
        if os.path.isdir(cached_diffs_dir):
            shutil.rmtree(cached_diffs_dir)
1057 1057
1058 1058
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    different.
    """

    readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
    path_re = re.compile(r'^docs?', re.IGNORECASE)

    # lower numbers win; keyed by readme file extension (None = no extension)
    default_priorities = {
        None: 0,
        '.text': 2,
        '.txt': 3,
        '.rst': 1,
        '.rest': 2,
        '.md': 1,
        '.mkdn': 2,
        '.mdown': 3,
        '.markdown': 4,
    }

    # preference between candidate sub-directories that may hold a readme
    path_priority = {
        'doc': 0,
        'docs': 1,
    }

    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': ['.rst', '.rest'],
        # FIX: '.mkdn' previously lacked its leading dot ('mkdn') and could
        # never match a real file extension
        'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
    }

    def __init__(self, default_renderer=None):
        self._default_renderer = default_renderer
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path=u'/'):
        """
        Find a readme in the given `commit`.

        Direct matches in `path` win over matches inside preferred
        sub-directories (doc/docs), which are searched recursively.

        :returns: the readme file node, or None when nothing matches
        """
        nodes = commit.get_nodes(path)
        matches = self._match_readmes(nodes)
        matches = self._sort_according_to_priority(matches)
        if matches:
            return matches[0].node

        paths = self._match_paths(nodes)
        paths = self._sort_paths_according_to_priority(paths)
        for path in paths:
            match = self.search(commit, path=path)
            if match:
                return match

        return None

    def _match_readmes(self, nodes):
        # yield a ReadmeMatch for every file whose basename looks like a readme
        for node in nodes:
            if not node.is_file():
                continue
            path = node.path.rsplit('/', 1)[-1]
            match = self.readme_re.match(path)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        # yield paths of sub-directories that may contain a readme (doc*/docs*)
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.path)
            if match:
                yield node.path

    def _priority(self, extension):
        # extensions handled by the configured default renderer rank first
        renderer_priority = (
            0 if extension in self._renderer_extensions else 1)
        extension_priority = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_priority, extension_priority)

    def _sort_according_to_priority(self, matches):

        def priority_and_path(match):
            return (match.priority, match.path)

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):

        def priority_and_path(path):
            return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)

        return sorted(paths, key=priority_and_path)


class ReadmeMatch:
    """A readme candidate: the file node plus its computed priority."""

    def __init__(self, node, match, priority):
        self.node = node  # the matched readme file node
        self._match = match  # regex match object for the readme basename
        self.priority = priority  # (renderer_priority, extension_priority)

    @property
    def path(self):
        return self.node.path

    def __repr__(self):
        # FIX: closing '>' was missing from the repr format string
        return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
General Comments 0
You need to be logged in to leave comments. Login now