##// END OF EJS Templates
git: fixed readme searcher for libgit2
marcink -
r4345:dc3889b3 default
parent child Browse files
Show More
@@ -1,493 +1,494 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2020 RhodeCode GmbH
3 # Copyright (C) 2014-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT commit module
22 GIT commit module
23 """
23 """
24
24
25 import re
25 import re
26 import stat
26 import stat
27 from itertools import chain
27 from itertools import chain
28 from StringIO import StringIO
28 from StringIO import StringIO
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode.lib.datelib import utcdate_fromtimestamp
32 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 from rhodecode.lib.utils import safe_unicode, safe_str
33 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils2 import safe_int
34 from rhodecode.lib.utils2 import safe_int
35 from rhodecode.lib.vcs.conf import settings
35 from rhodecode.lib.vcs.conf import settings
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 from rhodecode.lib.vcs.nodes import (
38 from rhodecode.lib.vcs.nodes import (
39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 RemovedFileNodesGenerator, LargeFileNode)
41 RemovedFileNodesGenerator, LargeFileNode)
42 from rhodecode.lib.vcs.compat import configparser
42 from rhodecode.lib.vcs.compat import configparser
43
43
44
44
class GitCommit(base.BaseCommit):
    """
    Represents state of the repository at single commit id.
    """

    # Attributes that must never be fetched through the bulk remote call:
    # they are either computed locally via tree walks/subprocess, or are
    # mercurial-only properties that git does not support.
    _filter_pre_load = [
        # done through a more complex tree walk on parents
        "affected_files",
        # done through subprocess not remote call
        "children",
        # done through a more complex tree walk on parents
        "status",
        # mercurial specific property not supported here
        "_file_paths",
        # mercurial specific property not supported here
        'obsolete',
        # mercurial specific property not supported here
        'phase',
        # mercurial specific property not supported here
        'hidden'
    ]

    def __init__(self, repository, raw_id, idx, pre_load=None):
        """
        :param repository: owning `GitRepository` backend instance
        :param raw_id: full commit sha as provided by the backend
        :param idx: numeric index of the commit within the repository
        :param pre_load: optional list of attribute names to bulk-fetch
            with a single remote call instead of lazily, one by one
        """
        self.repository = repository
        self._remote = repository._remote
        # TODO: johbo: Tweak of raw_id should not be necessary
        self.raw_id = safe_str(raw_id)
        self.idx = idx

        self._set_bulk_properties(pre_load)

        # caches
        self._stat_modes = {}  # stat info for paths
        self._paths = {}  # path processed with parse_tree
        self.nodes = {}
        self._submodules = None

    def _set_bulk_properties(self, pre_load):
        """
        Fetch the attributes named in `pre_load` with one remote bulk
        request and store them directly in ``__dict__``, which short
        circuits the corresponding lazy properties. Names listed in
        `_filter_pre_load` are dropped first.
        """
        if not pre_load:
            return
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        result = self._remote.bulk_request(self.raw_id, pre_load)
        for attr, value in result.items():
            if attr in ["author", "message"]:
                if value:
                    value = safe_unicode(value)
            elif attr == "date":
                # remote returns (unix_ts, tz_offset)
                value = utcdate_fromtimestamp(*value)
            elif attr == "parents":
                value = self._make_commits(value)
            elif attr == "branch":
                # remote returns a list; commit can sit on many branches,
                # expose only the first one here
                value = value[0] if value else None
            self.__dict__[attr] = value

    @LazyProperty
    def _commit(self):
        # raw commit object as exposed by the remote backend
        return self._remote[self.raw_id]

    @LazyProperty
    def _tree_id(self):
        # id of the root tree object of this commit
        return self._remote[self._commit['tree']]['id']

    @LazyProperty
    def id(self):
        return self.raw_id

    @LazyProperty
    def short_id(self):
        # abbreviated sha, 12 characters
        return self.raw_id[:12]

    @LazyProperty
    def message(self):
        return safe_unicode(self._remote.message(self.id))

    @LazyProperty
    def committer(self):
        # NOTE(review): delegates to the same remote `author` call as
        # `author` below — git committer vs author are not distinguished here
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def author(self):
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def date(self):
        unix_ts, tz = self._remote.date(self.raw_id)
        return utcdate_fromtimestamp(unix_ts, tz)

    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed, deleted files for current commit
        """
        return self.changed, self.added, self.removed

    @LazyProperty
    def tags(self):
        """
        Returns list of tag names pointing at this commit.
        """
        tags = [safe_unicode(name) for name,
                commit_id in self.repository.tags.iteritems()
                if commit_id == self.raw_id]
        return tags

    @LazyProperty
    def commit_branches(self):
        """
        Returns list of branch names whose head is exactly this commit.
        """
        branches = []
        for name, commit_id in self.repository.branches.iteritems():
            if commit_id == self.raw_id:
                branches.append(name)
        return branches

    @LazyProperty
    def branch(self):
        branches = self._remote.branch(self.raw_id)

        if branches:
            # actually commit can have multiple branches in git
            return safe_unicode(branches[0])

    def _get_tree_id_for_path(self, path):
        """
        Resolve `path` to a ``[tree_id, tree_type]`` pair, caching the
        result in ``self._paths`` and the stat mode in ``self._stat_modes``.

        :raises NodeDoesNotExistError: when `path` is not present in the
            commit's tree
        """
        path = safe_str(path)
        if path in self._paths:
            return self._paths[path]

        tree_id = self._tree_id

        path = path.strip('/')
        if path == '':
            data = [tree_id, "tree"]
            self._paths[''] = data
            return data

        tree_id, tree_type, tree_mode = \
            self._remote.tree_and_type_for_path(self.raw_id, path)
        if tree_id is None:
            raise self.no_node_at_path(path)

        self._paths[path] = [tree_id, tree_type]
        self._stat_modes[path] = tree_mode

        # NOTE: the original re-checked `path not in self._paths` here and
        # raised; that branch was unreachable since the key is assigned
        # just above, so it has been removed.
        return self._paths[path]

    def _get_kind(self, path):
        """
        Map the tree entry type at `path` to a `NodeKind` constant, or
        ``None`` for unknown types.
        """
        tree_id, type_ = self._get_tree_id_for_path(path)
        if type_ == 'blob':
            return NodeKind.FILE
        elif type_ == 'tree':
            return NodeKind.DIR
        elif type_ == 'link':
            return NodeKind.SUBMODULE
        return None

    def _get_filectx(self, path):
        """
        Normalize `path` and ensure it points at a file in this commit.

        :raises CommitError: when `path` is not a file
        """
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise CommitError(
                "File does not exist for commit %s at '%s'" % (self.raw_id, path))
        return path

    def _get_file_nodes(self):
        # flatten the file lists produced by walk() (t[2] is the files part)
        return chain(*(t[2] for t in self.walk()))

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parent_ids = self._remote.parents(self.id)
        return self._make_commits(parent_ids)

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """

        children = self._remote.children(self.raw_id)
        return self._make_commits(children)

    def _make_commits(self, commit_ids):
        """
        Turn a list of commit ids into `GitCommit` instances.
        """
        def commit_maker(_commit_id):
            # FIX: use the bound parameter instead of closing over the
            # comprehension variable `commit_id`; the old form only worked
            # by accident of Python 2 list-comprehension scope leakage.
            return self.repository.get_commit(commit_id=_commit_id)

        return [commit_maker(commit_id) for commit_id in commit_ids]

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at the given `path`.
        """
        path = safe_str(path)
        # ensure path is traversed
        self._get_tree_id_for_path(path)
        return self._stat_modes[path]

    def is_link(self, path):
        """
        Returns ``True`` if the file at `path` is a symbolic link.
        """
        return stat.S_ISLNK(self.get_file_mode(path))

    def is_node_binary(self, path):
        """
        Returns ``True`` if the blob at `path` is binary content.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.is_binary(tree_id)

    def get_file_content(self, path):
        """
        Returns content of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_as_pretty_string(tree_id)

    def get_file_content_streamed(self, path):
        """
        Returns content of the file at `path` via the streaming variant of
        the remote blob call.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        stream_method = getattr(self._remote, 'stream:blob_as_pretty_string')
        return stream_method(tree_id)

    def get_file_size(self, path):
        """
        Returns size of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_raw_length(tree_id)

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of `GitCommit` objects for
        which file at given `path` has been modified.
        """

        path = self._get_filectx(path)
        hist = self._remote.node_history(self.raw_id, path, limit)
        return [
            self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in hist]

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, commit_id, commit lazy loader and line
        """

        result = self._remote.node_annotate(self.raw_id, path)

        for ln_no, commit_id, content in result:
            yield (
                ln_no, commit_id,
                # FIX: bind commit_id as a default argument; a bare closure
                # late-binds the loop variable, so every loader would have
                # resolved to the LAST annotated commit once the loop ended.
                lambda commit_id=commit_id: self.repository.get_commit(
                    commit_id=commit_id, pre_load=pre_load),
                content)

    def get_nodes(self, path):
        """
        Returns sorted Dir/File/SubModule nodes for the directory at
        `path`, caching each node in ``self.nodes``.

        :raises CommitError: when `path` is not a directory, or a tree
            entry has an unexpected type
        """
        if self._get_kind(path) != NodeKind.DIR:
            raise CommitError(
                "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
        path = self._fix_path(path)

        tree_id, _ = self._get_tree_id_for_path(path)

        dirnodes = []
        filenodes = []

        # extracted tree ID gives us our files...
        bytes_path = safe_str(path)  # libgit operates on bytes
        for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
            if type_ == 'link':
                url = self._get_submodule_url('/'.join((bytes_path, name)))
                dirnodes.append(SubModuleNode(
                    name, url=url, commit=id_, alias=self.repository.alias))
                continue

            if bytes_path != '':
                obj_path = '/'.join((bytes_path, name))
            else:
                obj_path = name
            if obj_path not in self._stat_modes:
                self._stat_modes[obj_path] = stat_

            if type_ == 'tree':
                dirnodes.append(DirNode(obj_path, commit=self))
            elif type_ == 'blob':
                filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
            else:
                raise CommitError(
                    "Requested object should be Tree or Blob, is %s", type_)

        nodes = dirnodes + filenodes
        for node in nodes:
            if node.path not in self.nodes:
                self.nodes[node.path] = node
        nodes.sort()
        return nodes

    def get_node(self, path, pre_load=None):
        """
        Returns the single node (Root/Dir/File/SubModule) at `path`,
        using ``self.nodes`` as a cache.

        :raises NodeDoesNotExistError: when `path` cannot be resolved
        """
        if isinstance(path, unicode):
            path = path.encode('utf-8')
        path = self._fix_path(path)
        if path not in self.nodes:
            try:
                tree_id, type_ = self._get_tree_id_for_path(path)
            except CommitError:
                raise NodeDoesNotExistError(
                    "Cannot find one of parents' directories for a given "
                    "path: %s" % path)

            if type_ in ['link', 'commit']:
                url = self._get_submodule_url(path)
                node = SubModuleNode(path, url=url, commit=tree_id,
                                     alias=self.repository.alias)
            elif type_ == 'tree':
                if path == '':
                    node = RootNode(commit=self)
                else:
                    node = DirNode(path, commit=self)
            elif type_ == 'blob':
                node = FileNode(path, commit=self, pre_load=pre_load)
                self._stat_modes[path] = node.mode
            else:
                raise self.no_node_at_path(path)

            # cache node
            self.nodes[path] = node

        return self.nodes[path]

    def get_largefile_node(self, path):
        """
        If the blob at `path` is a largefile pointer that is present in the
        largefiles store, return a `LargeFileNode` for it; otherwise
        implicitly return ``None``.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        pointer_spec = self._remote.is_large_file(tree_id)

        if pointer_spec:
            # content of that file regular FileNode is the hash of largefile
            file_id = pointer_spec.get('oid_hash')
            if self._remote.in_largefiles_store(file_id):
                lf_path = self._remote.store_path(file_id)
                return LargeFileNode(lf_path, commit=self, org_path=path)

    @LazyProperty
    def affected_files(self):
        """
        Gets a fast accessible file changes for given commit
        """
        added, modified, deleted = self._changes_cache
        return list(added.union(modified).union(deleted))

    @LazyProperty
    def _changes_cache(self):
        """
        Compute ``(added, modified, deleted)`` path sets by diffing this
        commit's tree against each parent (or the empty tree for a root
        commit).
        """
        added = set()
        modified = set()
        deleted = set()
        _r = self._remote

        parents = self.parents
        if not self.parents:
            parents = [base.EmptyCommit()]
        for parent in parents:
            if isinstance(parent, base.EmptyCommit):
                oid = None
            else:
                oid = parent.raw_id
            changes = _r.tree_changes(oid, self.raw_id)
            for (oldpath, newpath), (_, _), (_, _) in changes:
                if newpath and oldpath:
                    modified.add(newpath)
                elif newpath and not oldpath:
                    added.add(newpath)
                elif not newpath and oldpath:
                    deleted.add(oldpath)
        return added, modified, deleted

    def _get_paths_for_status(self, status):
        """
        Returns sorted list of paths for given ``status``.

        :param status: one of: *added*, *modified* or *deleted*
        """
        added, modified, deleted = self._changes_cache
        return sorted({
            'added': list(added),
            'modified': list(modified),
            'deleted': list(deleted)}[status]
        )

    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        if not self.parents:
            return list(self._get_file_nodes())
        return AddedFileNodesGenerator(self.added_paths, self)

    @LazyProperty
    def added_paths(self):
        return [n for n in self._get_paths_for_status('added')]

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return ChangedFileNodesGenerator(self.changed_paths, self)

    @LazyProperty
    def changed_paths(self):
        return [n for n in self._get_paths_for_status('modified')]

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return RemovedFileNodesGenerator(self.removed_paths, self)

    @LazyProperty
    def removed_paths(self):
        return [n for n in self._get_paths_for_status('deleted')]

    def _get_submodule_url(self, submodule_path):
        """
        Resolve `submodule_path` to its url by parsing the commit's
        ``.gitmodules`` file once and caching the mapping. Returns ``None``
        when no ``.gitmodules`` exists or the path is not a submodule.
        """
        git_modules_path = '.gitmodules'

        if self._submodules is None:
            self._submodules = {}

            try:
                submodules_node = self.get_node(git_modules_path)
            except NodeDoesNotExistError:
                return None

            # ConfigParser fails if there are whitespaces, also it needs an iterable
            # file like content
            def iter_content(_content):
                for line in _content.splitlines():
                    yield line

            parser = configparser.RawConfigParser()
            parser.read_file(iter_content(submodules_node.content))

            for section in parser.sections():
                path = parser.get(section, 'path')
                url = parser.get(section, 'url')
                if path and url:
                    self._submodules[path.strip('/')] = url

        return self._submodules.get(submodule_path.strip('/'))
@@ -1,1172 +1,1172 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2020 RhodeCode GmbH
3 # Copyright (C) 2010-2020 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import os
21 import os
22 import re
22 import re
23 import shutil
23 import shutil
24 import time
24 import time
25 import logging
25 import logging
26 import traceback
26 import traceback
27 import datetime
27 import datetime
28
28
29 from pyramid.threadlocal import get_current_request
29 from pyramid.threadlocal import get_current_request
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode import events
32 from rhodecode import events
33 from rhodecode.lib.auth import HasUserGroupPermissionAny
33 from rhodecode.lib.auth import HasUserGroupPermissionAny
34 from rhodecode.lib.caching_query import FromCache
34 from rhodecode.lib.caching_query import FromCache
35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
35 from rhodecode.lib.exceptions import AttachedForksError, AttachedPullRequestsError
36 from rhodecode.lib.hooks_base import log_delete_repository
36 from rhodecode.lib.hooks_base import log_delete_repository
37 from rhodecode.lib.user_log_filter import user_log_filter
37 from rhodecode.lib.user_log_filter import user_log_filter
38 from rhodecode.lib.utils import make_db_config
38 from rhodecode.lib.utils import make_db_config
39 from rhodecode.lib.utils2 import (
39 from rhodecode.lib.utils2 import (
40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
40 safe_str, safe_unicode, remove_prefix, obfuscate_url_pw,
41 get_current_rhodecode_user, safe_int, action_logger_generic)
41 get_current_rhodecode_user, safe_int, action_logger_generic)
42 from rhodecode.lib.vcs.backends import get_backend
42 from rhodecode.lib.vcs.backends import get_backend
43 from rhodecode.model import BaseModel
43 from rhodecode.model import BaseModel
44 from rhodecode.model.db import (
44 from rhodecode.model.db import (
45 _hash_key, func, case, joinedload, or_, in_filter_generator,
45 _hash_key, func, case, joinedload, or_, in_filter_generator,
46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
46 Session, Repository, UserRepoToPerm, UserGroupRepoToPerm,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
47 UserRepoGroupToPerm, UserGroupRepoGroupToPerm, User, Permission,
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
48 Statistics, UserGroup, RepoGroup, RepositoryField, UserLog)
49 from rhodecode.model.settings import VcsSettingsModel
49 from rhodecode.model.settings import VcsSettingsModel
50
50
51 log = logging.getLogger(__name__)
51 log = logging.getLogger(__name__)
52
52
53
53
54 class RepoModel(BaseModel):
54 class RepoModel(BaseModel):
55
55
56 cls = Repository
56 cls = Repository
57
57
def _get_user_group(self, users_group):
    """Resolve *users_group* (instance, id or group name) to a UserGroup."""
    return self._get_instance(
        UserGroup, users_group, callback=UserGroup.get_by_group_name)
def _get_repo_group(self, repo_group):
    """Resolve *repo_group* (instance, id or group name) to a RepoGroup."""
    return self._get_instance(
        RepoGroup, repo_group, callback=RepoGroup.get_by_group_name)
def _create_default_perms(self, repository, private):
    """Build the default-user permission row for *repository*.

    The baseline is the default user's first ``repository.*`` permission
    (falling back to ``repository.read``); private repositories always get
    ``repository.none``.
    """
    def_user = User.get_default_user()
    baseline = next(
        (p.permission.permission_name for p in def_user.user_perms
         if p.permission.permission_name.startswith('repository.')),
        'repository.read')

    default_perm = 'repository.none' if private else baseline

    repo_to_perm = UserRepoToPerm()
    repo_to_perm.permission = Permission.get_by_key(default_perm)
    repo_to_perm.repository = repository
    repo_to_perm.user_id = def_user.user_id

    return repo_to_perm
@LazyProperty
def repos_path(self):
    """
    Gets the repositories root path from database
    """
    return VcsSettingsModel(sa=self.sa).get_repos_location()
def get(self, repo_id):
    """Fetch a Repository by its numeric primary key, or None."""
    query = self.sa.query(Repository).filter(Repository.repo_id == repo_id)
    return query.scalar()
def get_repo(self, repository):
    """
    Resolve *repository* (instance, id or repo name) to a Repository
    object via the shared BaseModel helper.
    """
    return self._get_repo(repository)
def get_by_repo_name(self, repo_name, cache=False):
    """Fetch a Repository by name; optionally served from the short SQL cache."""
    query = self.sa.query(Repository) \
        .filter(Repository.repo_name == repo_name)

    if cache:
        cache_key = "get_repo_%s" % _hash_key(repo_name)
        query = query.options(FromCache("sql_cache_short", cache_key))

    return query.scalar()
112 def _extract_id_from_repo_name(self, repo_name):
112 def _extract_id_from_repo_name(self, repo_name):
113 if repo_name.startswith('/'):
113 if repo_name.startswith('/'):
114 repo_name = repo_name.lstrip('/')
114 repo_name = repo_name.lstrip('/')
115 by_id_match = re.match(r'^_(\d{1,})', repo_name)
115 by_id_match = re.match(r'^_(\d{1,})', repo_name)
116 if by_id_match:
116 if by_id_match:
117 return by_id_match.groups()[0]
117 return by_id_match.groups()[0]
118
118
def get_repo_by_id(self, repo_name):
    """
    Look up a repository addressed by the special by-id URL form,
    e.g. ``_11/repo_name``.

    :param repo_name: url fragment that may start with ``_<id>``
    :return: Repository object when the id matched, otherwise None
    """
    try:
        repo_id = self._extract_id_from_repo_name(repo_name)
        if repo_id:
            return self.get(repo_id)
    except Exception:
        # best-effort lookup: a malformed name must not bubble up
        log.exception('Failed to extract repo_name from URL')
    return None
def get_repos_for_root(self, root, traverse=False):
    """Return repositories under *root*.

    With ``traverse`` set, every repo whose name starts with *root* is
    matched (recursive); otherwise only direct children of the given
    RepoGroup (or the top level when *root* is falsy).

    :raises ValueError: when not traversing and *root* is neither falsy
        nor a RepoGroup instance.
    """
    if traverse:
        prefix = u'{}%'.format(safe_unicode(root))
        return Repository.query().filter(
            Repository.repo_name.like(prefix)).all()

    if root and not isinstance(root, RepoGroup):
        raise ValueError(
            'Root must be an instance '
            'of RepoGroup, got:{} instead'.format(type(root)))
    return Repository.query().filter(Repository.group == root).all()
def get_url(self, repo, request=None, permalink=False):
    """Summary-page URL for *repo*; a permalink uses the stable ``_<id>`` form.

    Returns None when no request object is available.
    """
    request = request or get_current_request()
    if not request:
        return

    if permalink:
        name = '_{}'.format(safe_str(repo.repo_id))
    else:
        name = safe_str(repo.repo_name)
    return request.route_url('repo_summary', repo_name=name)
def get_commit_url(self, repo, commit_id, request=None, permalink=False):
    """Commit-page URL for *commit_id* in *repo*.

    Returns None when no request object is available.
    """
    request = request or get_current_request()
    if not request:
        return

    if permalink:
        # NOTE(review): unlike get_url, the permalink form here has no
        # '_' prefix before the repo id -- confirm this is intended
        name = safe_str(repo.repo_id)
    else:
        name = safe_str(repo.repo_name)
    return request.route_url(
        'repo_commit', repo_name=name, commit_id=commit_id)
def get_repo_log(self, repo, filter_term):
    """User-log entries for *repo* (matched by id or name), newest first,
    narrowed by *filter_term*."""
    query = UserLog.query() \
        .filter(or_(UserLog.repository_id == repo.repo_id,
                    UserLog.repository_name == repo.repo_name)) \
        .options(joinedload(UserLog.user)) \
        .options(joinedload(UserLog.repository)) \
        .order_by(UserLog.action_date.desc())

    return user_log_filter(query, filter_term)
@classmethod
def update_commit_cache(cls, repositories=None):
    """Refresh the cached commit info for *repositories* (all repos when
    not given)."""
    for repo in (repositories or Repository.getAll()):
        repo.update_commit_cache()
def get_repos_as_dict(self, repo_list=None, admin=False,
                      super_user_actions=False, short_name=None):
    """
    Render *repo_list* into a list of row dicts for the repositories
    data table.

    :param repo_list: iterable of repository rows (ORM objects or the raw
        column tuples produced by ``get_repos_data_table``)
    :param admin: admin view -- adds the "action" column and shifts
        displayed timestamps to local time
    :param super_user_actions: passed through to the actions renderer
    :param short_name: force short/long repo-name rendering; when None the
        name is shortened for non-admin views
    :return: list of dicts, one per repository, with pre-rendered HTML cells
    """
    # partial renderer bound to the shared data-table template
    _render = get_current_request().get_partial_renderer(
        'rhodecode:templates/data_table/_dt_elements.mako')
    c = _render.get_call_context()
    h = _render.get_helpers()

    def quick_menu(repo_name):
        # dropdown quick-links cell
        return _render('quick_menu', repo_name)

    def repo_lnk(name, rtype, rstate, private, archived, fork_of):
        # repo-name cell; short name defaults to "not admin" unless forced
        if short_name is not None:
            short_name_var = short_name
        else:
            short_name_var = not admin
        return _render('repo_name', name, rtype, rstate, private, archived, fork_of,
                       short_name=short_name_var, admin=False)

    def last_change(last_change):
        # admin view shows naive timestamps shifted into the server's
        # local time (stored values are presumably UTC -- TODO confirm)
        if admin and isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
            ts = time.time()
            utc_offset = (datetime.datetime.fromtimestamp(ts)
                          - datetime.datetime.utcfromtimestamp(ts)).total_seconds()
            last_change = last_change + datetime.timedelta(seconds=utc_offset)

        return _render("last_change", last_change)

    def rss_lnk(repo_name):
        return _render("rss", repo_name)

    def atom_lnk(repo_name):
        return _render("atom", repo_name)

    def last_rev(repo_name, cs_cache):
        # last-commit cell built from the cached changeset metadata
        return _render('revision', repo_name, cs_cache.get('revision'),
                       cs_cache.get('raw_id'), cs_cache.get('author'),
                       cs_cache.get('message'), cs_cache.get('date'))

    def desc(desc):
        return _render('repo_desc', desc, c.visual.stylify_metatags)

    def state(repo_state):
        return _render("repo_state", repo_state)

    def repo_actions(repo_name):
        return _render('repo_actions', repo_name, super_user_actions)

    def user_profile(username):
        return _render('user_profile', username)

    repos_data = []
    for repo in repo_list:
        # NOTE(marcink): because we use only raw column we need to load it like that
        changeset_cache = Repository._load_changeset_cache(
            repo.repo_id, repo._changeset_cache)

        row = {
            "menu": quick_menu(repo.repo_name),

            "name": repo_lnk(repo.repo_name, repo.repo_type, repo.repo_state,
                             repo.private, repo.archived, repo.fork),

            "desc": desc(h.escape(repo.description)),

            "last_change": last_change(repo.updated_on),

            "last_changeset": last_rev(repo.repo_name, changeset_cache),
            "last_changeset_raw": changeset_cache.get('revision'),

            "owner": user_profile(repo.User.username),

            "state": state(repo.repo_state),
            "rss": rss_lnk(repo.repo_name),
            "atom": atom_lnk(repo.repo_name),
        }
        if admin:
            # extra column only shown on admin data tables
            row.update({
                "action": repo_actions(repo.repo_name),
            })
        repos_data.append(row)

    return repos_data
def get_repos_data_table(
        self, draw, start, limit,
        search_q, order_by, order_dir,
        auth_user, repo_group_id):
    """
    Build one page of the repositories data-table for a repo group,
    respecting *auth_user*'s read permissions.

    :param draw: DataTables draw counter, echoed back unchanged
    :param start: pagination offset
    :param limit: page size
    :param search_q: search term (currently unused here -- TODO confirm
        whether filtering is expected)
    :param order_by: column key; 'repo_name' and 'user_username' are
        special-cased, anything else is looked up on Repository
    :param order_dir: 'asc' or descending otherwise
    :param auth_user: user whose permissions gate the listing
    :param repo_group_id: parent repo group id
    :return: dict in DataTables response shape (draw/data/recordsTotal/
        recordsFiltered)
    """
    # local import to avoid a circular module dependency
    from rhodecode.model.scm import RepoList

    _perms = ['repository.read', 'repository.write', 'repository.admin']

    # all repos in the group, then filtered down by permission
    repos = Repository.query() \
        .filter(Repository.group_id == repo_group_id) \
        .all()
    auth_repo_list = RepoList(
        repos, perm_set=_perms,
        extra_kwargs=dict(user=auth_user))

    # -1 sentinel keeps the IN clause valid even when nothing is allowed
    allowed_ids = [-1]
    for repo in auth_repo_list:
        allowed_ids.append(repo.repo_id)

    repos_data_total_count = Repository.query() \
        .filter(Repository.group_id == repo_group_id) \
        .filter(or_(
            # generate multiple IN to fix limitation problems
            *in_filter_generator(Repository.repo_id, allowed_ids))
        ) \
        .count()

    # raw-column query: only the fields the table renders, plus the owner
    base_q = Session.query(
        Repository.repo_id,
        Repository.repo_name,
        Repository.description,
        Repository.repo_type,
        Repository.repo_state,
        Repository.private,
        Repository.archived,
        Repository.fork,
        Repository.updated_on,
        Repository._changeset_cache,
        User,
    ) \
        .filter(Repository.group_id == repo_group_id) \
        .filter(or_(
            # generate multiple IN to fix limitation problems
            *in_filter_generator(Repository.repo_id, allowed_ids))
        ) \
        .join(User, User.user_id == Repository.user_id) \
        .group_by(Repository, User)

    repos_data_total_filtered_count = base_q.count()

    sort_defined = False
    if order_by == 'repo_name':
        # case-insensitive name sort
        sort_col = func.lower(Repository.repo_name)
        sort_defined = True
    elif order_by == 'user_username':
        sort_col = User.username
    else:
        # fall back to any matching Repository column; None disables sorting
        sort_col = getattr(Repository, order_by, None)

    if sort_defined or sort_col:
        if order_dir == 'asc':
            sort_col = sort_col.asc()
        else:
            sort_col = sort_col.desc()

        base_q = base_q.order_by(sort_col)
    base_q = base_q.offset(start).limit(limit)

    repos_list = base_q.all()

    repos_data = RepoModel().get_repos_as_dict(
        repo_list=repos_list, admin=False)

    data = ({
        'draw': draw,
        'data': repos_data,
        'recordsTotal': repos_data_total_count,
        'recordsFiltered': repos_data_total_filtered_count,
    })
    return data
def _get_defaults(self, repo_name):
    """
    Gets information about repository, and returns a dict for
    usage in forms

    :param repo_name: full repository name to load defaults for
    :return: dict of form defaults, or None when the repo does not exist
    """

    repo_info = Repository.get_by_repo_name(repo_name)

    if repo_info is None:
        return None

    defaults = repo_info.get_dict()
    # forms edit the short name; the group part is handled separately
    defaults['repo_name'] = repo_info.just_name

    groups = repo_info.groups_with_parents
    parent_group = groups[-1] if groups else None

    # we use -1 as this is how in HTML, we mark an empty group
    defaults['repo_group'] = getattr(parent_group, 'group_id', -1)

    # 'strip' marks form keys whose model attribute lacks the 'repo_' prefix
    keys_to_process = (
        {'k': 'repo_type', 'strip': False},
        {'k': 'repo_enable_downloads', 'strip': True},
        {'k': 'repo_description', 'strip': True},
        {'k': 'repo_enable_locking', 'strip': True},
        {'k': 'repo_landing_rev', 'strip': True},
        {'k': 'clone_uri', 'strip': False},
        {'k': 'push_uri', 'strip': False},
        {'k': 'repo_private', 'strip': True},
        {'k': 'repo_enable_statistics', 'strip': True}
    )

    for item in keys_to_process:
        attr = item['k']
        if item['strip']:
            attr = remove_prefix(item['k'], 'repo_')

        val = defaults[attr]
        if item['k'] == 'repo_landing_rev':
            # stored as a (type, value) pair; the form expects "type:value"
            val = ':'.join(defaults[attr])
        defaults[item['k']] = val
        if item['k'] == 'clone_uri':
            # obfuscated variants for display (credentials hidden)
            defaults['clone_uri_hidden'] = repo_info.clone_uri_hidden
        if item['k'] == 'push_uri':
            defaults['push_uri_hidden'] = repo_info.push_uri_hidden

    # fill owner
    if repo_info.user:
        defaults.update({'user': repo_info.user.username})
    else:
        # orphaned repo: show the first super-admin as replacement owner
        replacement_user = User.get_first_super_admin().username
        defaults.update({'user': replacement_user})

    return defaults
def update(self, repo, **kwargs):
    """
    Update a repository's DB record (and, on rename, its filesystem
    location) from form-style keyword arguments.

    :param repo: repository instance, id or name to update
    :param kwargs: form fields; 'repo_name' is required, other recognised
        keys are optional; extra-field values arrive prefixed with
        ``RepositoryField.PREFIX``
    :return: the updated Repository instance
    :raises Exception: re-raises anything that went wrong, after logging
    """
    try:
        cur_repo = self._get_repo(repo)
        # remember the original name to detect a rename at the end
        source_repo_name = cur_repo.repo_name
        if 'user' in kwargs:
            cur_repo.user = User.get_by_username(kwargs['user'])

        if 'repo_group' in kwargs:
            cur_repo.group = RepoGroup.get(kwargs['repo_group'])
        log.debug('Updating repo %s with params:%s', cur_repo, kwargs)

        # (strip_flag, form_key); strip removes the 'repo_' prefix to
        # obtain the model attribute name
        update_keys = [
            (1, 'repo_description'),
            (1, 'repo_landing_rev'),
            (1, 'repo_private'),
            (1, 'repo_enable_downloads'),
            (1, 'repo_enable_locking'),
            (1, 'repo_enable_statistics'),
            (0, 'clone_uri'),
            (0, 'push_uri'),
            (0, 'fork_id')
        ]
        for strip, k in update_keys:
            if k in kwargs:
                val = kwargs[k]
                if strip:
                    k = remove_prefix(k, 'repo_')

                setattr(cur_repo, k, val)

        new_name = cur_repo.get_new_name(kwargs['repo_name'])
        cur_repo.repo_name = new_name

        # if private flag is set, reset default permission to NONE
        if kwargs.get('repo_private'):
            EMPTY_PERM = 'repository.none'
            RepoModel().grant_user_permission(
                repo=cur_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM
            )

        # handle extra fields
        for field in filter(lambda k: k.startswith(RepositoryField.PREFIX), kwargs):
            k = RepositoryField.un_prefix_key(field)
            ex_field = RepositoryField.get_by_key_name(
                key=k, repo=cur_repo)
            if ex_field:
                ex_field.field_value = kwargs[field]
                self.sa.add(ex_field)

        self.sa.add(cur_repo)

        if source_repo_name != new_name:
            # rename repository
            self._rename_filesystem_repo(
                old=source_repo_name, new=new_name)

        return cur_repo
    except Exception:
        log.error(traceback.format_exc())
        raise
482 def _create_repo(self, repo_name, repo_type, description, owner,
482 def _create_repo(self, repo_name, repo_type, description, owner,
483 private=False, clone_uri=None, repo_group=None,
483 private=False, clone_uri=None, repo_group=None,
484 landing_rev='rev:tip', fork_of=None,
484 landing_rev='rev:tip', fork_of=None,
485 copy_fork_permissions=False, enable_statistics=False,
485 copy_fork_permissions=False, enable_statistics=False,
486 enable_locking=False, enable_downloads=False,
486 enable_locking=False, enable_downloads=False,
487 copy_group_permissions=False,
487 copy_group_permissions=False,
488 state=Repository.STATE_PENDING):
488 state=Repository.STATE_PENDING):
489 """
489 """
490 Create repository inside database with PENDING state, this should be
490 Create repository inside database with PENDING state, this should be
491 only executed by create() repo. With exception of importing existing
491 only executed by create() repo. With exception of importing existing
492 repos
492 repos
493 """
493 """
494 from rhodecode.model.scm import ScmModel
494 from rhodecode.model.scm import ScmModel
495
495
496 owner = self._get_user(owner)
496 owner = self._get_user(owner)
497 fork_of = self._get_repo(fork_of)
497 fork_of = self._get_repo(fork_of)
498 repo_group = self._get_repo_group(safe_int(repo_group))
498 repo_group = self._get_repo_group(safe_int(repo_group))
499
499
500 try:
500 try:
501 repo_name = safe_unicode(repo_name)
501 repo_name = safe_unicode(repo_name)
502 description = safe_unicode(description)
502 description = safe_unicode(description)
503 # repo name is just a name of repository
503 # repo name is just a name of repository
504 # while repo_name_full is a full qualified name that is combined
504 # while repo_name_full is a full qualified name that is combined
505 # with name and path of group
505 # with name and path of group
506 repo_name_full = repo_name
506 repo_name_full = repo_name
507 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
507 repo_name = repo_name.split(Repository.NAME_SEP)[-1]
508
508
509 new_repo = Repository()
509 new_repo = Repository()
510 new_repo.repo_state = state
510 new_repo.repo_state = state
511 new_repo.enable_statistics = False
511 new_repo.enable_statistics = False
512 new_repo.repo_name = repo_name_full
512 new_repo.repo_name = repo_name_full
513 new_repo.repo_type = repo_type
513 new_repo.repo_type = repo_type
514 new_repo.user = owner
514 new_repo.user = owner
515 new_repo.group = repo_group
515 new_repo.group = repo_group
516 new_repo.description = description or repo_name
516 new_repo.description = description or repo_name
517 new_repo.private = private
517 new_repo.private = private
518 new_repo.archived = False
518 new_repo.archived = False
519 new_repo.clone_uri = clone_uri
519 new_repo.clone_uri = clone_uri
520 new_repo.landing_rev = landing_rev
520 new_repo.landing_rev = landing_rev
521
521
522 new_repo.enable_statistics = enable_statistics
522 new_repo.enable_statistics = enable_statistics
523 new_repo.enable_locking = enable_locking
523 new_repo.enable_locking = enable_locking
524 new_repo.enable_downloads = enable_downloads
524 new_repo.enable_downloads = enable_downloads
525
525
526 if repo_group:
526 if repo_group:
527 new_repo.enable_locking = repo_group.enable_locking
527 new_repo.enable_locking = repo_group.enable_locking
528
528
529 if fork_of:
529 if fork_of:
530 parent_repo = fork_of
530 parent_repo = fork_of
531 new_repo.fork = parent_repo
531 new_repo.fork = parent_repo
532
532
533 events.trigger(events.RepoPreCreateEvent(new_repo))
533 events.trigger(events.RepoPreCreateEvent(new_repo))
534
534
535 self.sa.add(new_repo)
535 self.sa.add(new_repo)
536
536
537 EMPTY_PERM = 'repository.none'
537 EMPTY_PERM = 'repository.none'
538 if fork_of and copy_fork_permissions:
538 if fork_of and copy_fork_permissions:
539 repo = fork_of
539 repo = fork_of
540 user_perms = UserRepoToPerm.query() \
540 user_perms = UserRepoToPerm.query() \
541 .filter(UserRepoToPerm.repository == repo).all()
541 .filter(UserRepoToPerm.repository == repo).all()
542 group_perms = UserGroupRepoToPerm.query() \
542 group_perms = UserGroupRepoToPerm.query() \
543 .filter(UserGroupRepoToPerm.repository == repo).all()
543 .filter(UserGroupRepoToPerm.repository == repo).all()
544
544
545 for perm in user_perms:
545 for perm in user_perms:
546 UserRepoToPerm.create(
546 UserRepoToPerm.create(
547 perm.user, new_repo, perm.permission)
547 perm.user, new_repo, perm.permission)
548
548
549 for perm in group_perms:
549 for perm in group_perms:
550 UserGroupRepoToPerm.create(
550 UserGroupRepoToPerm.create(
551 perm.users_group, new_repo, perm.permission)
551 perm.users_group, new_repo, perm.permission)
552 # in case we copy permissions and also set this repo to private
552 # in case we copy permissions and also set this repo to private
553 # override the default user permission to make it a private repo
553 # override the default user permission to make it a private repo
554 if private:
554 if private:
555 RepoModel(self.sa).grant_user_permission(
555 RepoModel(self.sa).grant_user_permission(
556 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
556 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
557
557
558 elif repo_group and copy_group_permissions:
558 elif repo_group and copy_group_permissions:
559 user_perms = UserRepoGroupToPerm.query() \
559 user_perms = UserRepoGroupToPerm.query() \
560 .filter(UserRepoGroupToPerm.group == repo_group).all()
560 .filter(UserRepoGroupToPerm.group == repo_group).all()
561
561
562 group_perms = UserGroupRepoGroupToPerm.query() \
562 group_perms = UserGroupRepoGroupToPerm.query() \
563 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
563 .filter(UserGroupRepoGroupToPerm.group == repo_group).all()
564
564
565 for perm in user_perms:
565 for perm in user_perms:
566 perm_name = perm.permission.permission_name.replace(
566 perm_name = perm.permission.permission_name.replace(
567 'group.', 'repository.')
567 'group.', 'repository.')
568 perm_obj = Permission.get_by_key(perm_name)
568 perm_obj = Permission.get_by_key(perm_name)
569 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
569 UserRepoToPerm.create(perm.user, new_repo, perm_obj)
570
570
571 for perm in group_perms:
571 for perm in group_perms:
572 perm_name = perm.permission.permission_name.replace(
572 perm_name = perm.permission.permission_name.replace(
573 'group.', 'repository.')
573 'group.', 'repository.')
574 perm_obj = Permission.get_by_key(perm_name)
574 perm_obj = Permission.get_by_key(perm_name)
575 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
575 UserGroupRepoToPerm.create(perm.users_group, new_repo, perm_obj)
576
576
577 if private:
577 if private:
578 RepoModel(self.sa).grant_user_permission(
578 RepoModel(self.sa).grant_user_permission(
579 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
579 repo=new_repo, user=User.DEFAULT_USER, perm=EMPTY_PERM)
580
580
581 else:
581 else:
582 perm_obj = self._create_default_perms(new_repo, private)
582 perm_obj = self._create_default_perms(new_repo, private)
583 self.sa.add(perm_obj)
583 self.sa.add(perm_obj)
584
584
585 # now automatically start following this repository as owner
585 # now automatically start following this repository as owner
586 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
586 ScmModel(self.sa).toggle_following_repo(new_repo.repo_id, owner.user_id)
587
587
588 # we need to flush here, in order to check if database won't
588 # we need to flush here, in order to check if database won't
589 # throw any exceptions, create filesystem dirs at the very end
589 # throw any exceptions, create filesystem dirs at the very end
590 self.sa.flush()
590 self.sa.flush()
591 events.trigger(events.RepoCreateEvent(new_repo))
591 events.trigger(events.RepoCreateEvent(new_repo))
592 return new_repo
592 return new_repo
593
593
594 except Exception:
594 except Exception:
595 log.error(traceback.format_exc())
595 log.error(traceback.format_exc())
596 raise
596 raise
597
597
598 def create(self, form_data, cur_user):
598 def create(self, form_data, cur_user):
599 """
599 """
600 Create repository using celery tasks
600 Create repository using celery tasks
601
601
602 :param form_data:
602 :param form_data:
603 :param cur_user:
603 :param cur_user:
604 """
604 """
605 from rhodecode.lib.celerylib import tasks, run_task
605 from rhodecode.lib.celerylib import tasks, run_task
606 return run_task(tasks.create_repo, form_data, cur_user)
606 return run_task(tasks.create_repo, form_data, cur_user)
607
607
    def update_permissions(self, repo, perm_additions=None, perm_updates=None,
                           perm_deletions=None, check_perms=True,
                           cur_user=None):
        """
        Apply a batch of permission changes (updates, additions, deletions)
        to the given repository and return a summary of what changed.

        Each entry in the three lists is a ``(member_id, perm, member_type)``
        tuple where ``member_type`` is ``'user'`` or ``'user_group'``.

        :param repo: Instance of Repository, repository_id, or repository name
        :param perm_updates: permission changes for existing members
        :param perm_additions: permissions for new members
        :param perm_deletions: permissions to revoke
        :param check_perms: when True, verify ``cur_user`` may alter the
            referenced user groups before granting/revoking on their behalf
        :param cur_user: acting user, used only for the user-group check
        :raises ValueError: on an unknown ``member_type``
        :returns: dict with 'added'/'updated'/'deleted' change records and a
            'default_user_changed' flag
        """
        if not perm_additions:
            perm_additions = []
        if not perm_updates:
            perm_updates = []
        if not perm_deletions:
            perm_deletions = []

        # permissions required on a user group before we may bind it
        req_perms = ('usergroup.read', 'usergroup.write', 'usergroup.admin')

        changes = {
            'added': [],
            'updated': [],
            'deleted': [],
            'default_user_changed': None
        }

        repo = self._get_repo(repo)

        # update permissions
        for member_id, perm, member_type in perm_updates:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                if member_name == User.DEFAULT_USER:
                    # NOTE(dan): detect if we changed permissions for default user
                    perm_obj = self.sa.query(UserRepoToPerm) \
                        .filter(UserRepoToPerm.user_id == member_id) \
                        .filter(UserRepoToPerm.repository == repo) \
                        .scalar()
                    if perm_obj and perm_obj.permission.permission_name != perm:
                        changes['default_user_changed'] = True

                # this updates also current one if found
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))
            changes['updated'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})

        # set new permissions
        for member_id, perm, member_type in perm_additions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.grant_user_permission(
                    repo=repo, user=member_id, perm=perm)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.grant_user_group_permission(
                        repo=repo, group_name=member_id, perm=perm)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))

            changes['added'].append({'type': member_type, 'id': member_id,
                                     'name': member_name, 'new_perm': perm})
        # delete permissions
        for member_id, perm, member_type in perm_deletions:
            member_id = int(member_id)
            if member_type == 'user':
                member_name = User.get(member_id).username
                self.revoke_user_permission(repo=repo, user=member_id)
            elif member_type == 'user_group':
                # check if we have permissions to alter this usergroup
                member_name = UserGroup.get(member_id).users_group_name
                if not check_perms or HasUserGroupPermissionAny(
                        *req_perms)(member_name, user=cur_user):
                    self.revoke_user_group_permission(
                        repo=repo, group_name=member_id)
            else:
                raise ValueError("member_type must be 'user' or 'user_group' "
                                 "got {} instead".format(member_type))

            changes['deleted'].append({'type': member_type, 'id': member_id,
                                       'name': member_name, 'new_perm': perm})
        return changes
699
699
700 def create_fork(self, form_data, cur_user):
700 def create_fork(self, form_data, cur_user):
701 """
701 """
702 Simple wrapper into executing celery task for fork creation
702 Simple wrapper into executing celery task for fork creation
703
703
704 :param form_data:
704 :param form_data:
705 :param cur_user:
705 :param cur_user:
706 """
706 """
707 from rhodecode.lib.celerylib import tasks, run_task
707 from rhodecode.lib.celerylib import tasks, run_task
708 return run_task(tasks.create_repo_fork, form_data, cur_user)
708 return run_task(tasks.create_repo_fork, form_data, cur_user)
709
709
710 def archive(self, repo):
710 def archive(self, repo):
711 """
711 """
712 Archive given repository. Set archive flag.
712 Archive given repository. Set archive flag.
713
713
714 :param repo:
714 :param repo:
715 """
715 """
716 repo = self._get_repo(repo)
716 repo = self._get_repo(repo)
717 if repo:
717 if repo:
718
718
719 try:
719 try:
720 repo.archived = True
720 repo.archived = True
721 self.sa.add(repo)
721 self.sa.add(repo)
722 self.sa.commit()
722 self.sa.commit()
723 except Exception:
723 except Exception:
724 log.error(traceback.format_exc())
724 log.error(traceback.format_exc())
725 raise
725 raise
726
726
    def delete(self, repo, forks=None, pull_requests=None, fs_remove=True, cur_user=None):
        """
        Delete given repository, forks parameter defines what to do with
        attached forks. Throws AttachedForksError if deleted repo has attached
        forks

        :param repo: Instance of Repository, repository_id, or repository name
        :param forks: str 'delete' or 'detach'
        :param pull_requests: str 'delete' or None
        :param fs_remove: remove(archive) repo from filesystem
        :param cur_user: acting user recorded in the deletion journal;
            defaults to the current session user
        :raises AttachedForksError: forks exist and no forks policy was given
        :raises AttachedPullRequestsError: pull requests exist and
            ``pull_requests`` is not 'delete'
        """
        if not cur_user:
            cur_user = getattr(get_current_rhodecode_user(), 'username', None)
        repo = self._get_repo(repo)
        if repo:
            # handle attached forks first: detach keeps them as standalone
            # repos, delete removes them recursively, otherwise bail out
            if forks == 'detach':
                for r in repo.forks:
                    r.fork = None
                    self.sa.add(r)
            elif forks == 'delete':
                for r in repo.forks:
                    self.delete(r, forks='delete')
            elif [f for f in repo.forks]:
                raise AttachedForksError()

            # check for pull requests
            pr_sources = repo.pull_requests_source
            pr_targets = repo.pull_requests_target
            if pull_requests != 'delete' and (pr_sources or pr_targets):
                raise AttachedPullRequestsError()

            # snapshot repo data before the row is gone, for the audit log
            old_repo_dict = repo.get_dict()
            events.trigger(events.RepoPreDeleteEvent(repo))
            try:
                self.sa.delete(repo)
                if fs_remove:
                    self._delete_filesystem_repo(repo)
                else:
                    log.debug('skipping removal from filesystem')
                old_repo_dict.update({
                    'deleted_by': cur_user,
                    'deleted_on': time.time(),
                })
                log_delete_repository(**old_repo_dict)
                events.trigger(events.RepoDeleteEvent(repo))
            except Exception:
                log.error(traceback.format_exc())
                raise
775
775
776 def grant_user_permission(self, repo, user, perm):
776 def grant_user_permission(self, repo, user, perm):
777 """
777 """
778 Grant permission for user on given repository, or update existing one
778 Grant permission for user on given repository, or update existing one
779 if found
779 if found
780
780
781 :param repo: Instance of Repository, repository_id, or repository name
781 :param repo: Instance of Repository, repository_id, or repository name
782 :param user: Instance of User, user_id or username
782 :param user: Instance of User, user_id or username
783 :param perm: Instance of Permission, or permission_name
783 :param perm: Instance of Permission, or permission_name
784 """
784 """
785 user = self._get_user(user)
785 user = self._get_user(user)
786 repo = self._get_repo(repo)
786 repo = self._get_repo(repo)
787 permission = self._get_perm(perm)
787 permission = self._get_perm(perm)
788
788
789 # check if we have that permission already
789 # check if we have that permission already
790 obj = self.sa.query(UserRepoToPerm) \
790 obj = self.sa.query(UserRepoToPerm) \
791 .filter(UserRepoToPerm.user == user) \
791 .filter(UserRepoToPerm.user == user) \
792 .filter(UserRepoToPerm.repository == repo) \
792 .filter(UserRepoToPerm.repository == repo) \
793 .scalar()
793 .scalar()
794 if obj is None:
794 if obj is None:
795 # create new !
795 # create new !
796 obj = UserRepoToPerm()
796 obj = UserRepoToPerm()
797 obj.repository = repo
797 obj.repository = repo
798 obj.user = user
798 obj.user = user
799 obj.permission = permission
799 obj.permission = permission
800 self.sa.add(obj)
800 self.sa.add(obj)
801 log.debug('Granted perm %s to %s on %s', perm, user, repo)
801 log.debug('Granted perm %s to %s on %s', perm, user, repo)
802 action_logger_generic(
802 action_logger_generic(
803 'granted permission: {} to user: {} on repo: {}'.format(
803 'granted permission: {} to user: {} on repo: {}'.format(
804 perm, user, repo), namespace='security.repo')
804 perm, user, repo), namespace='security.repo')
805 return obj
805 return obj
806
806
807 def revoke_user_permission(self, repo, user):
807 def revoke_user_permission(self, repo, user):
808 """
808 """
809 Revoke permission for user on given repository
809 Revoke permission for user on given repository
810
810
811 :param repo: Instance of Repository, repository_id, or repository name
811 :param repo: Instance of Repository, repository_id, or repository name
812 :param user: Instance of User, user_id or username
812 :param user: Instance of User, user_id or username
813 """
813 """
814
814
815 user = self._get_user(user)
815 user = self._get_user(user)
816 repo = self._get_repo(repo)
816 repo = self._get_repo(repo)
817
817
818 obj = self.sa.query(UserRepoToPerm) \
818 obj = self.sa.query(UserRepoToPerm) \
819 .filter(UserRepoToPerm.repository == repo) \
819 .filter(UserRepoToPerm.repository == repo) \
820 .filter(UserRepoToPerm.user == user) \
820 .filter(UserRepoToPerm.user == user) \
821 .scalar()
821 .scalar()
822 if obj:
822 if obj:
823 self.sa.delete(obj)
823 self.sa.delete(obj)
824 log.debug('Revoked perm on %s on %s', repo, user)
824 log.debug('Revoked perm on %s on %s', repo, user)
825 action_logger_generic(
825 action_logger_generic(
826 'revoked permission from user: {} on repo: {}'.format(
826 'revoked permission from user: {} on repo: {}'.format(
827 user, repo), namespace='security.repo')
827 user, repo), namespace='security.repo')
828
828
829 def grant_user_group_permission(self, repo, group_name, perm):
829 def grant_user_group_permission(self, repo, group_name, perm):
830 """
830 """
831 Grant permission for user group on given repository, or update
831 Grant permission for user group on given repository, or update
832 existing one if found
832 existing one if found
833
833
834 :param repo: Instance of Repository, repository_id, or repository name
834 :param repo: Instance of Repository, repository_id, or repository name
835 :param group_name: Instance of UserGroup, users_group_id,
835 :param group_name: Instance of UserGroup, users_group_id,
836 or user group name
836 or user group name
837 :param perm: Instance of Permission, or permission_name
837 :param perm: Instance of Permission, or permission_name
838 """
838 """
839 repo = self._get_repo(repo)
839 repo = self._get_repo(repo)
840 group_name = self._get_user_group(group_name)
840 group_name = self._get_user_group(group_name)
841 permission = self._get_perm(perm)
841 permission = self._get_perm(perm)
842
842
843 # check if we have that permission already
843 # check if we have that permission already
844 obj = self.sa.query(UserGroupRepoToPerm) \
844 obj = self.sa.query(UserGroupRepoToPerm) \
845 .filter(UserGroupRepoToPerm.users_group == group_name) \
845 .filter(UserGroupRepoToPerm.users_group == group_name) \
846 .filter(UserGroupRepoToPerm.repository == repo) \
846 .filter(UserGroupRepoToPerm.repository == repo) \
847 .scalar()
847 .scalar()
848
848
849 if obj is None:
849 if obj is None:
850 # create new
850 # create new
851 obj = UserGroupRepoToPerm()
851 obj = UserGroupRepoToPerm()
852
852
853 obj.repository = repo
853 obj.repository = repo
854 obj.users_group = group_name
854 obj.users_group = group_name
855 obj.permission = permission
855 obj.permission = permission
856 self.sa.add(obj)
856 self.sa.add(obj)
857 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
857 log.debug('Granted perm %s to %s on %s', perm, group_name, repo)
858 action_logger_generic(
858 action_logger_generic(
859 'granted permission: {} to usergroup: {} on repo: {}'.format(
859 'granted permission: {} to usergroup: {} on repo: {}'.format(
860 perm, group_name, repo), namespace='security.repo')
860 perm, group_name, repo), namespace='security.repo')
861
861
862 return obj
862 return obj
863
863
864 def revoke_user_group_permission(self, repo, group_name):
864 def revoke_user_group_permission(self, repo, group_name):
865 """
865 """
866 Revoke permission for user group on given repository
866 Revoke permission for user group on given repository
867
867
868 :param repo: Instance of Repository, repository_id, or repository name
868 :param repo: Instance of Repository, repository_id, or repository name
869 :param group_name: Instance of UserGroup, users_group_id,
869 :param group_name: Instance of UserGroup, users_group_id,
870 or user group name
870 or user group name
871 """
871 """
872 repo = self._get_repo(repo)
872 repo = self._get_repo(repo)
873 group_name = self._get_user_group(group_name)
873 group_name = self._get_user_group(group_name)
874
874
875 obj = self.sa.query(UserGroupRepoToPerm) \
875 obj = self.sa.query(UserGroupRepoToPerm) \
876 .filter(UserGroupRepoToPerm.repository == repo) \
876 .filter(UserGroupRepoToPerm.repository == repo) \
877 .filter(UserGroupRepoToPerm.users_group == group_name) \
877 .filter(UserGroupRepoToPerm.users_group == group_name) \
878 .scalar()
878 .scalar()
879 if obj:
879 if obj:
880 self.sa.delete(obj)
880 self.sa.delete(obj)
881 log.debug('Revoked perm to %s on %s', repo, group_name)
881 log.debug('Revoked perm to %s on %s', repo, group_name)
882 action_logger_generic(
882 action_logger_generic(
883 'revoked permission from usergroup: {} on repo: {}'.format(
883 'revoked permission from usergroup: {} on repo: {}'.format(
884 group_name, repo), namespace='security.repo')
884 group_name, repo), namespace='security.repo')
885
885
886 def delete_stats(self, repo_name):
886 def delete_stats(self, repo_name):
887 """
887 """
888 removes stats for given repo
888 removes stats for given repo
889
889
890 :param repo_name:
890 :param repo_name:
891 """
891 """
892 repo = self._get_repo(repo_name)
892 repo = self._get_repo(repo_name)
893 try:
893 try:
894 obj = self.sa.query(Statistics) \
894 obj = self.sa.query(Statistics) \
895 .filter(Statistics.repository == repo).scalar()
895 .filter(Statistics.repository == repo).scalar()
896 if obj:
896 if obj:
897 self.sa.delete(obj)
897 self.sa.delete(obj)
898 except Exception:
898 except Exception:
899 log.error(traceback.format_exc())
899 log.error(traceback.format_exc())
900 raise
900 raise
901
901
902 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
902 def add_repo_field(self, repo_name, field_key, field_label, field_value='',
903 field_type='str', field_desc=''):
903 field_type='str', field_desc=''):
904
904
905 repo = self._get_repo(repo_name)
905 repo = self._get_repo(repo_name)
906
906
907 new_field = RepositoryField()
907 new_field = RepositoryField()
908 new_field.repository = repo
908 new_field.repository = repo
909 new_field.field_key = field_key
909 new_field.field_key = field_key
910 new_field.field_type = field_type # python type
910 new_field.field_type = field_type # python type
911 new_field.field_value = field_value
911 new_field.field_value = field_value
912 new_field.field_desc = field_desc
912 new_field.field_desc = field_desc
913 new_field.field_label = field_label
913 new_field.field_label = field_label
914 self.sa.add(new_field)
914 self.sa.add(new_field)
915 return new_field
915 return new_field
916
916
917 def delete_repo_field(self, repo_name, field_key):
917 def delete_repo_field(self, repo_name, field_key):
918 repo = self._get_repo(repo_name)
918 repo = self._get_repo(repo_name)
919 field = RepositoryField.get_by_key_name(field_key, repo)
919 field = RepositoryField.get_by_key_name(field_key, repo)
920 if field:
920 if field:
921 self.sa.delete(field)
921 self.sa.delete(field)
922
922
923 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
923 def _create_filesystem_repo(self, repo_name, repo_type, repo_group,
924 clone_uri=None, repo_store_location=None,
924 clone_uri=None, repo_store_location=None,
925 use_global_config=False, install_hooks=True):
925 use_global_config=False, install_hooks=True):
926 """
926 """
927 makes repository on filesystem. It's group aware means it'll create
927 makes repository on filesystem. It's group aware means it'll create
928 a repository within a group, and alter the paths accordingly of
928 a repository within a group, and alter the paths accordingly of
929 group location
929 group location
930
930
931 :param repo_name:
931 :param repo_name:
932 :param alias:
932 :param alias:
933 :param parent:
933 :param parent:
934 :param clone_uri:
934 :param clone_uri:
935 :param repo_store_location:
935 :param repo_store_location:
936 """
936 """
937 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
937 from rhodecode.lib.utils import is_valid_repo, is_valid_repo_group
938 from rhodecode.model.scm import ScmModel
938 from rhodecode.model.scm import ScmModel
939
939
940 if Repository.NAME_SEP in repo_name:
940 if Repository.NAME_SEP in repo_name:
941 raise ValueError(
941 raise ValueError(
942 'repo_name must not contain groups got `%s`' % repo_name)
942 'repo_name must not contain groups got `%s`' % repo_name)
943
943
944 if isinstance(repo_group, RepoGroup):
944 if isinstance(repo_group, RepoGroup):
945 new_parent_path = os.sep.join(repo_group.full_path_splitted)
945 new_parent_path = os.sep.join(repo_group.full_path_splitted)
946 else:
946 else:
947 new_parent_path = repo_group or ''
947 new_parent_path = repo_group or ''
948
948
949 if repo_store_location:
949 if repo_store_location:
950 _paths = [repo_store_location]
950 _paths = [repo_store_location]
951 else:
951 else:
952 _paths = [self.repos_path, new_parent_path, repo_name]
952 _paths = [self.repos_path, new_parent_path, repo_name]
953 # we need to make it str for mercurial
953 # we need to make it str for mercurial
954 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
954 repo_path = os.path.join(*map(lambda x: safe_str(x), _paths))
955
955
956 # check if this path is not a repository
956 # check if this path is not a repository
957 if is_valid_repo(repo_path, self.repos_path):
957 if is_valid_repo(repo_path, self.repos_path):
958 raise Exception('This path %s is a valid repository' % repo_path)
958 raise Exception('This path %s is a valid repository' % repo_path)
959
959
960 # check if this path is a group
960 # check if this path is a group
961 if is_valid_repo_group(repo_path, self.repos_path):
961 if is_valid_repo_group(repo_path, self.repos_path):
962 raise Exception('This path %s is a valid group' % repo_path)
962 raise Exception('This path %s is a valid group' % repo_path)
963
963
964 log.info('creating repo %s in %s from url: `%s`',
964 log.info('creating repo %s in %s from url: `%s`',
965 repo_name, safe_unicode(repo_path),
965 repo_name, safe_unicode(repo_path),
966 obfuscate_url_pw(clone_uri))
966 obfuscate_url_pw(clone_uri))
967
967
968 backend = get_backend(repo_type)
968 backend = get_backend(repo_type)
969
969
970 config_repo = None if use_global_config else repo_name
970 config_repo = None if use_global_config else repo_name
971 if config_repo and new_parent_path:
971 if config_repo and new_parent_path:
972 config_repo = Repository.NAME_SEP.join(
972 config_repo = Repository.NAME_SEP.join(
973 (new_parent_path, config_repo))
973 (new_parent_path, config_repo))
974 config = make_db_config(clear_session=False, repo=config_repo)
974 config = make_db_config(clear_session=False, repo=config_repo)
975 config.set('extensions', 'largefiles', '')
975 config.set('extensions', 'largefiles', '')
976
976
977 # patch and reset hooks section of UI config to not run any
977 # patch and reset hooks section of UI config to not run any
978 # hooks on creating remote repo
978 # hooks on creating remote repo
979 config.clear_section('hooks')
979 config.clear_section('hooks')
980
980
981 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
981 # TODO: johbo: Unify this, hardcoded "bare=True" does not look nice
982 if repo_type == 'git':
982 if repo_type == 'git':
983 repo = backend(
983 repo = backend(
984 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
984 repo_path, config=config, create=True, src_url=clone_uri, bare=True,
985 with_wire={"cache": False})
985 with_wire={"cache": False})
986 else:
986 else:
987 repo = backend(
987 repo = backend(
988 repo_path, config=config, create=True, src_url=clone_uri,
988 repo_path, config=config, create=True, src_url=clone_uri,
989 with_wire={"cache": False})
989 with_wire={"cache": False})
990
990
991 if install_hooks:
991 if install_hooks:
992 repo.install_hooks()
992 repo.install_hooks()
993
993
994 log.debug('Created repo %s with %s backend',
994 log.debug('Created repo %s with %s backend',
995 safe_unicode(repo_name), safe_unicode(repo_type))
995 safe_unicode(repo_name), safe_unicode(repo_type))
996 return repo
996 return repo
997
997
998 def _rename_filesystem_repo(self, old, new):
998 def _rename_filesystem_repo(self, old, new):
999 """
999 """
1000 renames repository on filesystem
1000 renames repository on filesystem
1001
1001
1002 :param old: old name
1002 :param old: old name
1003 :param new: new name
1003 :param new: new name
1004 """
1004 """
1005 log.info('renaming repo from %s to %s', old, new)
1005 log.info('renaming repo from %s to %s', old, new)
1006
1006
1007 old_path = os.path.join(self.repos_path, old)
1007 old_path = os.path.join(self.repos_path, old)
1008 new_path = os.path.join(self.repos_path, new)
1008 new_path = os.path.join(self.repos_path, new)
1009 if os.path.isdir(new_path):
1009 if os.path.isdir(new_path):
1010 raise Exception(
1010 raise Exception(
1011 'Was trying to rename to already existing dir %s' % new_path
1011 'Was trying to rename to already existing dir %s' % new_path
1012 )
1012 )
1013 shutil.move(old_path, new_path)
1013 shutil.move(old_path, new_path)
1014
1014
    def _delete_filesystem_repo(self, repo):
        """
        Remove a repository from the filesystem. The removal is actually a
        soft-delete: the directory is renamed with an ``rm__<timestamp>__``
        prefix and the internal ``.hg``/``.git`` dir is renamed as well, so
        the repository is no longer recognized by RhodeCode but can be
        undeleted later by reverting the renames.

        :param repo: repo (database) object whose filesystem data to remove
        """
        # absolute path of the repository directory to soft-delete
        rm_path = os.path.join(self.repos_path, repo.repo_name)
        repo_group = repo.group
        log.info("Removing repository %s", rm_path)
        # disable hg/git internal that it doesn't get detected as repo
        alias = repo.repo_type

        # build a scm instance only to probe whether the repo is bare;
        # largefiles extension is force-disabled for this throwaway config
        config = make_db_config(clear_session=False)
        config.set('extensions', 'largefiles', '')
        bare = getattr(repo.scm_instance(config=config), 'bare', False)

        # skip this for bare git repos
        if not bare:
            # disable VCS repo by renaming .hg/.git to rm__.hg/rm__.git
            vcs_path = os.path.join(rm_path, '.%s' % alias)
            if os.path.exists(vcs_path):
                shutil.move(vcs_path, os.path.join(rm_path, 'rm__.%s' % alias))

        # timestamped target name, e.g. rm__20200101_120000_000123__reponame;
        # microseconds are zero-padded to keep names sortable
        _now = datetime.datetime.now()
        _ms = str(_now.microsecond).rjust(6, '0')
        _d = 'rm__%s__%s' % (_now.strftime('%Y%m%d_%H%M%S_' + _ms),
                             repo.just_name)
        if repo_group:
            # if repository is in group, prefix the removal path with the group
            args = repo_group.full_path_splitted + [_d]
            _d = os.path.join(*args)

        if os.path.isdir(rm_path):
            shutil.move(rm_path, os.path.join(self.repos_path, _d))

        # finally cleanup diff-cache if it exists
        cached_diffs_dir = repo.cached_diffs_dir
        if os.path.isdir(cached_diffs_dir):
            shutil.rmtree(cached_diffs_dir)
1057
1057
1058
1058
class ReadmeFinder:
    """
    Utility which knows how to find a readme for a specific commit.

    The main idea is that this is a configurable algorithm. When creating an
    instance you can define parameters, currently only the `default_renderer`.
    Based on this configuration the method :meth:`search` behaves slightly
    different.
    """

    # matches e.g. "readme", "README.md", "Readme.rst" (case-insensitive);
    # group(1) captures the extension including the leading dot, or None
    readme_re = re.compile(r'^readme(\.[^\.]+)?$', re.IGNORECASE)
    # matches documentation directories such as "doc" or "docs"
    path_re = re.compile(r'^docs?', re.IGNORECASE)

    # lower number == higher priority for a given extension (keys lowercase)
    default_priorities = {
        None: 0,
        '.text': 2,
        '.txt': 3,
        '.rst': 1,
        '.rest': 2,
        '.md': 1,
        '.mkdn': 2,
        '.mdown': 3,
        '.markdown': 4,
    }

    # lower number == higher priority for a documentation directory
    path_priority = {
        'doc': 0,
        'docs': 1,
    }

    # priority for anything not listed in the maps above
    FALLBACK_PRIORITY = 99

    RENDERER_TO_EXTENSION = {
        'rst': ['.rst', '.rest'],
        # fixed: '.mkdn' was missing its leading dot, so .mkdn readmes never
        # received the markdown renderer priority boost
        'markdown': ['.md', '.mkdn', '.mdown', '.markdown'],
    }

    def __init__(self, default_renderer=None):
        """
        :param default_renderer: renderer name ('rst' or 'markdown') whose
            extensions get boosted priority during the search, or None
        """
        self._default_renderer = default_renderer
        self._renderer_extensions = self.RENDERER_TO_EXTENSION.get(
            default_renderer, [])

    def search(self, commit, path=u'/'):
        """
        Find a readme in the given `commit`.

        First looks for readme files directly under `path`; if none match,
        recurses into documentation directories (doc/docs) in priority order.
        Returns the readme file node, or None if nothing was found.
        """
        nodes = commit.get_nodes(path)
        matches = self._match_readmes(nodes)
        matches = self._sort_according_to_priority(matches)
        if matches:
            return matches[0].node

        paths = self._match_paths(nodes)
        paths = self._sort_paths_according_to_priority(paths)
        for path in paths:
            match = self.search(commit, path=path)
            if match:
                return match

        return None

    def _match_readmes(self, nodes):
        # yield a ReadmeMatch for every file node whose basename looks
        # like a readme
        for node in nodes:
            if not node.is_file():
                continue
            path = node.path.rsplit('/', 1)[-1]
            match = self.readme_re.match(path)
            if match:
                extension = match.group(1)
                yield ReadmeMatch(node, match, self._priority(extension))

    def _match_paths(self, nodes):
        # yield paths of directory nodes that look like documentation dirs
        for node in nodes:
            if not node.is_dir():
                continue
            match = self.path_re.match(node.path)
            if match:
                yield node.path

    def _priority(self, extension):
        """
        Return a (renderer_priority, extension_priority) sort key; lower
        tuples sort first.
        """
        # readme_re matches case-insensitively, so normalize the captured
        # extension before looking it up in the lowercase-keyed tables
        if extension:
            extension = extension.lower()
        renderer_priority = (
            0 if extension in self._renderer_extensions else 1)
        extension_priority = self.default_priorities.get(
            extension, self.FALLBACK_PRIORITY)
        return (renderer_priority, extension_priority)

    def _sort_according_to_priority(self, matches):
        # sort ReadmeMatch objects by priority tuple, then by path for
        # a deterministic tie-break
        def priority_and_path(match):
            return (match.priority, match.path)

        return sorted(matches, key=priority_and_path)

    def _sort_paths_according_to_priority(self, paths):
        # sort doc-directory paths by configured priority, then by name
        def priority_and_path(path):
            return (self.path_priority.get(path, self.FALLBACK_PRIORITY), path)

        return sorted(paths, key=priority_and_path)
1158
1158
1159
1159
class ReadmeMatch:
    """
    A readme candidate found by ReadmeFinder.

    Couples the file node with the regex match that identified it and the
    computed sort-priority tuple used to rank candidates.
    """

    def __init__(self, node, match, priority):
        # file node of the readme candidate
        self.node = node
        # regex match object from ReadmeFinder.readme_re
        self._match = match
        # (renderer_priority, extension_priority) tuple; lower sorts first
        self.priority = priority

    @property
    def path(self):
        """Path of the underlying file node."""
        return self.node.path

    def __repr__(self):
        # fixed: the original format string was missing the closing '>'
        return '<ReadmeMatch {} priority={}>'.format(self.path, self.priority)
General Comments 0
You need to be logged in to leave comments. Login now