##// END OF EJS Templates
file-nodes: added streaming remote attributes for vcsserver....
dan -
r3895:2b1d7e0d default
parent child Browse files
Show More
@@ -1,1547 +1,1545 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2011-2019 RhodeCode GmbH
3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import itertools
21 import itertools
22 import logging
22 import logging
23 import os
23 import os
24 import shutil
24 import shutil
25 import tempfile
25 import tempfile
26 import collections
26 import collections
27 import urllib
27 import urllib
28 import pathlib2
28 import pathlib2
29
29
30 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
30 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
31 from pyramid.view import view_config
31 from pyramid.view import view_config
32 from pyramid.renderers import render
32 from pyramid.renderers import render
33 from pyramid.response import Response
33 from pyramid.response import Response
34
34
35 import rhodecode
35 import rhodecode
36 from rhodecode.apps._base import RepoAppView
36 from rhodecode.apps._base import RepoAppView
37
37
38
38
39 from rhodecode.lib import diffs, helpers as h, rc_cache
39 from rhodecode.lib import diffs, helpers as h, rc_cache
40 from rhodecode.lib import audit_logger
40 from rhodecode.lib import audit_logger
41 from rhodecode.lib.view_utils import parse_path_ref
41 from rhodecode.lib.view_utils import parse_path_ref
42 from rhodecode.lib.exceptions import NonRelativePathError
42 from rhodecode.lib.exceptions import NonRelativePathError
43 from rhodecode.lib.codeblocks import (
43 from rhodecode.lib.codeblocks import (
44 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
44 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
45 from rhodecode.lib.utils2 import (
45 from rhodecode.lib.utils2 import (
46 convert_line_endings, detect_mode, safe_str, str2bool, safe_int, sha1, safe_unicode)
46 convert_line_endings, detect_mode, safe_str, str2bool, safe_int, sha1, safe_unicode)
47 from rhodecode.lib.auth import (
47 from rhodecode.lib.auth import (
48 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
48 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
49 from rhodecode.lib.vcs import path as vcspath
49 from rhodecode.lib.vcs import path as vcspath
50 from rhodecode.lib.vcs.backends.base import EmptyCommit
50 from rhodecode.lib.vcs.backends.base import EmptyCommit
51 from rhodecode.lib.vcs.conf import settings
51 from rhodecode.lib.vcs.conf import settings
52 from rhodecode.lib.vcs.nodes import FileNode
52 from rhodecode.lib.vcs.nodes import FileNode
53 from rhodecode.lib.vcs.exceptions import (
53 from rhodecode.lib.vcs.exceptions import (
54 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
54 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
55 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
55 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
56 NodeDoesNotExistError, CommitError, NodeError)
56 NodeDoesNotExistError, CommitError, NodeError)
57
57
58 from rhodecode.model.scm import ScmModel
58 from rhodecode.model.scm import ScmModel
59 from rhodecode.model.db import Repository
59 from rhodecode.model.db import Repository
60
60
61 log = logging.getLogger(__name__)
61 log = logging.getLogger(__name__)
62
62
63
63
64 class RepoFilesView(RepoAppView):
64 class RepoFilesView(RepoAppView):
65
65
66 @staticmethod
66 @staticmethod
67 def adjust_file_path_for_svn(f_path, repo):
67 def adjust_file_path_for_svn(f_path, repo):
68 """
68 """
69 Computes the relative path of `f_path`.
69 Computes the relative path of `f_path`.
70
70
71 This is mainly based on prefix matching of the recognized tags and
71 This is mainly based on prefix matching of the recognized tags and
72 branches in the underlying repository.
72 branches in the underlying repository.
73 """
73 """
74 tags_and_branches = itertools.chain(
74 tags_and_branches = itertools.chain(
75 repo.branches.iterkeys(),
75 repo.branches.iterkeys(),
76 repo.tags.iterkeys())
76 repo.tags.iterkeys())
77 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
77 tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)
78
78
79 for name in tags_and_branches:
79 for name in tags_and_branches:
80 if f_path.startswith('{}/'.format(name)):
80 if f_path.startswith('{}/'.format(name)):
81 f_path = vcspath.relpath(f_path, name)
81 f_path = vcspath.relpath(f_path, name)
82 break
82 break
83 return f_path
83 return f_path
84
84
85 def load_default_context(self):
85 def load_default_context(self):
86 c = self._get_local_tmpl_context(include_app_defaults=True)
86 c = self._get_local_tmpl_context(include_app_defaults=True)
87 c.rhodecode_repo = self.rhodecode_vcs_repo
87 c.rhodecode_repo = self.rhodecode_vcs_repo
88 c.enable_downloads = self.db_repo.enable_downloads
88 c.enable_downloads = self.db_repo.enable_downloads
89 return c
89 return c
90
90
91 def _ensure_not_locked(self, commit_id='tip'):
91 def _ensure_not_locked(self, commit_id='tip'):
92 _ = self.request.translate
92 _ = self.request.translate
93
93
94 repo = self.db_repo
94 repo = self.db_repo
95 if repo.enable_locking and repo.locked[0]:
95 if repo.enable_locking and repo.locked[0]:
96 h.flash(_('This repository has been locked by %s on %s')
96 h.flash(_('This repository has been locked by %s on %s')
97 % (h.person_by_id(repo.locked[0]),
97 % (h.person_by_id(repo.locked[0]),
98 h.format_date(h.time_to_datetime(repo.locked[1]))),
98 h.format_date(h.time_to_datetime(repo.locked[1]))),
99 'warning')
99 'warning')
100 files_url = h.route_path(
100 files_url = h.route_path(
101 'repo_files:default_path',
101 'repo_files:default_path',
102 repo_name=self.db_repo_name, commit_id=commit_id)
102 repo_name=self.db_repo_name, commit_id=commit_id)
103 raise HTTPFound(files_url)
103 raise HTTPFound(files_url)
104
104
105 def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
105 def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
106 _ = self.request.translate
106 _ = self.request.translate
107
107
108 if not is_head:
108 if not is_head:
109 message = _('Cannot modify file. '
109 message = _('Cannot modify file. '
110 'Given commit `{}` is not head of a branch.').format(commit_id)
110 'Given commit `{}` is not head of a branch.').format(commit_id)
111 h.flash(message, category='warning')
111 h.flash(message, category='warning')
112
112
113 if json_mode:
113 if json_mode:
114 return message
114 return message
115
115
116 files_url = h.route_path(
116 files_url = h.route_path(
117 'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
117 'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
118 f_path=f_path)
118 f_path=f_path)
119 raise HTTPFound(files_url)
119 raise HTTPFound(files_url)
120
120
121 def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
121 def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
122 _ = self.request.translate
122 _ = self.request.translate
123
123
124 rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
124 rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
125 self.db_repo_name, branch_name)
125 self.db_repo_name, branch_name)
126 if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
126 if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
127 message = _('Branch `{}` changes forbidden by rule {}.').format(
127 message = _('Branch `{}` changes forbidden by rule {}.').format(
128 branch_name, rule)
128 branch_name, rule)
129 h.flash(message, 'warning')
129 h.flash(message, 'warning')
130
130
131 if json_mode:
131 if json_mode:
132 return message
132 return message
133
133
134 files_url = h.route_path(
134 files_url = h.route_path(
135 'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)
135 'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)
136
136
137 raise HTTPFound(files_url)
137 raise HTTPFound(files_url)
138
138
139 def _get_commit_and_path(self):
139 def _get_commit_and_path(self):
140 default_commit_id = self.db_repo.landing_rev[1]
140 default_commit_id = self.db_repo.landing_rev[1]
141 default_f_path = '/'
141 default_f_path = '/'
142
142
143 commit_id = self.request.matchdict.get(
143 commit_id = self.request.matchdict.get(
144 'commit_id', default_commit_id)
144 'commit_id', default_commit_id)
145 f_path = self._get_f_path(self.request.matchdict, default_f_path)
145 f_path = self._get_f_path(self.request.matchdict, default_f_path)
146 return commit_id, f_path
146 return commit_id, f_path
147
147
148 def _get_default_encoding(self, c):
148 def _get_default_encoding(self, c):
149 enc_list = getattr(c, 'default_encodings', [])
149 enc_list = getattr(c, 'default_encodings', [])
150 return enc_list[0] if enc_list else 'UTF-8'
150 return enc_list[0] if enc_list else 'UTF-8'
151
151
    def _get_commit_or_redirect(self, commit_id, redirect_after=True):
        """
        This is a safe way to get commit. If an error occurs it redirects to
        tip with proper message

        :param commit_id: id of commit to fetch
        :param redirect_after: toggle redirection
        """
        _ = self.request.translate

        try:
            return self.rhodecode_vcs_repo.get_commit(commit_id)
        except EmptyRepositoryError:
            # repo has no commits at all; optionally point the user at the
            # "add first file" flow instead of erroring out
            if not redirect_after:
                return None

            _url = h.route_path(
                'repo_files_add_file',
                repo_name=self.db_repo_name, commit_id=0, f_path='')

            # only offer the "add a new file" link to users who can write
            if h.HasRepoPermissionAny(
                    'repository.write', 'repository.admin')(self.db_repo_name):
                add_new = h.link_to(
                    _('Click here to add a new file.'), _url, class_="alert-link")
            else:
                add_new = ""

            h.flash(h.literal(
                _('There are no files yet. %s') % add_new), category='warning')
            raise HTTPFound(
                h.route_path('repo_summary', repo_name=self.db_repo_name))

        except (CommitDoesNotExistError, LookupError):
            msg = _('No such commit exists for this repository')
            h.flash(msg, category='error')
            raise HTTPNotFound()
        except RepositoryError as e:
            # catch-all VCS error last; must stay after the more specific
            # CommitDoesNotExistError handler above
            h.flash(safe_str(h.escape(e)), category='error')
            raise HTTPNotFound()
191
191
192 def _get_filenode_or_redirect(self, commit_obj, path):
192 def _get_filenode_or_redirect(self, commit_obj, path):
193 """
193 """
194 Returns file_node, if error occurs or given path is directory,
194 Returns file_node, if error occurs or given path is directory,
195 it'll redirect to top level path
195 it'll redirect to top level path
196 """
196 """
197 _ = self.request.translate
197 _ = self.request.translate
198
198
199 try:
199 try:
200 file_node = commit_obj.get_node(path)
200 file_node = commit_obj.get_node(path)
201 if file_node.is_dir():
201 if file_node.is_dir():
202 raise RepositoryError('The given path is a directory')
202 raise RepositoryError('The given path is a directory')
203 except CommitDoesNotExistError:
203 except CommitDoesNotExistError:
204 log.exception('No such commit exists for this repository')
204 log.exception('No such commit exists for this repository')
205 h.flash(_('No such commit exists for this repository'), category='error')
205 h.flash(_('No such commit exists for this repository'), category='error')
206 raise HTTPNotFound()
206 raise HTTPNotFound()
207 except RepositoryError as e:
207 except RepositoryError as e:
208 log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
208 log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
209 h.flash(safe_str(h.escape(e)), category='error')
209 h.flash(safe_str(h.escape(e)), category='error')
210 raise HTTPNotFound()
210 raise HTTPNotFound()
211
211
212 return file_node
212 return file_node
213
213
214 def _is_valid_head(self, commit_id, repo):
214 def _is_valid_head(self, commit_id, repo):
215 branch_name = sha_commit_id = ''
215 branch_name = sha_commit_id = ''
216 is_head = False
216 is_head = False
217 log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)
217 log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)
218
218
219 for _branch_name, branch_commit_id in repo.branches.items():
219 for _branch_name, branch_commit_id in repo.branches.items():
220 # simple case we pass in branch name, it's a HEAD
220 # simple case we pass in branch name, it's a HEAD
221 if commit_id == _branch_name:
221 if commit_id == _branch_name:
222 is_head = True
222 is_head = True
223 branch_name = _branch_name
223 branch_name = _branch_name
224 sha_commit_id = branch_commit_id
224 sha_commit_id = branch_commit_id
225 break
225 break
226 # case when we pass in full sha commit_id, which is a head
226 # case when we pass in full sha commit_id, which is a head
227 elif commit_id == branch_commit_id:
227 elif commit_id == branch_commit_id:
228 is_head = True
228 is_head = True
229 branch_name = _branch_name
229 branch_name = _branch_name
230 sha_commit_id = branch_commit_id
230 sha_commit_id = branch_commit_id
231 break
231 break
232
232
233 if h.is_svn(repo) and not repo.is_empty():
233 if h.is_svn(repo) and not repo.is_empty():
234 # Note: Subversion only has one head.
234 # Note: Subversion only has one head.
235 if commit_id == repo.get_commit(commit_idx=-1).raw_id:
235 if commit_id == repo.get_commit(commit_idx=-1).raw_id:
236 is_head = True
236 is_head = True
237 return branch_name, sha_commit_id, is_head
237 return branch_name, sha_commit_id, is_head
238
238
239 # checked branches, means we only need to try to get the branch/commit_sha
239 # checked branches, means we only need to try to get the branch/commit_sha
240 if not repo.is_empty():
240 if not repo.is_empty():
241 commit = repo.get_commit(commit_id=commit_id)
241 commit = repo.get_commit(commit_id=commit_id)
242 if commit:
242 if commit:
243 branch_name = commit.branch
243 branch_name = commit.branch
244 sha_commit_id = commit.raw_id
244 sha_commit_id = commit.raw_id
245
245
246 return branch_name, sha_commit_id, is_head
246 return branch_name, sha_commit_id, is_head
247
247
248 def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False):
248 def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False):
249
249
250 repo_id = self.db_repo.repo_id
250 repo_id = self.db_repo.repo_id
251 force_recache = self.get_recache_flag()
251 force_recache = self.get_recache_flag()
252
252
253 cache_seconds = safe_int(
253 cache_seconds = safe_int(
254 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
254 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
255 cache_on = not force_recache and cache_seconds > 0
255 cache_on = not force_recache and cache_seconds > 0
256 log.debug(
256 log.debug(
257 'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s`'
257 'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s`'
258 'with caching: %s[TTL: %ss]' % (
258 'with caching: %s[TTL: %ss]' % (
259 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
259 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
260
260
261 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
261 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
262 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
262 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
263
263
264 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
264 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
265 condition=cache_on)
265 condition=cache_on)
266 def compute_file_tree(ver, repo_id, commit_id, f_path, full_load):
266 def compute_file_tree(ver, repo_id, commit_id, f_path, full_load):
267 log.debug('Generating cached file tree at ver:%s for repo_id: %s, %s, %s',
267 log.debug('Generating cached file tree at ver:%s for repo_id: %s, %s, %s',
268 ver, repo_id, commit_id, f_path)
268 ver, repo_id, commit_id, f_path)
269
269
270 c.full_load = full_load
270 c.full_load = full_load
271 return render(
271 return render(
272 'rhodecode:templates/files/files_browser_tree.mako',
272 'rhodecode:templates/files/files_browser_tree.mako',
273 self._get_template_context(c), self.request)
273 self._get_template_context(c), self.request)
274
274
275 return compute_file_tree('v1', self.db_repo.repo_id, commit_id, f_path, full_load)
275 return compute_file_tree('v1', self.db_repo.repo_id, commit_id, f_path, full_load)
276
276
277 def _get_archive_spec(self, fname):
277 def _get_archive_spec(self, fname):
278 log.debug('Detecting archive spec for: `%s`', fname)
278 log.debug('Detecting archive spec for: `%s`', fname)
279
279
280 fileformat = None
280 fileformat = None
281 ext = None
281 ext = None
282 content_type = None
282 content_type = None
283 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
283 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
284
284
285 if fname.endswith(extension):
285 if fname.endswith(extension):
286 fileformat = a_type
286 fileformat = a_type
287 log.debug('archive is of type: %s', fileformat)
287 log.debug('archive is of type: %s', fileformat)
288 ext = extension
288 ext = extension
289 break
289 break
290
290
291 if not fileformat:
291 if not fileformat:
292 raise ValueError()
292 raise ValueError()
293
293
294 # left over part of whole fname is the commit
294 # left over part of whole fname is the commit
295 commit_id = fname[:-len(ext)]
295 commit_id = fname[:-len(ext)]
296
296
297 return commit_id, ext, fileformat, content_type
297 return commit_id, ext, fileformat, content_type
298
298
299 def create_pure_path(self, *parts):
299 def create_pure_path(self, *parts):
300 # Split paths and sanitize them, removing any ../ etc
300 # Split paths and sanitize them, removing any ../ etc
301 sanitized_path = [
301 sanitized_path = [
302 x for x in pathlib2.PurePath(*parts).parts
302 x for x in pathlib2.PurePath(*parts).parts
303 if x not in ['.', '..']]
303 if x not in ['.', '..']]
304
304
305 pure_path = pathlib2.PurePath(*sanitized_path)
305 pure_path = pathlib2.PurePath(*sanitized_path)
306 return pure_path
306 return pure_path
307
307
308 def _is_lf_enabled(self, target_repo):
308 def _is_lf_enabled(self, target_repo):
309 lf_enabled = False
309 lf_enabled = False
310
310
311 lf_key_for_vcs_map = {
311 lf_key_for_vcs_map = {
312 'hg': 'extensions_largefiles',
312 'hg': 'extensions_largefiles',
313 'git': 'vcs_git_lfs_enabled'
313 'git': 'vcs_git_lfs_enabled'
314 }
314 }
315
315
316 lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type)
316 lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type)
317
317
318 if lf_key_for_vcs:
318 if lf_key_for_vcs:
319 lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs)
319 lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs)
320
320
321 return lf_enabled
321 return lf_enabled
322
322
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_archivefile', request_method='GET',
        renderer=None)
    def repo_archivefile(self):
        """
        Serve a repository archive (tar/zip/...) for a given commit, spec'd
        by the `fname` route segment, optionally only the subtree at
        `at_path`. Generated archives may be cached on disk via
        `archive_cache_dir` and are streamed back in chunks.
        """
        # archive cache config
        from rhodecode import CONFIG
        _ = self.request.translate
        self.load_default_context()
        default_at_path = '/'
        fname = self.request.matchdict['fname']
        subrepos = self.request.GET.get('subrepos') == 'true'
        at_path = self.request.GET.get('at_path') or default_at_path

        if not self.db_repo.enable_downloads:
            return Response(_('Downloads disabled'))

        try:
            commit_id, ext, fileformat, content_type = \
                self._get_archive_spec(fname)
        except ValueError:
            return Response(_('Unknown archive type for: `{}`').format(
                h.escape(fname)))

        try:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id)
        except CommitDoesNotExistError:
            return Response(_('Unknown commit_id {}').format(
                h.escape(commit_id)))
        except EmptyRepositoryError:
            return Response(_('Empty repository'))

        # normalize at_path to the node's canonical path, if it exists
        try:
            at_path = commit.get_node(at_path).path or default_at_path
        except Exception:
            return Response(_('No node at path {} for this repository').format(at_path))

        # short hash of the subtree path, used to disambiguate cached names
        path_sha = sha1(at_path)[:8]

        # original backward compat name of archive
        clean_name = safe_str(self.db_repo_name.replace('/', '_'))
        short_sha = safe_str(commit.short_id)

        if at_path == default_at_path:
            archive_name = '{}-{}{}{}'.format(
                clean_name,
                '-sub' if subrepos else '',
                short_sha,
                ext)
        # custom path and new name
        else:
            archive_name = '{}-{}{}-{}{}'.format(
                clean_name,
                '-sub' if subrepos else '',
                short_sha,
                path_sha,
                ext)

        use_cached_archive = False
        # cache only when a cache dir is configured and no_cache wasn't requested
        archive_cache_enabled = CONFIG.get(
            'archive_cache_dir') and not self.request.GET.get('no_cache')
        cached_archive_path = None

        if archive_cache_enabled:
            # make sure the cache dir exists so it's ok to write
            if not os.path.isdir(CONFIG['archive_cache_dir']):
                os.makedirs(CONFIG['archive_cache_dir'])
            cached_archive_path = os.path.join(
                CONFIG['archive_cache_dir'], archive_name)
            if os.path.isfile(cached_archive_path):
                log.debug('Found cached archive in %s', cached_archive_path)
                # fd=None marks "no temp file to close" for the streamer below
                fd, archive = None, cached_archive_path
                use_cached_archive = True
            else:
                log.debug('Archive %s is not yet cached', archive_name)

        if not use_cached_archive:
            # generate new archive
            fd, archive = tempfile.mkstemp()
            log.debug('Creating new temp archive in %s', archive)
            try:
                commit.archive_repo(archive, kind=fileformat, subrepos=subrepos,
                                    archive_at_path=at_path)
            except ImproperArchiveTypeError:
                return _('Unknown archive type')
            if archive_cache_enabled:
                # if we generated the archive and we have cache enabled
                # let's use this for future
                log.debug('Storing new archive in %s', cached_archive_path)
                shutil.move(archive, cached_archive_path)
                archive = cached_archive_path

        # store download action
        audit_logger.store_web(
            'repo.archive.download', action_data={
                'user_agent': self.request.user_agent,
                'archive_name': archive_name,
                'archive_spec': fname,
                'archive_cached': use_cached_archive},
            user=self._rhodecode_user,
            repo=self.db_repo,
            commit=True
        )

        def get_chunked_archive(archive_path):
            # stream the archive in 16KB chunks; closes the temp fd and, when
            # caching is off, removes the temp file once fully consumed
            with open(archive_path, 'rb') as stream:
                while True:
                    data = stream.read(16 * 1024)
                    if not data:
                        if fd:  # fd means we used temporary file
                            os.close(fd)
                        if not archive_cache_enabled:
                            log.debug('Destroying temp archive %s', archive_path)
                            os.remove(archive_path)
                        break
                    yield data

        response = Response(app_iter=get_chunked_archive(archive))
        response.content_disposition = str(
            'attachment; filename=%s' % archive_name)
        response.content_type = str(content_type)

        return response
448
448
449 def _get_file_node(self, commit_id, f_path):
449 def _get_file_node(self, commit_id, f_path):
450 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
450 if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
451 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
451 commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
452 try:
452 try:
453 node = commit.get_node(f_path)
453 node = commit.get_node(f_path)
454 if node.is_dir():
454 if node.is_dir():
455 raise NodeError('%s path is a %s not a file'
455 raise NodeError('%s path is a %s not a file'
456 % (node, type(node)))
456 % (node, type(node)))
457 except NodeDoesNotExistError:
457 except NodeDoesNotExistError:
458 commit = EmptyCommit(
458 commit = EmptyCommit(
459 commit_id=commit_id,
459 commit_id=commit_id,
460 idx=commit.idx,
460 idx=commit.idx,
461 repo=commit.repository,
461 repo=commit.repository,
462 alias=commit.repository.alias,
462 alias=commit.repository.alias,
463 message=commit.message,
463 message=commit.message,
464 author=commit.author,
464 author=commit.author,
465 date=commit.date)
465 date=commit.date)
466 node = FileNode(f_path, '', commit=commit)
466 node = FileNode(f_path, '', commit=commit)
467 else:
467 else:
468 commit = EmptyCommit(
468 commit = EmptyCommit(
469 repo=self.rhodecode_vcs_repo,
469 repo=self.rhodecode_vcs_repo,
470 alias=self.rhodecode_vcs_repo.alias)
470 alias=self.rhodecode_vcs_repo.alias)
471 node = FileNode(f_path, '', commit=commit)
471 node = FileNode(f_path, '', commit=commit)
472 return node
472 return node
473
473
474 @LoginRequired()
474 @LoginRequired()
475 @HasRepoPermissionAnyDecorator(
475 @HasRepoPermissionAnyDecorator(
476 'repository.read', 'repository.write', 'repository.admin')
476 'repository.read', 'repository.write', 'repository.admin')
477 @view_config(
477 @view_config(
478 route_name='repo_files_diff', request_method='GET',
478 route_name='repo_files_diff', request_method='GET',
479 renderer=None)
479 renderer=None)
480 def repo_files_diff(self):
480 def repo_files_diff(self):
481 c = self.load_default_context()
481 c = self.load_default_context()
482 f_path = self._get_f_path(self.request.matchdict)
482 f_path = self._get_f_path(self.request.matchdict)
483 diff1 = self.request.GET.get('diff1', '')
483 diff1 = self.request.GET.get('diff1', '')
484 diff2 = self.request.GET.get('diff2', '')
484 diff2 = self.request.GET.get('diff2', '')
485
485
486 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
486 path1, diff1 = parse_path_ref(diff1, default_path=f_path)
487
487
488 ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
488 ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
489 line_context = self.request.GET.get('context', 3)
489 line_context = self.request.GET.get('context', 3)
490
490
491 if not any((diff1, diff2)):
491 if not any((diff1, diff2)):
492 h.flash(
492 h.flash(
493 'Need query parameter "diff1" or "diff2" to generate a diff.',
493 'Need query parameter "diff1" or "diff2" to generate a diff.',
494 category='error')
494 category='error')
495 raise HTTPBadRequest()
495 raise HTTPBadRequest()
496
496
497 c.action = self.request.GET.get('diff')
497 c.action = self.request.GET.get('diff')
498 if c.action not in ['download', 'raw']:
498 if c.action not in ['download', 'raw']:
499 compare_url = h.route_path(
499 compare_url = h.route_path(
500 'repo_compare',
500 'repo_compare',
501 repo_name=self.db_repo_name,
501 repo_name=self.db_repo_name,
502 source_ref_type='rev',
502 source_ref_type='rev',
503 source_ref=diff1,
503 source_ref=diff1,
504 target_repo=self.db_repo_name,
504 target_repo=self.db_repo_name,
505 target_ref_type='rev',
505 target_ref_type='rev',
506 target_ref=diff2,
506 target_ref=diff2,
507 _query=dict(f_path=f_path))
507 _query=dict(f_path=f_path))
508 # redirect to new view if we render diff
508 # redirect to new view if we render diff
509 raise HTTPFound(compare_url)
509 raise HTTPFound(compare_url)
510
510
511 try:
511 try:
512 node1 = self._get_file_node(diff1, path1)
512 node1 = self._get_file_node(diff1, path1)
513 node2 = self._get_file_node(diff2, f_path)
513 node2 = self._get_file_node(diff2, f_path)
514 except (RepositoryError, NodeError):
514 except (RepositoryError, NodeError):
515 log.exception("Exception while trying to get node from repository")
515 log.exception("Exception while trying to get node from repository")
516 raise HTTPFound(
516 raise HTTPFound(
517 h.route_path('repo_files', repo_name=self.db_repo_name,
517 h.route_path('repo_files', repo_name=self.db_repo_name,
518 commit_id='tip', f_path=f_path))
518 commit_id='tip', f_path=f_path))
519
519
520 if all(isinstance(node.commit, EmptyCommit)
520 if all(isinstance(node.commit, EmptyCommit)
521 for node in (node1, node2)):
521 for node in (node1, node2)):
522 raise HTTPNotFound()
522 raise HTTPNotFound()
523
523
524 c.commit_1 = node1.commit
524 c.commit_1 = node1.commit
525 c.commit_2 = node2.commit
525 c.commit_2 = node2.commit
526
526
527 if c.action == 'download':
527 if c.action == 'download':
528 _diff = diffs.get_gitdiff(node1, node2,
528 _diff = diffs.get_gitdiff(node1, node2,
529 ignore_whitespace=ignore_whitespace,
529 ignore_whitespace=ignore_whitespace,
530 context=line_context)
530 context=line_context)
531 diff = diffs.DiffProcessor(_diff, format='gitdiff')
531 diff = diffs.DiffProcessor(_diff, format='gitdiff')
532
532
533 response = Response(self.path_filter.get_raw_patch(diff))
533 response = Response(self.path_filter.get_raw_patch(diff))
534 response.content_type = 'text/plain'
534 response.content_type = 'text/plain'
535 response.content_disposition = (
535 response.content_disposition = (
536 'attachment; filename=%s_%s_vs_%s.diff' % (f_path, diff1, diff2)
536 'attachment; filename=%s_%s_vs_%s.diff' % (f_path, diff1, diff2)
537 )
537 )
538 charset = self._get_default_encoding(c)
538 charset = self._get_default_encoding(c)
539 if charset:
539 if charset:
540 response.charset = charset
540 response.charset = charset
541 return response
541 return response
542
542
543 elif c.action == 'raw':
543 elif c.action == 'raw':
544 _diff = diffs.get_gitdiff(node1, node2,
544 _diff = diffs.get_gitdiff(node1, node2,
545 ignore_whitespace=ignore_whitespace,
545 ignore_whitespace=ignore_whitespace,
546 context=line_context)
546 context=line_context)
547 diff = diffs.DiffProcessor(_diff, format='gitdiff')
547 diff = diffs.DiffProcessor(_diff, format='gitdiff')
548
548
549 response = Response(self.path_filter.get_raw_patch(diff))
549 response = Response(self.path_filter.get_raw_patch(diff))
550 response.content_type = 'text/plain'
550 response.content_type = 'text/plain'
551 charset = self._get_default_encoding(c)
551 charset = self._get_default_encoding(c)
552 if charset:
552 if charset:
553 response.charset = charset
553 response.charset = charset
554 return response
554 return response
555
555
556 # in case we ever end up here
556 # in case we ever end up here
557 raise HTTPNotFound()
557 raise HTTPNotFound()
558
558
559 @LoginRequired()
559 @LoginRequired()
560 @HasRepoPermissionAnyDecorator(
560 @HasRepoPermissionAnyDecorator(
561 'repository.read', 'repository.write', 'repository.admin')
561 'repository.read', 'repository.write', 'repository.admin')
562 @view_config(
562 @view_config(
563 route_name='repo_files_diff_2way_redirect', request_method='GET',
563 route_name='repo_files_diff_2way_redirect', request_method='GET',
564 renderer=None)
564 renderer=None)
565 def repo_files_diff_2way_redirect(self):
565 def repo_files_diff_2way_redirect(self):
566 """
566 """
567 Kept only to make OLD links work
567 Kept only to make OLD links work
568 """
568 """
569 f_path = self._get_f_path_unchecked(self.request.matchdict)
569 f_path = self._get_f_path_unchecked(self.request.matchdict)
570 diff1 = self.request.GET.get('diff1', '')
570 diff1 = self.request.GET.get('diff1', '')
571 diff2 = self.request.GET.get('diff2', '')
571 diff2 = self.request.GET.get('diff2', '')
572
572
573 if not any((diff1, diff2)):
573 if not any((diff1, diff2)):
574 h.flash(
574 h.flash(
575 'Need query parameter "diff1" or "diff2" to generate a diff.',
575 'Need query parameter "diff1" or "diff2" to generate a diff.',
576 category='error')
576 category='error')
577 raise HTTPBadRequest()
577 raise HTTPBadRequest()
578
578
579 compare_url = h.route_path(
579 compare_url = h.route_path(
580 'repo_compare',
580 'repo_compare',
581 repo_name=self.db_repo_name,
581 repo_name=self.db_repo_name,
582 source_ref_type='rev',
582 source_ref_type='rev',
583 source_ref=diff1,
583 source_ref=diff1,
584 target_ref_type='rev',
584 target_ref_type='rev',
585 target_ref=diff2,
585 target_ref=diff2,
586 _query=dict(f_path=f_path, diffmode='sideside',
586 _query=dict(f_path=f_path, diffmode='sideside',
587 target_repo=self.db_repo_name,))
587 target_repo=self.db_repo_name,))
588 raise HTTPFound(compare_url)
588 raise HTTPFound(compare_url)
589
589
590 @LoginRequired()
590 @LoginRequired()
591 @HasRepoPermissionAnyDecorator(
591 @HasRepoPermissionAnyDecorator(
592 'repository.read', 'repository.write', 'repository.admin')
592 'repository.read', 'repository.write', 'repository.admin')
593 @view_config(
593 @view_config(
594 route_name='repo_files', request_method='GET',
594 route_name='repo_files', request_method='GET',
595 renderer=None)
595 renderer=None)
596 @view_config(
596 @view_config(
597 route_name='repo_files:default_path', request_method='GET',
597 route_name='repo_files:default_path', request_method='GET',
598 renderer=None)
598 renderer=None)
599 @view_config(
599 @view_config(
600 route_name='repo_files:default_commit', request_method='GET',
600 route_name='repo_files:default_commit', request_method='GET',
601 renderer=None)
601 renderer=None)
602 @view_config(
602 @view_config(
603 route_name='repo_files:rendered', request_method='GET',
603 route_name='repo_files:rendered', request_method='GET',
604 renderer=None)
604 renderer=None)
605 @view_config(
605 @view_config(
606 route_name='repo_files:annotated', request_method='GET',
606 route_name='repo_files:annotated', request_method='GET',
607 renderer=None)
607 renderer=None)
608 def repo_files(self):
608 def repo_files(self):
609 c = self.load_default_context()
609 c = self.load_default_context()
610
610
611 view_name = getattr(self.request.matched_route, 'name', None)
611 view_name = getattr(self.request.matched_route, 'name', None)
612
612
613 c.annotate = view_name == 'repo_files:annotated'
613 c.annotate = view_name == 'repo_files:annotated'
614 # default is false, but .rst/.md files later are auto rendered, we can
614 # default is false, but .rst/.md files later are auto rendered, we can
615 # overwrite auto rendering by setting this GET flag
615 # overwrite auto rendering by setting this GET flag
616 c.renderer = view_name == 'repo_files:rendered' or \
616 c.renderer = view_name == 'repo_files:rendered' or \
617 not self.request.GET.get('no-render', False)
617 not self.request.GET.get('no-render', False)
618
618
619 # redirect to given commit_id from form if given
619 # redirect to given commit_id from form if given
620 get_commit_id = self.request.GET.get('at_rev', None)
620 get_commit_id = self.request.GET.get('at_rev', None)
621 if get_commit_id:
621 if get_commit_id:
622 self._get_commit_or_redirect(get_commit_id)
622 self._get_commit_or_redirect(get_commit_id)
623
623
624 commit_id, f_path = self._get_commit_and_path()
624 commit_id, f_path = self._get_commit_and_path()
625 c.commit = self._get_commit_or_redirect(commit_id)
625 c.commit = self._get_commit_or_redirect(commit_id)
626 c.branch = self.request.GET.get('branch', None)
626 c.branch = self.request.GET.get('branch', None)
627 c.f_path = f_path
627 c.f_path = f_path
628
628
629 # prev link
629 # prev link
630 try:
630 try:
631 prev_commit = c.commit.prev(c.branch)
631 prev_commit = c.commit.prev(c.branch)
632 c.prev_commit = prev_commit
632 c.prev_commit = prev_commit
633 c.url_prev = h.route_path(
633 c.url_prev = h.route_path(
634 'repo_files', repo_name=self.db_repo_name,
634 'repo_files', repo_name=self.db_repo_name,
635 commit_id=prev_commit.raw_id, f_path=f_path)
635 commit_id=prev_commit.raw_id, f_path=f_path)
636 if c.branch:
636 if c.branch:
637 c.url_prev += '?branch=%s' % c.branch
637 c.url_prev += '?branch=%s' % c.branch
638 except (CommitDoesNotExistError, VCSError):
638 except (CommitDoesNotExistError, VCSError):
639 c.url_prev = '#'
639 c.url_prev = '#'
640 c.prev_commit = EmptyCommit()
640 c.prev_commit = EmptyCommit()
641
641
642 # next link
642 # next link
643 try:
643 try:
644 next_commit = c.commit.next(c.branch)
644 next_commit = c.commit.next(c.branch)
645 c.next_commit = next_commit
645 c.next_commit = next_commit
646 c.url_next = h.route_path(
646 c.url_next = h.route_path(
647 'repo_files', repo_name=self.db_repo_name,
647 'repo_files', repo_name=self.db_repo_name,
648 commit_id=next_commit.raw_id, f_path=f_path)
648 commit_id=next_commit.raw_id, f_path=f_path)
649 if c.branch:
649 if c.branch:
650 c.url_next += '?branch=%s' % c.branch
650 c.url_next += '?branch=%s' % c.branch
651 except (CommitDoesNotExistError, VCSError):
651 except (CommitDoesNotExistError, VCSError):
652 c.url_next = '#'
652 c.url_next = '#'
653 c.next_commit = EmptyCommit()
653 c.next_commit = EmptyCommit()
654
654
655 # files or dirs
655 # files or dirs
656 try:
656 try:
657 c.file = c.commit.get_node(f_path)
657 c.file = c.commit.get_node(f_path)
658 c.file_author = True
658 c.file_author = True
659 c.file_tree = ''
659 c.file_tree = ''
660
660
661 # load file content
661 # load file content
662 if c.file.is_file():
662 if c.file.is_file():
663 c.lf_node = {}
663 c.lf_node = {}
664
664
665 has_lf_enabled = self._is_lf_enabled(self.db_repo)
665 has_lf_enabled = self._is_lf_enabled(self.db_repo)
666 if has_lf_enabled:
666 if has_lf_enabled:
667 c.lf_node = c.file.get_largefile_node()
667 c.lf_node = c.file.get_largefile_node()
668
668
669 c.file_source_page = 'true'
669 c.file_source_page = 'true'
670 c.file_last_commit = c.file.last_commit
670 c.file_last_commit = c.file.last_commit
671 if c.file.size < c.visual.cut_off_limit_diff:
671 if c.file.size < c.visual.cut_off_limit_diff:
672 if c.annotate: # annotation has precedence over renderer
672 if c.annotate: # annotation has precedence over renderer
673 c.annotated_lines = filenode_as_annotated_lines_tokens(
673 c.annotated_lines = filenode_as_annotated_lines_tokens(
674 c.file
674 c.file
675 )
675 )
676 else:
676 else:
677 c.renderer = (
677 c.renderer = (
678 c.renderer and h.renderer_from_filename(c.file.path)
678 c.renderer and h.renderer_from_filename(c.file.path)
679 )
679 )
680 if not c.renderer:
680 if not c.renderer:
681 c.lines = filenode_as_lines_tokens(c.file)
681 c.lines = filenode_as_lines_tokens(c.file)
682
682
683 _branch_name, _sha_commit_id, is_head = self._is_valid_head(
683 _branch_name, _sha_commit_id, is_head = self._is_valid_head(
684 commit_id, self.rhodecode_vcs_repo)
684 commit_id, self.rhodecode_vcs_repo)
685 c.on_branch_head = is_head
685 c.on_branch_head = is_head
686
686
687 branch = c.commit.branch if (
687 branch = c.commit.branch if (
688 c.commit.branch and '/' not in c.commit.branch) else None
688 c.commit.branch and '/' not in c.commit.branch) else None
689 c.branch_or_raw_id = branch or c.commit.raw_id
689 c.branch_or_raw_id = branch or c.commit.raw_id
690 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
690 c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)
691
691
692 author = c.file_last_commit.author
692 author = c.file_last_commit.author
693 c.authors = [[
693 c.authors = [[
694 h.email(author),
694 h.email(author),
695 h.person(author, 'username_or_name_or_email'),
695 h.person(author, 'username_or_name_or_email'),
696 1
696 1
697 ]]
697 ]]
698
698
699 else: # load tree content at path
699 else: # load tree content at path
700 c.file_source_page = 'false'
700 c.file_source_page = 'false'
701 c.authors = []
701 c.authors = []
702 # this loads a simple tree without metadata to speed things up
702 # this loads a simple tree without metadata to speed things up
703 # later via ajax we call repo_nodetree_full and fetch whole
703 # later via ajax we call repo_nodetree_full and fetch whole
704 c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path)
704 c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path)
705
705
706 except RepositoryError as e:
706 except RepositoryError as e:
707 h.flash(safe_str(h.escape(e)), category='error')
707 h.flash(safe_str(h.escape(e)), category='error')
708 raise HTTPNotFound()
708 raise HTTPNotFound()
709
709
710 if self.request.environ.get('HTTP_X_PJAX'):
710 if self.request.environ.get('HTTP_X_PJAX'):
711 html = render('rhodecode:templates/files/files_pjax.mako',
711 html = render('rhodecode:templates/files/files_pjax.mako',
712 self._get_template_context(c), self.request)
712 self._get_template_context(c), self.request)
713 else:
713 else:
714 html = render('rhodecode:templates/files/files.mako',
714 html = render('rhodecode:templates/files/files.mako',
715 self._get_template_context(c), self.request)
715 self._get_template_context(c), self.request)
716 return Response(html)
716 return Response(html)
717
717
718 @HasRepoPermissionAnyDecorator(
718 @HasRepoPermissionAnyDecorator(
719 'repository.read', 'repository.write', 'repository.admin')
719 'repository.read', 'repository.write', 'repository.admin')
720 @view_config(
720 @view_config(
721 route_name='repo_files:annotated_previous', request_method='GET',
721 route_name='repo_files:annotated_previous', request_method='GET',
722 renderer=None)
722 renderer=None)
723 def repo_files_annotated_previous(self):
723 def repo_files_annotated_previous(self):
724 self.load_default_context()
724 self.load_default_context()
725
725
726 commit_id, f_path = self._get_commit_and_path()
726 commit_id, f_path = self._get_commit_and_path()
727 commit = self._get_commit_or_redirect(commit_id)
727 commit = self._get_commit_or_redirect(commit_id)
728 prev_commit_id = commit.raw_id
728 prev_commit_id = commit.raw_id
729 line_anchor = self.request.GET.get('line_anchor')
729 line_anchor = self.request.GET.get('line_anchor')
730 is_file = False
730 is_file = False
731 try:
731 try:
732 _file = commit.get_node(f_path)
732 _file = commit.get_node(f_path)
733 is_file = _file.is_file()
733 is_file = _file.is_file()
734 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
734 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
735 pass
735 pass
736
736
737 if is_file:
737 if is_file:
738 history = commit.get_path_history(f_path)
738 history = commit.get_path_history(f_path)
739 prev_commit_id = history[1].raw_id \
739 prev_commit_id = history[1].raw_id \
740 if len(history) > 1 else prev_commit_id
740 if len(history) > 1 else prev_commit_id
741 prev_url = h.route_path(
741 prev_url = h.route_path(
742 'repo_files:annotated', repo_name=self.db_repo_name,
742 'repo_files:annotated', repo_name=self.db_repo_name,
743 commit_id=prev_commit_id, f_path=f_path,
743 commit_id=prev_commit_id, f_path=f_path,
744 _anchor='L{}'.format(line_anchor))
744 _anchor='L{}'.format(line_anchor))
745
745
746 raise HTTPFound(prev_url)
746 raise HTTPFound(prev_url)
747
747
748 @LoginRequired()
748 @LoginRequired()
749 @HasRepoPermissionAnyDecorator(
749 @HasRepoPermissionAnyDecorator(
750 'repository.read', 'repository.write', 'repository.admin')
750 'repository.read', 'repository.write', 'repository.admin')
751 @view_config(
751 @view_config(
752 route_name='repo_nodetree_full', request_method='GET',
752 route_name='repo_nodetree_full', request_method='GET',
753 renderer=None, xhr=True)
753 renderer=None, xhr=True)
754 @view_config(
754 @view_config(
755 route_name='repo_nodetree_full:default_path', request_method='GET',
755 route_name='repo_nodetree_full:default_path', request_method='GET',
756 renderer=None, xhr=True)
756 renderer=None, xhr=True)
757 def repo_nodetree_full(self):
757 def repo_nodetree_full(self):
758 """
758 """
759 Returns rendered html of file tree that contains commit date,
759 Returns rendered html of file tree that contains commit date,
760 author, commit_id for the specified combination of
760 author, commit_id for the specified combination of
761 repo, commit_id and file path
761 repo, commit_id and file path
762 """
762 """
763 c = self.load_default_context()
763 c = self.load_default_context()
764
764
765 commit_id, f_path = self._get_commit_and_path()
765 commit_id, f_path = self._get_commit_and_path()
766 commit = self._get_commit_or_redirect(commit_id)
766 commit = self._get_commit_or_redirect(commit_id)
767 try:
767 try:
768 dir_node = commit.get_node(f_path)
768 dir_node = commit.get_node(f_path)
769 except RepositoryError as e:
769 except RepositoryError as e:
770 return Response('error: {}'.format(h.escape(safe_str(e))))
770 return Response('error: {}'.format(h.escape(safe_str(e))))
771
771
772 if dir_node.is_file():
772 if dir_node.is_file():
773 return Response('')
773 return Response('')
774
774
775 c.file = dir_node
775 c.file = dir_node
776 c.commit = commit
776 c.commit = commit
777
777
778 html = self._get_tree_at_commit(
778 html = self._get_tree_at_commit(
779 c, commit.raw_id, dir_node.path, full_load=True)
779 c, commit.raw_id, dir_node.path, full_load=True)
780
780
781 return Response(html)
781 return Response(html)
782
782
783 def _get_attachement_headers(self, f_path):
783 def _get_attachement_headers(self, f_path):
784 f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
784 f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
785 safe_path = f_name.replace('"', '\\"')
785 safe_path = f_name.replace('"', '\\"')
786 encoded_path = urllib.quote(f_name)
786 encoded_path = urllib.quote(f_name)
787
787
788 return "attachment; " \
788 return "attachment; " \
789 "filename=\"{}\"; " \
789 "filename=\"{}\"; " \
790 "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)
790 "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)
791
791
792 @LoginRequired()
792 @LoginRequired()
793 @HasRepoPermissionAnyDecorator(
793 @HasRepoPermissionAnyDecorator(
794 'repository.read', 'repository.write', 'repository.admin')
794 'repository.read', 'repository.write', 'repository.admin')
795 @view_config(
795 @view_config(
796 route_name='repo_file_raw', request_method='GET',
796 route_name='repo_file_raw', request_method='GET',
797 renderer=None)
797 renderer=None)
798 def repo_file_raw(self):
798 def repo_file_raw(self):
799 """
799 """
800 Action for show as raw, some mimetypes are "rendered",
800 Action for show as raw, some mimetypes are "rendered",
801 those include images, icons.
801 those include images, icons.
802 """
802 """
803 c = self.load_default_context()
803 c = self.load_default_context()
804
804
805 commit_id, f_path = self._get_commit_and_path()
805 commit_id, f_path = self._get_commit_and_path()
806 commit = self._get_commit_or_redirect(commit_id)
806 commit = self._get_commit_or_redirect(commit_id)
807 file_node = self._get_filenode_or_redirect(commit, f_path)
807 file_node = self._get_filenode_or_redirect(commit, f_path)
808
808
809 raw_mimetype_mapping = {
809 raw_mimetype_mapping = {
810 # map original mimetype to a mimetype used for "show as raw"
810 # map original mimetype to a mimetype used for "show as raw"
811 # you can also provide a content-disposition to override the
811 # you can also provide a content-disposition to override the
812 # default "attachment" disposition.
812 # default "attachment" disposition.
813 # orig_type: (new_type, new_dispo)
813 # orig_type: (new_type, new_dispo)
814
814
815 # show images inline:
815 # show images inline:
816 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
816 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
817 # for example render an SVG with javascript inside or even render
817 # for example render an SVG with javascript inside or even render
818 # HTML.
818 # HTML.
819 'image/x-icon': ('image/x-icon', 'inline'),
819 'image/x-icon': ('image/x-icon', 'inline'),
820 'image/png': ('image/png', 'inline'),
820 'image/png': ('image/png', 'inline'),
821 'image/gif': ('image/gif', 'inline'),
821 'image/gif': ('image/gif', 'inline'),
822 'image/jpeg': ('image/jpeg', 'inline'),
822 'image/jpeg': ('image/jpeg', 'inline'),
823 'application/pdf': ('application/pdf', 'inline'),
823 'application/pdf': ('application/pdf', 'inline'),
824 }
824 }
825
825
826 mimetype = file_node.mimetype
826 mimetype = file_node.mimetype
827 try:
827 try:
828 mimetype, disposition = raw_mimetype_mapping[mimetype]
828 mimetype, disposition = raw_mimetype_mapping[mimetype]
829 except KeyError:
829 except KeyError:
830 # we don't know anything special about this, handle it safely
830 # we don't know anything special about this, handle it safely
831 if file_node.is_binary:
831 if file_node.is_binary:
832 # do same as download raw for binary files
832 # do same as download raw for binary files
833 mimetype, disposition = 'application/octet-stream', 'attachment'
833 mimetype, disposition = 'application/octet-stream', 'attachment'
834 else:
834 else:
835 # do not just use the original mimetype, but force text/plain,
835 # do not just use the original mimetype, but force text/plain,
836 # otherwise it would serve text/html and that might be unsafe.
836 # otherwise it would serve text/html and that might be unsafe.
837 # Note: underlying vcs library fakes text/plain mimetype if the
837 # Note: underlying vcs library fakes text/plain mimetype if the
838 # mimetype can not be determined and it thinks it is not
838 # mimetype can not be determined and it thinks it is not
839 # binary.This might lead to erroneous text display in some
839 # binary.This might lead to erroneous text display in some
840 # cases, but helps in other cases, like with text files
840 # cases, but helps in other cases, like with text files
841 # without extension.
841 # without extension.
842 mimetype, disposition = 'text/plain', 'inline'
842 mimetype, disposition = 'text/plain', 'inline'
843
843
844 if disposition == 'attachment':
844 if disposition == 'attachment':
845 disposition = self._get_attachement_headers(f_path)
845 disposition = self._get_attachement_headers(f_path)
846
846
847 def stream_node():
847 stream_content = file_node.stream_bytes()
848 yield file_node.raw_bytes
849
848
850 response = Response(app_iter=stream_node())
849 response = Response(app_iter=stream_content)
851 response.content_disposition = disposition
850 response.content_disposition = disposition
852 response.content_type = mimetype
851 response.content_type = mimetype
853
852
854 charset = self._get_default_encoding(c)
853 charset = self._get_default_encoding(c)
855 if charset:
854 if charset:
856 response.charset = charset
855 response.charset = charset
857
856
858 return response
857 return response
859
858
860 @LoginRequired()
859 @LoginRequired()
861 @HasRepoPermissionAnyDecorator(
860 @HasRepoPermissionAnyDecorator(
862 'repository.read', 'repository.write', 'repository.admin')
861 'repository.read', 'repository.write', 'repository.admin')
863 @view_config(
862 @view_config(
864 route_name='repo_file_download', request_method='GET',
863 route_name='repo_file_download', request_method='GET',
865 renderer=None)
864 renderer=None)
866 @view_config(
865 @view_config(
867 route_name='repo_file_download:legacy', request_method='GET',
866 route_name='repo_file_download:legacy', request_method='GET',
868 renderer=None)
867 renderer=None)
869 def repo_file_download(self):
868 def repo_file_download(self):
870 c = self.load_default_context()
869 c = self.load_default_context()
871
870
872 commit_id, f_path = self._get_commit_and_path()
871 commit_id, f_path = self._get_commit_and_path()
873 commit = self._get_commit_or_redirect(commit_id)
872 commit = self._get_commit_or_redirect(commit_id)
874 file_node = self._get_filenode_or_redirect(commit, f_path)
873 file_node = self._get_filenode_or_redirect(commit, f_path)
875
874
876 if self.request.GET.get('lf'):
875 if self.request.GET.get('lf'):
877 # only if lf get flag is passed, we download this file
876 # only if lf get flag is passed, we download this file
878 # as LFS/Largefile
877 # as LFS/Largefile
879 lf_node = file_node.get_largefile_node()
878 lf_node = file_node.get_largefile_node()
880 if lf_node:
879 if lf_node:
881 # overwrite our pointer with the REAL large-file
880 # overwrite our pointer with the REAL large-file
882 file_node = lf_node
881 file_node = lf_node
883
882
884 disposition = self._get_attachement_headers(f_path)
883 disposition = self._get_attachement_headers(f_path)
885
884
886 def stream_node():
885 stream_content = file_node.stream_bytes()
887 yield file_node.raw_bytes
888
886
889 response = Response(app_iter=stream_node())
887 response = Response(app_iter=stream_content)
890 response.content_disposition = disposition
888 response.content_disposition = disposition
891 response.content_type = file_node.mimetype
889 response.content_type = file_node.mimetype
892
890
893 charset = self._get_default_encoding(c)
891 charset = self._get_default_encoding(c)
894 if charset:
892 if charset:
895 response.charset = charset
893 response.charset = charset
896
894
897 return response
895 return response
898
896
899 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
897 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
900
898
901 cache_seconds = safe_int(
899 cache_seconds = safe_int(
902 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
900 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
903 cache_on = cache_seconds > 0
901 cache_on = cache_seconds > 0
904 log.debug(
902 log.debug(
905 'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s`'
903 'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s`'
906 'with caching: %s[TTL: %ss]' % (
904 'with caching: %s[TTL: %ss]' % (
907 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
905 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
908
906
909 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
907 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
910 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
908 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
911
909
912 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
910 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
913 condition=cache_on)
911 condition=cache_on)
914 def compute_file_search(repo_id, commit_id, f_path):
912 def compute_file_search(repo_id, commit_id, f_path):
915 log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
913 log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
916 repo_id, commit_id, f_path)
914 repo_id, commit_id, f_path)
917 try:
915 try:
918 _d, _f = ScmModel().get_nodes(
916 _d, _f = ScmModel().get_nodes(
919 repo_name, commit_id, f_path, flat=False)
917 repo_name, commit_id, f_path, flat=False)
920 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
918 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
921 log.exception(safe_str(e))
919 log.exception(safe_str(e))
922 h.flash(safe_str(h.escape(e)), category='error')
920 h.flash(safe_str(h.escape(e)), category='error')
923 raise HTTPFound(h.route_path(
921 raise HTTPFound(h.route_path(
924 'repo_files', repo_name=self.db_repo_name,
922 'repo_files', repo_name=self.db_repo_name,
925 commit_id='tip', f_path='/'))
923 commit_id='tip', f_path='/'))
926
924
927 return _d + _f
925 return _d + _f
928
926
929 result = compute_file_search(self.db_repo.repo_id, commit_id, f_path)
927 result = compute_file_search(self.db_repo.repo_id, commit_id, f_path)
930 return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)
928 return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)
931
929
932 @LoginRequired()
930 @LoginRequired()
933 @HasRepoPermissionAnyDecorator(
931 @HasRepoPermissionAnyDecorator(
934 'repository.read', 'repository.write', 'repository.admin')
932 'repository.read', 'repository.write', 'repository.admin')
935 @view_config(
933 @view_config(
936 route_name='repo_files_nodelist', request_method='GET',
934 route_name='repo_files_nodelist', request_method='GET',
937 renderer='json_ext', xhr=True)
935 renderer='json_ext', xhr=True)
938 def repo_nodelist(self):
936 def repo_nodelist(self):
939 self.load_default_context()
937 self.load_default_context()
940
938
941 commit_id, f_path = self._get_commit_and_path()
939 commit_id, f_path = self._get_commit_and_path()
942 commit = self._get_commit_or_redirect(commit_id)
940 commit = self._get_commit_or_redirect(commit_id)
943
941
944 metadata = self._get_nodelist_at_commit(
942 metadata = self._get_nodelist_at_commit(
945 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
943 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
946 return {'nodes': metadata}
944 return {'nodes': metadata}
947
945
948 def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
946 def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
949 items = []
947 items = []
950 for name, commit_id in branches_or_tags.items():
948 for name, commit_id in branches_or_tags.items():
951 sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
949 sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
952 items.append((sym_ref, name, ref_type))
950 items.append((sym_ref, name, ref_type))
953 return items
951 return items
954
952
955 def _symbolic_reference(self, commit_id, name, f_path, ref_type):
953 def _symbolic_reference(self, commit_id, name, f_path, ref_type):
956 return commit_id
954 return commit_id
957
955
958 def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
956 def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
959 new_f_path = vcspath.join(name, f_path)
957 new_f_path = vcspath.join(name, f_path)
960 return u'%s@%s' % (new_f_path, commit_id)
958 return u'%s@%s' % (new_f_path, commit_id)
961
959
962 def _get_node_history(self, commit_obj, f_path, commits=None):
960 def _get_node_history(self, commit_obj, f_path, commits=None):
963 """
961 """
964 get commit history for given node
962 get commit history for given node
965
963
966 :param commit_obj: commit to calculate history
964 :param commit_obj: commit to calculate history
967 :param f_path: path for node to calculate history for
965 :param f_path: path for node to calculate history for
968 :param commits: if passed don't calculate history and take
966 :param commits: if passed don't calculate history and take
969 commits defined in this list
967 commits defined in this list
970 """
968 """
971 _ = self.request.translate
969 _ = self.request.translate
972
970
973 # calculate history based on tip
971 # calculate history based on tip
974 tip = self.rhodecode_vcs_repo.get_commit()
972 tip = self.rhodecode_vcs_repo.get_commit()
975 if commits is None:
973 if commits is None:
976 pre_load = ["author", "branch"]
974 pre_load = ["author", "branch"]
977 try:
975 try:
978 commits = tip.get_path_history(f_path, pre_load=pre_load)
976 commits = tip.get_path_history(f_path, pre_load=pre_load)
979 except (NodeDoesNotExistError, CommitError):
977 except (NodeDoesNotExistError, CommitError):
980 # this node is not present at tip!
978 # this node is not present at tip!
981 commits = commit_obj.get_path_history(f_path, pre_load=pre_load)
979 commits = commit_obj.get_path_history(f_path, pre_load=pre_load)
982
980
983 history = []
981 history = []
984 commits_group = ([], _("Changesets"))
982 commits_group = ([], _("Changesets"))
985 for commit in commits:
983 for commit in commits:
986 branch = ' (%s)' % commit.branch if commit.branch else ''
984 branch = ' (%s)' % commit.branch if commit.branch else ''
987 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
985 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
988 commits_group[0].append((commit.raw_id, n_desc, 'sha'))
986 commits_group[0].append((commit.raw_id, n_desc, 'sha'))
989 history.append(commits_group)
987 history.append(commits_group)
990
988
991 symbolic_reference = self._symbolic_reference
989 symbolic_reference = self._symbolic_reference
992
990
993 if self.rhodecode_vcs_repo.alias == 'svn':
991 if self.rhodecode_vcs_repo.alias == 'svn':
994 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
992 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
995 f_path, self.rhodecode_vcs_repo)
993 f_path, self.rhodecode_vcs_repo)
996 if adjusted_f_path != f_path:
994 if adjusted_f_path != f_path:
997 log.debug(
995 log.debug(
998 'Recognized svn tag or branch in file "%s", using svn '
996 'Recognized svn tag or branch in file "%s", using svn '
999 'specific symbolic references', f_path)
997 'specific symbolic references', f_path)
1000 f_path = adjusted_f_path
998 f_path = adjusted_f_path
1001 symbolic_reference = self._symbolic_reference_svn
999 symbolic_reference = self._symbolic_reference_svn
1002
1000
1003 branches = self._create_references(
1001 branches = self._create_references(
1004 self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch')
1002 self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch')
1005 branches_group = (branches, _("Branches"))
1003 branches_group = (branches, _("Branches"))
1006
1004
1007 tags = self._create_references(
1005 tags = self._create_references(
1008 self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag')
1006 self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag')
1009 tags_group = (tags, _("Tags"))
1007 tags_group = (tags, _("Tags"))
1010
1008
1011 history.append(branches_group)
1009 history.append(branches_group)
1012 history.append(tags_group)
1010 history.append(tags_group)
1013
1011
1014 return history, commits
1012 return history, commits
1015
1013
1016 @LoginRequired()
1014 @LoginRequired()
1017 @HasRepoPermissionAnyDecorator(
1015 @HasRepoPermissionAnyDecorator(
1018 'repository.read', 'repository.write', 'repository.admin')
1016 'repository.read', 'repository.write', 'repository.admin')
1019 @view_config(
1017 @view_config(
1020 route_name='repo_file_history', request_method='GET',
1018 route_name='repo_file_history', request_method='GET',
1021 renderer='json_ext')
1019 renderer='json_ext')
1022 def repo_file_history(self):
1020 def repo_file_history(self):
1023 self.load_default_context()
1021 self.load_default_context()
1024
1022
1025 commit_id, f_path = self._get_commit_and_path()
1023 commit_id, f_path = self._get_commit_and_path()
1026 commit = self._get_commit_or_redirect(commit_id)
1024 commit = self._get_commit_or_redirect(commit_id)
1027 file_node = self._get_filenode_or_redirect(commit, f_path)
1025 file_node = self._get_filenode_or_redirect(commit, f_path)
1028
1026
1029 if file_node.is_file():
1027 if file_node.is_file():
1030 file_history, _hist = self._get_node_history(commit, f_path)
1028 file_history, _hist = self._get_node_history(commit, f_path)
1031
1029
1032 res = []
1030 res = []
1033 for obj in file_history:
1031 for obj in file_history:
1034 res.append({
1032 res.append({
1035 'text': obj[1],
1033 'text': obj[1],
1036 'children': [{'id': o[0], 'text': o[1], 'type': o[2]} for o in obj[0]]
1034 'children': [{'id': o[0], 'text': o[1], 'type': o[2]} for o in obj[0]]
1037 })
1035 })
1038
1036
1039 data = {
1037 data = {
1040 'more': False,
1038 'more': False,
1041 'results': res
1039 'results': res
1042 }
1040 }
1043 return data
1041 return data
1044
1042
1045 log.warning('Cannot fetch history for directory')
1043 log.warning('Cannot fetch history for directory')
1046 raise HTTPBadRequest()
1044 raise HTTPBadRequest()
1047
1045
1048 @LoginRequired()
1046 @LoginRequired()
1049 @HasRepoPermissionAnyDecorator(
1047 @HasRepoPermissionAnyDecorator(
1050 'repository.read', 'repository.write', 'repository.admin')
1048 'repository.read', 'repository.write', 'repository.admin')
1051 @view_config(
1049 @view_config(
1052 route_name='repo_file_authors', request_method='GET',
1050 route_name='repo_file_authors', request_method='GET',
1053 renderer='rhodecode:templates/files/file_authors_box.mako')
1051 renderer='rhodecode:templates/files/file_authors_box.mako')
1054 def repo_file_authors(self):
1052 def repo_file_authors(self):
1055 c = self.load_default_context()
1053 c = self.load_default_context()
1056
1054
1057 commit_id, f_path = self._get_commit_and_path()
1055 commit_id, f_path = self._get_commit_and_path()
1058 commit = self._get_commit_or_redirect(commit_id)
1056 commit = self._get_commit_or_redirect(commit_id)
1059 file_node = self._get_filenode_or_redirect(commit, f_path)
1057 file_node = self._get_filenode_or_redirect(commit, f_path)
1060
1058
1061 if not file_node.is_file():
1059 if not file_node.is_file():
1062 raise HTTPBadRequest()
1060 raise HTTPBadRequest()
1063
1061
1064 c.file_last_commit = file_node.last_commit
1062 c.file_last_commit = file_node.last_commit
1065 if self.request.GET.get('annotate') == '1':
1063 if self.request.GET.get('annotate') == '1':
1066 # use _hist from annotation if annotation mode is on
1064 # use _hist from annotation if annotation mode is on
1067 commit_ids = set(x[1] for x in file_node.annotate)
1065 commit_ids = set(x[1] for x in file_node.annotate)
1068 _hist = (
1066 _hist = (
1069 self.rhodecode_vcs_repo.get_commit(commit_id)
1067 self.rhodecode_vcs_repo.get_commit(commit_id)
1070 for commit_id in commit_ids)
1068 for commit_id in commit_ids)
1071 else:
1069 else:
1072 _f_history, _hist = self._get_node_history(commit, f_path)
1070 _f_history, _hist = self._get_node_history(commit, f_path)
1073 c.file_author = False
1071 c.file_author = False
1074
1072
1075 unique = collections.OrderedDict()
1073 unique = collections.OrderedDict()
1076 for commit in _hist:
1074 for commit in _hist:
1077 author = commit.author
1075 author = commit.author
1078 if author not in unique:
1076 if author not in unique:
1079 unique[commit.author] = [
1077 unique[commit.author] = [
1080 h.email(author),
1078 h.email(author),
1081 h.person(author, 'username_or_name_or_email'),
1079 h.person(author, 'username_or_name_or_email'),
1082 1 # counter
1080 1 # counter
1083 ]
1081 ]
1084
1082
1085 else:
1083 else:
1086 # increase counter
1084 # increase counter
1087 unique[commit.author][2] += 1
1085 unique[commit.author][2] += 1
1088
1086
1089 c.authors = [val for val in unique.values()]
1087 c.authors = [val for val in unique.values()]
1090
1088
1091 return self._get_template_context(c)
1089 return self._get_template_context(c)
1092
1090
1093 @LoginRequired()
1091 @LoginRequired()
1094 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1092 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1095 @view_config(
1093 @view_config(
1096 route_name='repo_files_remove_file', request_method='GET',
1094 route_name='repo_files_remove_file', request_method='GET',
1097 renderer='rhodecode:templates/files/files_delete.mako')
1095 renderer='rhodecode:templates/files/files_delete.mako')
1098 def repo_files_remove_file(self):
1096 def repo_files_remove_file(self):
1099 _ = self.request.translate
1097 _ = self.request.translate
1100 c = self.load_default_context()
1098 c = self.load_default_context()
1101 commit_id, f_path = self._get_commit_and_path()
1099 commit_id, f_path = self._get_commit_and_path()
1102
1100
1103 self._ensure_not_locked()
1101 self._ensure_not_locked()
1104 _branch_name, _sha_commit_id, is_head = \
1102 _branch_name, _sha_commit_id, is_head = \
1105 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1103 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1106
1104
1107 self.forbid_non_head(is_head, f_path)
1105 self.forbid_non_head(is_head, f_path)
1108 self.check_branch_permission(_branch_name)
1106 self.check_branch_permission(_branch_name)
1109
1107
1110 c.commit = self._get_commit_or_redirect(commit_id)
1108 c.commit = self._get_commit_or_redirect(commit_id)
1111 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1109 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1112
1110
1113 c.default_message = _(
1111 c.default_message = _(
1114 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1112 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1115 c.f_path = f_path
1113 c.f_path = f_path
1116
1114
1117 return self._get_template_context(c)
1115 return self._get_template_context(c)
1118
1116
1119 @LoginRequired()
1117 @LoginRequired()
1120 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1118 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1121 @CSRFRequired()
1119 @CSRFRequired()
1122 @view_config(
1120 @view_config(
1123 route_name='repo_files_delete_file', request_method='POST',
1121 route_name='repo_files_delete_file', request_method='POST',
1124 renderer=None)
1122 renderer=None)
1125 def repo_files_delete_file(self):
1123 def repo_files_delete_file(self):
1126 _ = self.request.translate
1124 _ = self.request.translate
1127
1125
1128 c = self.load_default_context()
1126 c = self.load_default_context()
1129 commit_id, f_path = self._get_commit_and_path()
1127 commit_id, f_path = self._get_commit_and_path()
1130
1128
1131 self._ensure_not_locked()
1129 self._ensure_not_locked()
1132 _branch_name, _sha_commit_id, is_head = \
1130 _branch_name, _sha_commit_id, is_head = \
1133 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1131 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1134
1132
1135 self.forbid_non_head(is_head, f_path)
1133 self.forbid_non_head(is_head, f_path)
1136 self.check_branch_permission(_branch_name)
1134 self.check_branch_permission(_branch_name)
1137
1135
1138 c.commit = self._get_commit_or_redirect(commit_id)
1136 c.commit = self._get_commit_or_redirect(commit_id)
1139 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1137 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1140
1138
1141 c.default_message = _(
1139 c.default_message = _(
1142 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1140 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1143 c.f_path = f_path
1141 c.f_path = f_path
1144 node_path = f_path
1142 node_path = f_path
1145 author = self._rhodecode_db_user.full_contact
1143 author = self._rhodecode_db_user.full_contact
1146 message = self.request.POST.get('message') or c.default_message
1144 message = self.request.POST.get('message') or c.default_message
1147 try:
1145 try:
1148 nodes = {
1146 nodes = {
1149 node_path: {
1147 node_path: {
1150 'content': ''
1148 'content': ''
1151 }
1149 }
1152 }
1150 }
1153 ScmModel().delete_nodes(
1151 ScmModel().delete_nodes(
1154 user=self._rhodecode_db_user.user_id, repo=self.db_repo,
1152 user=self._rhodecode_db_user.user_id, repo=self.db_repo,
1155 message=message,
1153 message=message,
1156 nodes=nodes,
1154 nodes=nodes,
1157 parent_commit=c.commit,
1155 parent_commit=c.commit,
1158 author=author,
1156 author=author,
1159 )
1157 )
1160
1158
1161 h.flash(
1159 h.flash(
1162 _('Successfully deleted file `{}`').format(
1160 _('Successfully deleted file `{}`').format(
1163 h.escape(f_path)), category='success')
1161 h.escape(f_path)), category='success')
1164 except Exception:
1162 except Exception:
1165 log.exception('Error during commit operation')
1163 log.exception('Error during commit operation')
1166 h.flash(_('Error occurred during commit'), category='error')
1164 h.flash(_('Error occurred during commit'), category='error')
1167 raise HTTPFound(
1165 raise HTTPFound(
1168 h.route_path('repo_commit', repo_name=self.db_repo_name,
1166 h.route_path('repo_commit', repo_name=self.db_repo_name,
1169 commit_id='tip'))
1167 commit_id='tip'))
1170
1168
1171 @LoginRequired()
1169 @LoginRequired()
1172 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1170 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1173 @view_config(
1171 @view_config(
1174 route_name='repo_files_edit_file', request_method='GET',
1172 route_name='repo_files_edit_file', request_method='GET',
1175 renderer='rhodecode:templates/files/files_edit.mako')
1173 renderer='rhodecode:templates/files/files_edit.mako')
1176 def repo_files_edit_file(self):
1174 def repo_files_edit_file(self):
1177 _ = self.request.translate
1175 _ = self.request.translate
1178 c = self.load_default_context()
1176 c = self.load_default_context()
1179 commit_id, f_path = self._get_commit_and_path()
1177 commit_id, f_path = self._get_commit_and_path()
1180
1178
1181 self._ensure_not_locked()
1179 self._ensure_not_locked()
1182 _branch_name, _sha_commit_id, is_head = \
1180 _branch_name, _sha_commit_id, is_head = \
1183 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1181 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1184
1182
1185 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1183 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1186 self.check_branch_permission(_branch_name, commit_id=commit_id)
1184 self.check_branch_permission(_branch_name, commit_id=commit_id)
1187
1185
1188 c.commit = self._get_commit_or_redirect(commit_id)
1186 c.commit = self._get_commit_or_redirect(commit_id)
1189 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1187 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1190
1188
1191 if c.file.is_binary:
1189 if c.file.is_binary:
1192 files_url = h.route_path(
1190 files_url = h.route_path(
1193 'repo_files',
1191 'repo_files',
1194 repo_name=self.db_repo_name,
1192 repo_name=self.db_repo_name,
1195 commit_id=c.commit.raw_id, f_path=f_path)
1193 commit_id=c.commit.raw_id, f_path=f_path)
1196 raise HTTPFound(files_url)
1194 raise HTTPFound(files_url)
1197
1195
1198 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1196 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1199 c.f_path = f_path
1197 c.f_path = f_path
1200
1198
1201 return self._get_template_context(c)
1199 return self._get_template_context(c)
1202
1200
1203 @LoginRequired()
1201 @LoginRequired()
1204 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1202 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1205 @CSRFRequired()
1203 @CSRFRequired()
1206 @view_config(
1204 @view_config(
1207 route_name='repo_files_update_file', request_method='POST',
1205 route_name='repo_files_update_file', request_method='POST',
1208 renderer=None)
1206 renderer=None)
1209 def repo_files_update_file(self):
1207 def repo_files_update_file(self):
1210 _ = self.request.translate
1208 _ = self.request.translate
1211 c = self.load_default_context()
1209 c = self.load_default_context()
1212 commit_id, f_path = self._get_commit_and_path()
1210 commit_id, f_path = self._get_commit_and_path()
1213
1211
1214 self._ensure_not_locked()
1212 self._ensure_not_locked()
1215
1213
1216 c.commit = self._get_commit_or_redirect(commit_id)
1214 c.commit = self._get_commit_or_redirect(commit_id)
1217 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1215 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1218
1216
1219 if c.file.is_binary:
1217 if c.file.is_binary:
1220 raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
1218 raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
1221 commit_id=c.commit.raw_id, f_path=f_path))
1219 commit_id=c.commit.raw_id, f_path=f_path))
1222
1220
1223 _branch_name, _sha_commit_id, is_head = \
1221 _branch_name, _sha_commit_id, is_head = \
1224 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1222 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1225
1223
1226 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1224 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1227 self.check_branch_permission(_branch_name, commit_id=commit_id)
1225 self.check_branch_permission(_branch_name, commit_id=commit_id)
1228
1226
1229 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1227 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1230 c.f_path = f_path
1228 c.f_path = f_path
1231
1229
1232 old_content = c.file.content
1230 old_content = c.file.content
1233 sl = old_content.splitlines(1)
1231 sl = old_content.splitlines(1)
1234 first_line = sl[0] if sl else ''
1232 first_line = sl[0] if sl else ''
1235
1233
1236 r_post = self.request.POST
1234 r_post = self.request.POST
1237 # line endings: 0 - Unix, 1 - Mac, 2 - DOS
1235 # line endings: 0 - Unix, 1 - Mac, 2 - DOS
1238 line_ending_mode = detect_mode(first_line, 0)
1236 line_ending_mode = detect_mode(first_line, 0)
1239 content = convert_line_endings(r_post.get('content', ''), line_ending_mode)
1237 content = convert_line_endings(r_post.get('content', ''), line_ending_mode)
1240
1238
1241 message = r_post.get('message') or c.default_message
1239 message = r_post.get('message') or c.default_message
1242 org_node_path = c.file.unicode_path
1240 org_node_path = c.file.unicode_path
1243 filename = r_post['filename']
1241 filename = r_post['filename']
1244
1242
1245 root_path = c.file.dir_path
1243 root_path = c.file.dir_path
1246 pure_path = self.create_pure_path(root_path, filename)
1244 pure_path = self.create_pure_path(root_path, filename)
1247 node_path = safe_unicode(bytes(pure_path))
1245 node_path = safe_unicode(bytes(pure_path))
1248
1246
1249 default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
1247 default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
1250 commit_id=commit_id)
1248 commit_id=commit_id)
1251 if content == old_content and node_path == org_node_path:
1249 if content == old_content and node_path == org_node_path:
1252 h.flash(_('No changes detected on {}').format(org_node_path),
1250 h.flash(_('No changes detected on {}').format(org_node_path),
1253 category='warning')
1251 category='warning')
1254 raise HTTPFound(default_redirect_url)
1252 raise HTTPFound(default_redirect_url)
1255
1253
1256 try:
1254 try:
1257 mapping = {
1255 mapping = {
1258 org_node_path: {
1256 org_node_path: {
1259 'org_filename': org_node_path,
1257 'org_filename': org_node_path,
1260 'filename': node_path,
1258 'filename': node_path,
1261 'content': content,
1259 'content': content,
1262 'lexer': '',
1260 'lexer': '',
1263 'op': 'mod',
1261 'op': 'mod',
1264 'mode': c.file.mode
1262 'mode': c.file.mode
1265 }
1263 }
1266 }
1264 }
1267
1265
1268 commit = ScmModel().update_nodes(
1266 commit = ScmModel().update_nodes(
1269 user=self._rhodecode_db_user.user_id,
1267 user=self._rhodecode_db_user.user_id,
1270 repo=self.db_repo,
1268 repo=self.db_repo,
1271 message=message,
1269 message=message,
1272 nodes=mapping,
1270 nodes=mapping,
1273 parent_commit=c.commit,
1271 parent_commit=c.commit,
1274 )
1272 )
1275
1273
1276 h.flash(_('Successfully committed changes to file `{}`').format(
1274 h.flash(_('Successfully committed changes to file `{}`').format(
1277 h.escape(f_path)), category='success')
1275 h.escape(f_path)), category='success')
1278 default_redirect_url = h.route_path(
1276 default_redirect_url = h.route_path(
1279 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1277 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1280
1278
1281 except Exception:
1279 except Exception:
1282 log.exception('Error occurred during commit')
1280 log.exception('Error occurred during commit')
1283 h.flash(_('Error occurred during commit'), category='error')
1281 h.flash(_('Error occurred during commit'), category='error')
1284
1282
1285 raise HTTPFound(default_redirect_url)
1283 raise HTTPFound(default_redirect_url)
1286
1284
1287 @LoginRequired()
1285 @LoginRequired()
1288 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1286 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1289 @view_config(
1287 @view_config(
1290 route_name='repo_files_add_file', request_method='GET',
1288 route_name='repo_files_add_file', request_method='GET',
1291 renderer='rhodecode:templates/files/files_add.mako')
1289 renderer='rhodecode:templates/files/files_add.mako')
1292 @view_config(
1290 @view_config(
1293 route_name='repo_files_upload_file', request_method='GET',
1291 route_name='repo_files_upload_file', request_method='GET',
1294 renderer='rhodecode:templates/files/files_upload.mako')
1292 renderer='rhodecode:templates/files/files_upload.mako')
1295 def repo_files_add_file(self):
1293 def repo_files_add_file(self):
1296 _ = self.request.translate
1294 _ = self.request.translate
1297 c = self.load_default_context()
1295 c = self.load_default_context()
1298 commit_id, f_path = self._get_commit_and_path()
1296 commit_id, f_path = self._get_commit_and_path()
1299
1297
1300 self._ensure_not_locked()
1298 self._ensure_not_locked()
1301
1299
1302 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1300 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1303 if c.commit is None:
1301 if c.commit is None:
1304 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1302 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1305
1303
1306 if self.rhodecode_vcs_repo.is_empty():
1304 if self.rhodecode_vcs_repo.is_empty():
1307 # for empty repository we cannot check for current branch, we rely on
1305 # for empty repository we cannot check for current branch, we rely on
1308 # c.commit.branch instead
1306 # c.commit.branch instead
1309 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1307 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1310 else:
1308 else:
1311 _branch_name, _sha_commit_id, is_head = \
1309 _branch_name, _sha_commit_id, is_head = \
1312 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1310 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1313
1311
1314 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1312 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1315 self.check_branch_permission(_branch_name, commit_id=commit_id)
1313 self.check_branch_permission(_branch_name, commit_id=commit_id)
1316
1314
1317 c.default_message = (_('Added file via RhodeCode Enterprise'))
1315 c.default_message = (_('Added file via RhodeCode Enterprise'))
1318 c.f_path = f_path.lstrip('/') # ensure not relative path
1316 c.f_path = f_path.lstrip('/') # ensure not relative path
1319
1317
1320 return self._get_template_context(c)
1318 return self._get_template_context(c)
1321
1319
1322 @LoginRequired()
1320 @LoginRequired()
1323 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1321 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1324 @CSRFRequired()
1322 @CSRFRequired()
1325 @view_config(
1323 @view_config(
1326 route_name='repo_files_create_file', request_method='POST',
1324 route_name='repo_files_create_file', request_method='POST',
1327 renderer=None)
1325 renderer=None)
1328 def repo_files_create_file(self):
1326 def repo_files_create_file(self):
1329 _ = self.request.translate
1327 _ = self.request.translate
1330 c = self.load_default_context()
1328 c = self.load_default_context()
1331 commit_id, f_path = self._get_commit_and_path()
1329 commit_id, f_path = self._get_commit_and_path()
1332
1330
1333 self._ensure_not_locked()
1331 self._ensure_not_locked()
1334
1332
1335 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1333 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1336 if c.commit is None:
1334 if c.commit is None:
1337 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1335 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1338
1336
1339 # calculate redirect URL
1337 # calculate redirect URL
1340 if self.rhodecode_vcs_repo.is_empty():
1338 if self.rhodecode_vcs_repo.is_empty():
1341 default_redirect_url = h.route_path(
1339 default_redirect_url = h.route_path(
1342 'repo_summary', repo_name=self.db_repo_name)
1340 'repo_summary', repo_name=self.db_repo_name)
1343 else:
1341 else:
1344 default_redirect_url = h.route_path(
1342 default_redirect_url = h.route_path(
1345 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1343 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1346
1344
1347 if self.rhodecode_vcs_repo.is_empty():
1345 if self.rhodecode_vcs_repo.is_empty():
1348 # for empty repository we cannot check for current branch, we rely on
1346 # for empty repository we cannot check for current branch, we rely on
1349 # c.commit.branch instead
1347 # c.commit.branch instead
1350 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1348 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1351 else:
1349 else:
1352 _branch_name, _sha_commit_id, is_head = \
1350 _branch_name, _sha_commit_id, is_head = \
1353 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1351 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1354
1352
1355 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1353 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1356 self.check_branch_permission(_branch_name, commit_id=commit_id)
1354 self.check_branch_permission(_branch_name, commit_id=commit_id)
1357
1355
1358 c.default_message = (_('Added file via RhodeCode Enterprise'))
1356 c.default_message = (_('Added file via RhodeCode Enterprise'))
1359 c.f_path = f_path
1357 c.f_path = f_path
1360
1358
1361 r_post = self.request.POST
1359 r_post = self.request.POST
1362 message = r_post.get('message') or c.default_message
1360 message = r_post.get('message') or c.default_message
1363 filename = r_post.get('filename')
1361 filename = r_post.get('filename')
1364 unix_mode = 0
1362 unix_mode = 0
1365 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1363 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1366
1364
1367 if not filename:
1365 if not filename:
1368 # If there's no commit, redirect to repo summary
1366 # If there's no commit, redirect to repo summary
1369 if type(c.commit) is EmptyCommit:
1367 if type(c.commit) is EmptyCommit:
1370 redirect_url = h.route_path(
1368 redirect_url = h.route_path(
1371 'repo_summary', repo_name=self.db_repo_name)
1369 'repo_summary', repo_name=self.db_repo_name)
1372 else:
1370 else:
1373 redirect_url = default_redirect_url
1371 redirect_url = default_redirect_url
1374 h.flash(_('No filename specified'), category='warning')
1372 h.flash(_('No filename specified'), category='warning')
1375 raise HTTPFound(redirect_url)
1373 raise HTTPFound(redirect_url)
1376
1374
1377 root_path = f_path
1375 root_path = f_path
1378 pure_path = self.create_pure_path(root_path, filename)
1376 pure_path = self.create_pure_path(root_path, filename)
1379 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1377 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1380
1378
1381 author = self._rhodecode_db_user.full_contact
1379 author = self._rhodecode_db_user.full_contact
1382 nodes = {
1380 nodes = {
1383 node_path: {
1381 node_path: {
1384 'content': content
1382 'content': content
1385 }
1383 }
1386 }
1384 }
1387
1385
1388 try:
1386 try:
1389
1387
1390 commit = ScmModel().create_nodes(
1388 commit = ScmModel().create_nodes(
1391 user=self._rhodecode_db_user.user_id,
1389 user=self._rhodecode_db_user.user_id,
1392 repo=self.db_repo,
1390 repo=self.db_repo,
1393 message=message,
1391 message=message,
1394 nodes=nodes,
1392 nodes=nodes,
1395 parent_commit=c.commit,
1393 parent_commit=c.commit,
1396 author=author,
1394 author=author,
1397 )
1395 )
1398
1396
1399 h.flash(_('Successfully committed new file `{}`').format(
1397 h.flash(_('Successfully committed new file `{}`').format(
1400 h.escape(node_path)), category='success')
1398 h.escape(node_path)), category='success')
1401
1399
1402 default_redirect_url = h.route_path(
1400 default_redirect_url = h.route_path(
1403 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1401 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1404
1402
1405 except NonRelativePathError:
1403 except NonRelativePathError:
1406 log.exception('Non Relative path found')
1404 log.exception('Non Relative path found')
1407 h.flash(_('The location specified must be a relative path and must not '
1405 h.flash(_('The location specified must be a relative path and must not '
1408 'contain .. in the path'), category='warning')
1406 'contain .. in the path'), category='warning')
1409 raise HTTPFound(default_redirect_url)
1407 raise HTTPFound(default_redirect_url)
1410 except (NodeError, NodeAlreadyExistsError) as e:
1408 except (NodeError, NodeAlreadyExistsError) as e:
1411 h.flash(_(h.escape(e)), category='error')
1409 h.flash(_(h.escape(e)), category='error')
1412 except Exception:
1410 except Exception:
1413 log.exception('Error occurred during commit')
1411 log.exception('Error occurred during commit')
1414 h.flash(_('Error occurred during commit'), category='error')
1412 h.flash(_('Error occurred during commit'), category='error')
1415
1413
1416 raise HTTPFound(default_redirect_url)
1414 raise HTTPFound(default_redirect_url)
1417
1415
1418 @LoginRequired()
1416 @LoginRequired()
1419 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1417 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1420 @CSRFRequired()
1418 @CSRFRequired()
1421 @view_config(
1419 @view_config(
1422 route_name='repo_files_upload_file', request_method='POST',
1420 route_name='repo_files_upload_file', request_method='POST',
1423 renderer='json_ext')
1421 renderer='json_ext')
1424 def repo_files_upload_file(self):
1422 def repo_files_upload_file(self):
1425 _ = self.request.translate
1423 _ = self.request.translate
1426 c = self.load_default_context()
1424 c = self.load_default_context()
1427 commit_id, f_path = self._get_commit_and_path()
1425 commit_id, f_path = self._get_commit_and_path()
1428
1426
1429 self._ensure_not_locked()
1427 self._ensure_not_locked()
1430
1428
1431 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1429 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1432 if c.commit is None:
1430 if c.commit is None:
1433 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1431 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1434
1432
1435 # calculate redirect URL
1433 # calculate redirect URL
1436 if self.rhodecode_vcs_repo.is_empty():
1434 if self.rhodecode_vcs_repo.is_empty():
1437 default_redirect_url = h.route_path(
1435 default_redirect_url = h.route_path(
1438 'repo_summary', repo_name=self.db_repo_name)
1436 'repo_summary', repo_name=self.db_repo_name)
1439 else:
1437 else:
1440 default_redirect_url = h.route_path(
1438 default_redirect_url = h.route_path(
1441 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1439 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1442
1440
1443 if self.rhodecode_vcs_repo.is_empty():
1441 if self.rhodecode_vcs_repo.is_empty():
1444 # for empty repository we cannot check for current branch, we rely on
1442 # for empty repository we cannot check for current branch, we rely on
1445 # c.commit.branch instead
1443 # c.commit.branch instead
1446 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1444 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1447 else:
1445 else:
1448 _branch_name, _sha_commit_id, is_head = \
1446 _branch_name, _sha_commit_id, is_head = \
1449 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1447 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1450
1448
1451 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1449 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1452 if error:
1450 if error:
1453 return {
1451 return {
1454 'error': error,
1452 'error': error,
1455 'redirect_url': default_redirect_url
1453 'redirect_url': default_redirect_url
1456 }
1454 }
1457 error = self.check_branch_permission(_branch_name, json_mode=True)
1455 error = self.check_branch_permission(_branch_name, json_mode=True)
1458 if error:
1456 if error:
1459 return {
1457 return {
1460 'error': error,
1458 'error': error,
1461 'redirect_url': default_redirect_url
1459 'redirect_url': default_redirect_url
1462 }
1460 }
1463
1461
1464 c.default_message = (_('Uploaded file via RhodeCode Enterprise'))
1462 c.default_message = (_('Uploaded file via RhodeCode Enterprise'))
1465 c.f_path = f_path
1463 c.f_path = f_path
1466
1464
1467 r_post = self.request.POST
1465 r_post = self.request.POST
1468
1466
1469 message = c.default_message
1467 message = c.default_message
1470 user_message = r_post.getall('message')
1468 user_message = r_post.getall('message')
1471 if isinstance(user_message, list) and user_message:
1469 if isinstance(user_message, list) and user_message:
1472 # we take the first from duplicated results if it's not empty
1470 # we take the first from duplicated results if it's not empty
1473 message = user_message[0] if user_message[0] else message
1471 message = user_message[0] if user_message[0] else message
1474
1472
1475 nodes = {}
1473 nodes = {}
1476
1474
1477 for file_obj in r_post.getall('files_upload') or []:
1475 for file_obj in r_post.getall('files_upload') or []:
1478 content = file_obj.file
1476 content = file_obj.file
1479 filename = file_obj.filename
1477 filename = file_obj.filename
1480
1478
1481 root_path = f_path
1479 root_path = f_path
1482 pure_path = self.create_pure_path(root_path, filename)
1480 pure_path = self.create_pure_path(root_path, filename)
1483 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1481 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1484
1482
1485 nodes[node_path] = {
1483 nodes[node_path] = {
1486 'content': content
1484 'content': content
1487 }
1485 }
1488
1486
1489 if not nodes:
1487 if not nodes:
1490 error = 'missing files'
1488 error = 'missing files'
1491 return {
1489 return {
1492 'error': error,
1490 'error': error,
1493 'redirect_url': default_redirect_url
1491 'redirect_url': default_redirect_url
1494 }
1492 }
1495
1493
1496 author = self._rhodecode_db_user.full_contact
1494 author = self._rhodecode_db_user.full_contact
1497
1495
1498 try:
1496 try:
1499 commit = ScmModel().create_nodes(
1497 commit = ScmModel().create_nodes(
1500 user=self._rhodecode_db_user.user_id,
1498 user=self._rhodecode_db_user.user_id,
1501 repo=self.db_repo,
1499 repo=self.db_repo,
1502 message=message,
1500 message=message,
1503 nodes=nodes,
1501 nodes=nodes,
1504 parent_commit=c.commit,
1502 parent_commit=c.commit,
1505 author=author,
1503 author=author,
1506 )
1504 )
1507 if len(nodes) == 1:
1505 if len(nodes) == 1:
1508 flash_message = _('Successfully committed {} new files').format(len(nodes))
1506 flash_message = _('Successfully committed {} new files').format(len(nodes))
1509 else:
1507 else:
1510 flash_message = _('Successfully committed 1 new file')
1508 flash_message = _('Successfully committed 1 new file')
1511
1509
1512 h.flash(flash_message, category='success')
1510 h.flash(flash_message, category='success')
1513
1511
1514 default_redirect_url = h.route_path(
1512 default_redirect_url = h.route_path(
1515 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1513 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1516
1514
1517 except NonRelativePathError:
1515 except NonRelativePathError:
1518 log.exception('Non Relative path found')
1516 log.exception('Non Relative path found')
1519 error = _('The location specified must be a relative path and must not '
1517 error = _('The location specified must be a relative path and must not '
1520 'contain .. in the path')
1518 'contain .. in the path')
1521 h.flash(error, category='warning')
1519 h.flash(error, category='warning')
1522
1520
1523 return {
1521 return {
1524 'error': error,
1522 'error': error,
1525 'redirect_url': default_redirect_url
1523 'redirect_url': default_redirect_url
1526 }
1524 }
1527 except (NodeError, NodeAlreadyExistsError) as e:
1525 except (NodeError, NodeAlreadyExistsError) as e:
1528 error = h.escape(e)
1526 error = h.escape(e)
1529 h.flash(error, category='error')
1527 h.flash(error, category='error')
1530
1528
1531 return {
1529 return {
1532 'error': error,
1530 'error': error,
1533 'redirect_url': default_redirect_url
1531 'redirect_url': default_redirect_url
1534 }
1532 }
1535 except Exception:
1533 except Exception:
1536 log.exception('Error occurred during commit')
1534 log.exception('Error occurred during commit')
1537 error = _('Error occurred during commit')
1535 error = _('Error occurred during commit')
1538 h.flash(error, category='error')
1536 h.flash(error, category='error')
1539 return {
1537 return {
1540 'error': error,
1538 'error': error,
1541 'redirect_url': default_redirect_url
1539 'redirect_url': default_redirect_url
1542 }
1540 }
1543
1541
1544 return {
1542 return {
1545 'error': None,
1543 'error': None,
1546 'redirect_url': default_redirect_url
1544 'redirect_url': default_redirect_url
1547 }
1545 }
@@ -1,188 +1,184 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Various version Control System version lib (vcs) management abstraction layer
22 Various version Control System version lib (vcs) management abstraction layer
23 for Python. Build with server client architecture.
23 for Python. Build with server client architecture.
24 """
24 """
25 import atexit
25 import atexit
26 import logging
26 import logging
27 import urlparse
27 import urlparse
28 from cStringIO import StringIO
28 from cStringIO import StringIO
29
29
30 import rhodecode
30 import rhodecode
31 from rhodecode.lib.vcs.conf import settings
31 from rhodecode.lib.vcs.conf import settings
32 from rhodecode.lib.vcs.backends import get_vcs_instance, get_backend
32 from rhodecode.lib.vcs.backends import get_vcs_instance, get_backend
33 from rhodecode.lib.vcs.exceptions import (
33 from rhodecode.lib.vcs.exceptions import (
34 VCSError, RepositoryError, CommitError, VCSCommunicationError)
34 VCSError, RepositoryError, CommitError, VCSCommunicationError)
35
35
36 VERSION = (0, 5, 0, 'dev')
36 VERSION = (0, 5, 0, 'dev')
37
37
38 __version__ = '.'.join((str(each) for each in VERSION[:4]))
38 __version__ = '.'.join((str(each) for each in VERSION[:4]))
39
39
40 __all__ = [
40 __all__ = [
41 'get_version', 'get_vcs_instance', 'get_backend',
41 'get_version', 'get_vcs_instance', 'get_backend',
42 'VCSError', 'RepositoryError', 'CommitError', 'VCSCommunicationError'
42 'VCSError', 'RepositoryError', 'CommitError', 'VCSCommunicationError'
43 ]
43 ]
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47 # The pycurl library directly accesses C API functions and is not patched by
47 # The pycurl library directly accesses C API functions and is not patched by
48 # gevent. This will potentially lead to deadlocks due to incompatibility to
48 # gevent. This will potentially lead to deadlocks due to incompatibility to
49 # gevent. Therefore we check if gevent is active and import a gevent compatible
49 # gevent. Therefore we check if gevent is active and import a gevent compatible
50 # wrapper in that case.
50 # wrapper in that case.
51 try:
51 try:
52 from gevent import monkey
52 from gevent import monkey
53 if monkey.is_module_patched('__builtin__'):
53 if monkey.is_module_patched('__builtin__'):
54 import geventcurl as pycurl
54 import geventcurl as pycurl
55 log.debug('Using gevent comapatible pycurl: %s', pycurl)
55 log.debug('Using gevent comapatible pycurl: %s', pycurl)
56 else:
56 else:
57 import pycurl
57 import pycurl
58 except ImportError:
58 except ImportError:
59 import pycurl
59 import pycurl
60
60
61
61
62 def get_version():
62 def get_version():
63 """
63 """
64 Returns shorter version (digit parts only) as string.
64 Returns shorter version (digit parts only) as string.
65 """
65 """
66 return '.'.join((str(each) for each in VERSION[:3]))
66 return '.'.join((str(each) for each in VERSION[:3]))
67
67
68
68
69 def connect_http(server_and_port):
69 def connect_http(server_and_port):
70 from rhodecode.lib.vcs import connection, client_http
70 from rhodecode.lib.vcs import connection, client_http
71 from rhodecode.lib.middleware.utils import scm_app
71 from rhodecode.lib.middleware.utils import scm_app
72
72
73 session_factory = client_http.ThreadlocalSessionFactory()
73 session_factory = client_http.ThreadlocalSessionFactory()
74
74
75 connection.Git = client_http.RepoMaker(
75 connection.Git = client_http.RemoteVCSMaker(
76 server_and_port, '/git', 'git', session_factory)
76 server_and_port, '/git', 'git', session_factory)
77 connection.Hg = client_http.RepoMaker(
77 connection.Hg = client_http.RemoteVCSMaker(
78 server_and_port, '/hg', 'hg', session_factory)
78 server_and_port, '/hg', 'hg', session_factory)
79 connection.Svn = client_http.RepoMaker(
79 connection.Svn = client_http.RemoteVCSMaker(
80 server_and_port, '/svn', 'svn', session_factory)
80 server_and_port, '/svn', 'svn', session_factory)
81 connection.Service = client_http.ServiceConnection(
81 connection.Service = client_http.ServiceConnection(
82 server_and_port, '/_service', session_factory)
82 server_and_port, '/_service', session_factory)
83
83
84 scm_app.HG_REMOTE_WSGI = client_http.VcsHttpProxy(
84 scm_app.HG_REMOTE_WSGI = client_http.VcsHttpProxy(
85 server_and_port, '/proxy/hg')
85 server_and_port, '/proxy/hg')
86 scm_app.GIT_REMOTE_WSGI = client_http.VcsHttpProxy(
86 scm_app.GIT_REMOTE_WSGI = client_http.VcsHttpProxy(
87 server_and_port, '/proxy/git')
87 server_and_port, '/proxy/git')
88
88
89 @atexit.register
89 @atexit.register
90 def free_connection_resources():
90 def free_connection_resources():
91 connection.Git = None
91 connection.Git = None
92 connection.Hg = None
92 connection.Hg = None
93 connection.Svn = None
93 connection.Svn = None
94 connection.Service = None
94 connection.Service = None
95
95
96
96
97 def connect_vcs(server_and_port, protocol):
97 def connect_vcs(server_and_port, protocol):
98 """
98 """
99 Initializes the connection to the vcs server.
99 Initializes the connection to the vcs server.
100
100
101 :param server_and_port: str, e.g. "localhost:9900"
101 :param server_and_port: str, e.g. "localhost:9900"
102 :param protocol: str or "http"
102 :param protocol: str or "http"
103 """
103 """
104 if protocol == 'http':
104 if protocol == 'http':
105 connect_http(server_and_port)
105 connect_http(server_and_port)
106 else:
106 else:
107 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
107 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
108
108
109
109
110 def create_vcsserver_proxy(server_and_port, protocol):
111 if protocol == 'http':
112 return _create_vcsserver_proxy_http(server_and_port)
113 else:
114 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
115
116
117 def _create_vcsserver_proxy_http(server_and_port):
118 from rhodecode.lib.vcs import client_http
119
120 session = _create_http_rpc_session()
121 url = urlparse.urljoin('http://%s' % server_and_port, '/server')
122 return client_http.RemoteObject(url, session)
123
124
125 class CurlSession(object):
110 class CurlSession(object):
126 """
111 """
127 Modeled so that it provides a subset of the requests interface.
112 Modeled so that it provides a subset of the requests interface.
128
113
129 This has been created so that it does only provide a minimal API for our
114 This has been created so that it does only provide a minimal API for our
130 needs. The parts which it provides are based on the API of the library
115 needs. The parts which it provides are based on the API of the library
131 `requests` which allows us to easily benchmark against it.
116 `requests` which allows us to easily benchmark against it.
132
117
133 Please have a look at the class :class:`requests.Session` when you extend
118 Please have a look at the class :class:`requests.Session` when you extend
134 it.
119 it.
135 """
120 """
136
121
137 def __init__(self):
122 def __init__(self):
138 curl = pycurl.Curl()
123 curl = pycurl.Curl()
139 # TODO: johbo: I did test with 7.19 of libcurl. This version has
124 # TODO: johbo: I did test with 7.19 of libcurl. This version has
140 # trouble with 100 - continue being set in the expect header. This
125 # trouble with 100 - continue being set in the expect header. This
141 # can lead to massive performance drops, switching it off here.
126 # can lead to massive performance drops, switching it off here.
142 curl.setopt(curl.HTTPHEADER, ["Expect:"])
127 curl.setopt(curl.HTTPHEADER, ["Expect:"])
143 curl.setopt(curl.TCP_NODELAY, True)
128 curl.setopt(curl.TCP_NODELAY, True)
144 curl.setopt(curl.PROTOCOLS, curl.PROTO_HTTP)
129 curl.setopt(curl.PROTOCOLS, curl.PROTO_HTTP)
145 curl.setopt(curl.USERAGENT, 'RhodeCode HTTP {}'.format(rhodecode.__version__))
130 curl.setopt(curl.USERAGENT, 'RhodeCode HTTP {}'.format(rhodecode.__version__))
146 self._curl = curl
131 self._curl = curl
147
132
148 def post(self, url, data, allow_redirects=False):
133 def post(self, url, data, allow_redirects=False):
149 response_buffer = StringIO()
134 response_buffer = StringIO()
150
135
151 curl = self._curl
136 curl = self._curl
152 curl.setopt(curl.URL, url)
137 curl.setopt(curl.URL, url)
153 curl.setopt(curl.POST, True)
138 curl.setopt(curl.POST, True)
154 curl.setopt(curl.POSTFIELDS, data)
139 curl.setopt(curl.POSTFIELDS, data)
155 curl.setopt(curl.FOLLOWLOCATION, allow_redirects)
140 curl.setopt(curl.FOLLOWLOCATION, allow_redirects)
156 curl.setopt(curl.WRITEDATA, response_buffer)
141 curl.setopt(curl.WRITEDATA, response_buffer)
157 curl.perform()
142 curl.perform()
158
143
159 status_code = curl.getinfo(pycurl.HTTP_CODE)
144 status_code = curl.getinfo(pycurl.HTTP_CODE)
160
145
161 return CurlResponse(response_buffer, status_code)
146 return CurlResponse(response_buffer, status_code)
162
147
163
148
164 class CurlResponse(object):
149 class CurlResponse(object):
165 """
150 """
166 The response of a request, modeled after the requests API.
151 The response of a request, modeled after the requests API.
167
152
168 This class provides a subset of the response interface known from the
153 This class provides a subset of the response interface known from the
169 library `requests`. It is intentionally kept similar, so that we can use
154 library `requests`. It is intentionally kept similar, so that we can use
170 `requests` as a drop in replacement for benchmarking purposes.
155 `requests` as a drop in replacement for benchmarking purposes.
171 """
156 """
172
157
173 def __init__(self, response_buffer, status_code):
158 def __init__(self, response_buffer, status_code):
174 self._response_buffer = response_buffer
159 self._response_buffer = response_buffer
175 self._status_code = status_code
160 self._status_code = status_code
176
161
177 @property
162 @property
178 def content(self):
163 def content(self):
179 return self._response_buffer.getvalue()
164 try:
165 return self._response_buffer.getvalue()
166 finally:
167 self._response_buffer.close()
180
168
181 @property
169 @property
182 def status_code(self):
170 def status_code(self):
183 return self._status_code
171 return self._status_code
184
172
173 def iter_content(self, chunk_size):
174 self._response_buffer.seek(0)
175 while 1:
176 chunk = self._response_buffer.read(chunk_size)
177 if not chunk:
178 break
179 yield chunk
180
185
181
186 def _create_http_rpc_session():
182 def _create_http_rpc_session():
187 session = CurlSession()
183 session = CurlSession()
188 return session
184 return session
@@ -1,1881 +1,1890 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Base module for all VCS systems
22 Base module for all VCS systems
23 """
23 """
24 import os
24 import os
25 import re
25 import re
26 import time
26 import time
27 import shutil
27 import shutil
28 import datetime
28 import datetime
29 import fnmatch
29 import fnmatch
30 import itertools
30 import itertools
31 import logging
31 import logging
32 import collections
32 import collections
33 import warnings
33 import warnings
34
34
35 from zope.cachedescriptors.property import Lazy as LazyProperty
35 from zope.cachedescriptors.property import Lazy as LazyProperty
36
36
37 from pyramid import compat
37 from pyramid import compat
38
38
39 import rhodecode
39 import rhodecode
40 from rhodecode.translation import lazy_ugettext
40 from rhodecode.translation import lazy_ugettext
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 from rhodecode.lib.vcs import connection
42 from rhodecode.lib.vcs import connection
43 from rhodecode.lib.vcs.utils import author_name, author_email
43 from rhodecode.lib.vcs.utils import author_name, author_email
44 from rhodecode.lib.vcs.conf import settings
44 from rhodecode.lib.vcs.conf import settings
45 from rhodecode.lib.vcs.exceptions import (
45 from rhodecode.lib.vcs.exceptions import (
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 RepositoryError)
50 RepositoryError)
51
51
52
52
53 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
54
54
55
55
56 FILEMODE_DEFAULT = 0o100644
56 FILEMODE_DEFAULT = 0o100644
57 FILEMODE_EXECUTABLE = 0o100755
57 FILEMODE_EXECUTABLE = 0o100755
58 EMPTY_COMMIT_ID = '0' * 40
58 EMPTY_COMMIT_ID = '0' * 40
59
59
60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61
61
62
62
63 class MergeFailureReason(object):
63 class MergeFailureReason(object):
64 """
64 """
65 Enumeration with all the reasons why the server side merge could fail.
65 Enumeration with all the reasons why the server side merge could fail.
66
66
67 DO NOT change the number of the reasons, as they may be stored in the
67 DO NOT change the number of the reasons, as they may be stored in the
68 database.
68 database.
69
69
70 Changing the name of a reason is acceptable and encouraged to deprecate old
70 Changing the name of a reason is acceptable and encouraged to deprecate old
71 reasons.
71 reasons.
72 """
72 """
73
73
74 # Everything went well.
74 # Everything went well.
75 NONE = 0
75 NONE = 0
76
76
77 # An unexpected exception was raised. Check the logs for more details.
77 # An unexpected exception was raised. Check the logs for more details.
78 UNKNOWN = 1
78 UNKNOWN = 1
79
79
80 # The merge was not successful, there are conflicts.
80 # The merge was not successful, there are conflicts.
81 MERGE_FAILED = 2
81 MERGE_FAILED = 2
82
82
83 # The merge succeeded but we could not push it to the target repository.
83 # The merge succeeded but we could not push it to the target repository.
84 PUSH_FAILED = 3
84 PUSH_FAILED = 3
85
85
86 # The specified target is not a head in the target repository.
86 # The specified target is not a head in the target repository.
87 TARGET_IS_NOT_HEAD = 4
87 TARGET_IS_NOT_HEAD = 4
88
88
89 # The source repository contains more branches than the target. Pushing
89 # The source repository contains more branches than the target. Pushing
90 # the merge will create additional branches in the target.
90 # the merge will create additional branches in the target.
91 HG_SOURCE_HAS_MORE_BRANCHES = 5
91 HG_SOURCE_HAS_MORE_BRANCHES = 5
92
92
93 # The target reference has multiple heads. That does not allow to correctly
93 # The target reference has multiple heads. That does not allow to correctly
94 # identify the target location. This could only happen for mercurial
94 # identify the target location. This could only happen for mercurial
95 # branches.
95 # branches.
96 HG_TARGET_HAS_MULTIPLE_HEADS = 6
96 HG_TARGET_HAS_MULTIPLE_HEADS = 6
97
97
98 # The target repository is locked
98 # The target repository is locked
99 TARGET_IS_LOCKED = 7
99 TARGET_IS_LOCKED = 7
100
100
101 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
101 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
102 # A involved commit could not be found.
102 # A involved commit could not be found.
103 _DEPRECATED_MISSING_COMMIT = 8
103 _DEPRECATED_MISSING_COMMIT = 8
104
104
105 # The target repo reference is missing.
105 # The target repo reference is missing.
106 MISSING_TARGET_REF = 9
106 MISSING_TARGET_REF = 9
107
107
108 # The source repo reference is missing.
108 # The source repo reference is missing.
109 MISSING_SOURCE_REF = 10
109 MISSING_SOURCE_REF = 10
110
110
111 # The merge was not successful, there are conflicts related to sub
111 # The merge was not successful, there are conflicts related to sub
112 # repositories.
112 # repositories.
113 SUBREPO_MERGE_FAILED = 11
113 SUBREPO_MERGE_FAILED = 11
114
114
115
115
116 class UpdateFailureReason(object):
116 class UpdateFailureReason(object):
117 """
117 """
118 Enumeration with all the reasons why the pull request update could fail.
118 Enumeration with all the reasons why the pull request update could fail.
119
119
120 DO NOT change the number of the reasons, as they may be stored in the
120 DO NOT change the number of the reasons, as they may be stored in the
121 database.
121 database.
122
122
123 Changing the name of a reason is acceptable and encouraged to deprecate old
123 Changing the name of a reason is acceptable and encouraged to deprecate old
124 reasons.
124 reasons.
125 """
125 """
126
126
127 # Everything went well.
127 # Everything went well.
128 NONE = 0
128 NONE = 0
129
129
130 # An unexpected exception was raised. Check the logs for more details.
130 # An unexpected exception was raised. Check the logs for more details.
131 UNKNOWN = 1
131 UNKNOWN = 1
132
132
133 # The pull request is up to date.
133 # The pull request is up to date.
134 NO_CHANGE = 2
134 NO_CHANGE = 2
135
135
136 # The pull request has a reference type that is not supported for update.
136 # The pull request has a reference type that is not supported for update.
137 WRONG_REF_TYPE = 3
137 WRONG_REF_TYPE = 3
138
138
139 # Update failed because the target reference is missing.
139 # Update failed because the target reference is missing.
140 MISSING_TARGET_REF = 4
140 MISSING_TARGET_REF = 4
141
141
142 # Update failed because the source reference is missing.
142 # Update failed because the source reference is missing.
143 MISSING_SOURCE_REF = 5
143 MISSING_SOURCE_REF = 5
144
144
145
145
146 class MergeResponse(object):
146 class MergeResponse(object):
147
147
148 # uses .format(**metadata) for variables
148 # uses .format(**metadata) for variables
149 MERGE_STATUS_MESSAGES = {
149 MERGE_STATUS_MESSAGES = {
150 MergeFailureReason.NONE: lazy_ugettext(
150 MergeFailureReason.NONE: lazy_ugettext(
151 u'This pull request can be automatically merged.'),
151 u'This pull request can be automatically merged.'),
152 MergeFailureReason.UNKNOWN: lazy_ugettext(
152 MergeFailureReason.UNKNOWN: lazy_ugettext(
153 u'This pull request cannot be merged because of an unhandled exception. '
153 u'This pull request cannot be merged because of an unhandled exception. '
154 u'{exception}'),
154 u'{exception}'),
155 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
155 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
156 u'This pull request cannot be merged because of merge conflicts.'),
156 u'This pull request cannot be merged because of merge conflicts.'),
157 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
157 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
158 u'This pull request could not be merged because push to '
158 u'This pull request could not be merged because push to '
159 u'target:`{target}@{merge_commit}` failed.'),
159 u'target:`{target}@{merge_commit}` failed.'),
160 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
160 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
161 u'This pull request cannot be merged because the target '
161 u'This pull request cannot be merged because the target '
162 u'`{target_ref.name}` is not a head.'),
162 u'`{target_ref.name}` is not a head.'),
163 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
163 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
164 u'This pull request cannot be merged because the source contains '
164 u'This pull request cannot be merged because the source contains '
165 u'more branches than the target.'),
165 u'more branches than the target.'),
166 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
166 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
167 u'This pull request cannot be merged because the target `{target_ref.name}` '
167 u'This pull request cannot be merged because the target `{target_ref.name}` '
168 u'has multiple heads: `{heads}`.'),
168 u'has multiple heads: `{heads}`.'),
169 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
169 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
170 u'This pull request cannot be merged because the target repository is '
170 u'This pull request cannot be merged because the target repository is '
171 u'locked by {locked_by}.'),
171 u'locked by {locked_by}.'),
172
172
173 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
173 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
174 u'This pull request cannot be merged because the target '
174 u'This pull request cannot be merged because the target '
175 u'reference `{target_ref.name}` is missing.'),
175 u'reference `{target_ref.name}` is missing.'),
176 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
176 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
177 u'This pull request cannot be merged because the source '
177 u'This pull request cannot be merged because the source '
178 u'reference `{source_ref.name}` is missing.'),
178 u'reference `{source_ref.name}` is missing.'),
179 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
179 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
180 u'This pull request cannot be merged because of conflicts related '
180 u'This pull request cannot be merged because of conflicts related '
181 u'to sub repositories.'),
181 u'to sub repositories.'),
182
182
183 # Deprecations
183 # Deprecations
184 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
184 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
185 u'This pull request cannot be merged because the target or the '
185 u'This pull request cannot be merged because the target or the '
186 u'source reference is missing.'),
186 u'source reference is missing.'),
187
187
188 }
188 }
189
189
190 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
190 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
191 self.possible = possible
191 self.possible = possible
192 self.executed = executed
192 self.executed = executed
193 self.merge_ref = merge_ref
193 self.merge_ref = merge_ref
194 self.failure_reason = failure_reason
194 self.failure_reason = failure_reason
195 self.metadata = metadata or {}
195 self.metadata = metadata or {}
196
196
197 def __repr__(self):
197 def __repr__(self):
198 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
198 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
199
199
200 def __eq__(self, other):
200 def __eq__(self, other):
201 same_instance = isinstance(other, self.__class__)
201 same_instance = isinstance(other, self.__class__)
202 return same_instance \
202 return same_instance \
203 and self.possible == other.possible \
203 and self.possible == other.possible \
204 and self.executed == other.executed \
204 and self.executed == other.executed \
205 and self.failure_reason == other.failure_reason
205 and self.failure_reason == other.failure_reason
206
206
207 @property
207 @property
208 def label(self):
208 def label(self):
209 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
209 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
210 not k.startswith('_'))
210 not k.startswith('_'))
211 return label_dict.get(self.failure_reason)
211 return label_dict.get(self.failure_reason)
212
212
213 @property
213 @property
214 def merge_status_message(self):
214 def merge_status_message(self):
215 """
215 """
216 Return a human friendly error message for the given merge status code.
216 Return a human friendly error message for the given merge status code.
217 """
217 """
218 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
218 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
219 try:
219 try:
220 return msg.format(**self.metadata)
220 return msg.format(**self.metadata)
221 except Exception:
221 except Exception:
222 log.exception('Failed to format %s message', self)
222 log.exception('Failed to format %s message', self)
223 return msg
223 return msg
224
224
225 def asdict(self):
225 def asdict(self):
226 data = {}
226 data = {}
227 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
227 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
228 'merge_status_message']:
228 'merge_status_message']:
229 data[k] = getattr(self, k)
229 data[k] = getattr(self, k)
230 return data
230 return data
231
231
232
232
class BaseRepository(object):
    """
    Base Repository for final backends

    .. attribute:: DEFAULT_BRANCH_NAME

       name of default branch (i.e. "trunk" for svn, "master" for git etc.

    .. attribute:: commit_ids

       list of all available commit ids, in ascending order

    .. attribute:: path

       absolute path to the repository

    .. attribute:: bookmarks

       Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
       there are no bookmarks or the backend implementation does not support
       bookmarks.

    .. attribute:: tags

       Mapping from name to :term:`Commit ID` of the tag.

    """
260
260
    # per-backend defaults; concrete backends override these
    DEFAULT_BRANCH_NAME = None
    DEFAULT_CONTACT = u"Unknown"
    DEFAULT_DESCRIPTION = u"unknown"
    # 40 zeros: "null" sha used for the virtual empty commit
    EMPTY_COMMIT_ID = '0' * 40

    # absolute filesystem path, set by the backend's __init__
    path = None

    # cached "repository is empty" flag; None means not yet computed
    _is_empty = None
    # local commit-id cache; presumably maps commit id -> index -- TODO confirm
    # against _rebuild_cache in the concrete backends
    _commit_ids = {}
270
270
    def __init__(self, repo_path, config=None, create=False, **kwargs):
        """
        Initializes repository. Raises RepositoryError if repository could
        not be found at the given ``repo_path`` or directory at ``repo_path``
        exists and ``create`` is set to True.

        :param repo_path: local path of the repository
        :param config: repository configuration
        :param create=False: if set to True, would try to create repository.
        :param src_url=None: if set, should be proper url from which repository
          would be cloned; requires ``create`` parameter to be set to True -
          raises RepositoryError if src_url is set and create evaluates to
          False
        """
        raise NotImplementedError
286
286
287 def __repr__(self):
287 def __repr__(self):
288 return '<%s at %s>' % (self.__class__.__name__, self.path)
288 return '<%s at %s>' % (self.__class__.__name__, self.path)
289
289
    def __len__(self):
        # total number of commits in this repository
        return self.count()
292
292
293 def __eq__(self, other):
293 def __eq__(self, other):
294 same_instance = isinstance(other, self.__class__)
294 same_instance = isinstance(other, self.__class__)
295 return same_instance and other.path == self.path
295 return same_instance and other.path == self.path
296
296
    def __ne__(self, other):
        # python2 does not derive __ne__ from __eq__, so define it explicitly
        return not self.__eq__(other)
299
299
300 def get_create_shadow_cache_pr_path(self, db_repo):
300 def get_create_shadow_cache_pr_path(self, db_repo):
301 path = db_repo.cached_diffs_dir
301 path = db_repo.cached_diffs_dir
302 if not os.path.exists(path):
302 if not os.path.exists(path):
303 os.makedirs(path, 0o755)
303 os.makedirs(path, 0o755)
304 return path
304 return path
305
305
306 @classmethod
306 @classmethod
307 def get_default_config(cls, default=None):
307 def get_default_config(cls, default=None):
308 config = Config()
308 config = Config()
309 if default and isinstance(default, list):
309 if default and isinstance(default, list):
310 for section, key, val in default:
310 for section, key, val in default:
311 config.set(section, key, val)
311 config.set(section, key, val)
312 return config
312 return config
313
313
    @LazyProperty
    def _remote(self):
        # handle to the vcsserver remote for this repo; backend specific
        raise NotImplementedError
317
317
    def _heads(self, branch=None):
        # base implementation has no notion of heads; backends that do
        # (e.g. ones with multiple heads per branch) override this
        return []
320
320
    @LazyProperty
    def EMPTY_COMMIT(self):
        # virtual commit representing the empty repository state
        return EmptyCommit(self.EMPTY_COMMIT_ID)
324
324
325 @LazyProperty
325 @LazyProperty
326 def alias(self):
326 def alias(self):
327 for k, v in settings.BACKENDS.items():
327 for k, v in settings.BACKENDS.items():
328 if v.split('.')[-1] == str(self.__class__.__name__):
328 if v.split('.')[-1] == str(self.__class__.__name__):
329 return k
329 return k
330
330
    @LazyProperty
    def name(self):
        # repository name is the last path segment, returned as unicode
        return safe_unicode(os.path.basename(self.path))
334
334
    @LazyProperty
    def description(self):
        # human readable repository description; backend specific
        raise NotImplementedError
338
338
339 def refs(self):
339 def refs(self):
340 """
340 """
341 returns a `dict` with branches, bookmarks, tags, and closed_branches
341 returns a `dict` with branches, bookmarks, tags, and closed_branches
342 for this repository
342 for this repository
343 """
343 """
344 return dict(
344 return dict(
345 branches=self.branches,
345 branches=self.branches,
346 branches_closed=self.branches_closed,
346 branches_closed=self.branches_closed,
347 tags=self.tags,
347 tags=self.tags,
348 bookmarks=self.bookmarks
348 bookmarks=self.bookmarks
349 )
349 )
350
350
    @LazyProperty
    def branches(self):
        """
        A `dict` which maps branch names to commit ids.
        """
        raise NotImplementedError
357
357
    @LazyProperty
    def branches_closed(self):
        """
        A `dict` which maps closed branch names to commit ids.
        """
        raise NotImplementedError
364
364
    @LazyProperty
    def bookmarks(self):
        """
        A `dict` which maps bookmark names to commit ids.
        """
        raise NotImplementedError
371
371
    @LazyProperty
    def tags(self):
        """
        A `dict` which maps tag names to commit ids.
        """
        raise NotImplementedError
378
378
379 @LazyProperty
379 @LazyProperty
380 def size(self):
380 def size(self):
381 """
381 """
382 Returns combined size in bytes for all repository files
382 Returns combined size in bytes for all repository files
383 """
383 """
384 tip = self.get_commit()
384 tip = self.get_commit()
385 return tip.size
385 return tip.size
386
386
387 def size_at_commit(self, commit_id):
387 def size_at_commit(self, commit_id):
388 commit = self.get_commit(commit_id)
388 commit = self.get_commit(commit_id)
389 return commit.size
389 return commit.size
390
390
391 def _check_for_empty(self):
391 def _check_for_empty(self):
392 no_commits = len(self._commit_ids) == 0
392 no_commits = len(self._commit_ids) == 0
393 if no_commits:
393 if no_commits:
394 # check on remote to be sure
394 # check on remote to be sure
395 return self._remote.is_empty()
395 return self._remote.is_empty()
396 else:
396 else:
397 return False
397 return False
398
398
    def is_empty(self):
        """Return True when the repository has no commits.

        In production the answer is computed once and memoized in
        `_is_empty`; under tests it is always recomputed.
        """
        if rhodecode.is_test:
            return self._check_for_empty()

        if self._is_empty is None:
            # cache empty for production, but not tests
            self._is_empty = self._check_for_empty()

        return self._is_empty
408
408
    @staticmethod
    def check_url(url, config):
        """
        Function will check given url and try to verify if it's a valid
        link.
        """
        raise NotImplementedError
416
416
    @staticmethod
    def is_valid_repository(path):
        """
        Check if given `path` contains a valid repository of this backend
        """
        raise NotImplementedError
423
423
    # ==========================================================================
    # COMMITS
    # ==========================================================================

    @CachedProperty
    def commit_ids(self):
        # all commit ids of the repository, ascending; backend specific
        raise NotImplementedError
431
431
    def append_commit_id(self, commit_id):
        """Register a newly created commit id and refresh cached state."""
        if commit_id not in self.commit_ids:
            self._rebuild_cache(self.commit_ids + [commit_id])

        # clear cache
        self._invalidate_prop_cache('commit_ids')
        self._is_empty = False
439
439
    def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
        """
        Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
        are both None, most recent commit is returned.

        :param commit_id: Optional. Commit hash/id to look up.
        :param commit_idx: Optional. Numeric index into `commit_ids`.
        :param pre_load: Optional. List of commit attributes to load.
        :param translate_tag: Optional. Backend specific tag handling flag.

        :raises ``EmptyRepositoryError``: if there are no commits
        """
        raise NotImplementedError
450
450
451 def __iter__(self):
451 def __iter__(self):
452 for commit_id in self.commit_ids:
452 for commit_id in self.commit_ids:
453 yield self.get_commit(commit_id=commit_id)
453 yield self.get_commit(commit_id=commit_id)
454
454
    def get_commits(
            self, start_id=None, end_id=None, start_date=None, end_date=None,
            branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
        """
        Returns iterator of `BaseCommit` objects from start to end
        not inclusive. This should behave just like a list, ie. end is not
        inclusive.

        :param start_id: None or str, must be a valid commit id
        :param end_id: None or str, must be a valid commit id
        :param start_date: optional lower date bound -- semantics backend specific
        :param end_date: optional upper date bound -- semantics backend specific
        :param branch_name: optional branch filter -- semantics backend specific
        :param show_hidden: include hidden commits -- semantics backend specific
        :param pre_load: Optional. List of commit attributes to load.
        :param translate_tags: backend specific tag handling flag
        """
        raise NotImplementedError
473
473
474 def __getitem__(self, key):
474 def __getitem__(self, key):
475 """
475 """
476 Allows index based access to the commit objects of this repository.
476 Allows index based access to the commit objects of this repository.
477 """
477 """
478 pre_load = ["author", "branch", "date", "message", "parents"]
478 pre_load = ["author", "branch", "date", "message", "parents"]
479 if isinstance(key, slice):
479 if isinstance(key, slice):
480 return self._get_range(key, pre_load)
480 return self._get_range(key, pre_load)
481 return self.get_commit(commit_idx=key, pre_load=pre_load)
481 return self.get_commit(commit_idx=key, pre_load=pre_load)
482
482
483 def _get_range(self, slice_obj, pre_load):
483 def _get_range(self, slice_obj, pre_load):
484 for commit_id in self.commit_ids.__getitem__(slice_obj):
484 for commit_id in self.commit_ids.__getitem__(slice_obj):
485 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
485 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
486
486
487 def count(self):
487 def count(self):
488 return len(self.commit_ids)
488 return len(self.commit_ids)
489
489
    def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
        """
        Creates and returns a tag for the given ``commit_id``.

        :param name: name for new tag
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param commit_id: commit id for which new tag would be created
        :param message: message of the tag's commit
        :param date: date of tag's commit
        :param opts: extra backend specific options

        :raises TagAlreadyExistError: if tag with same name already exists
        """
        raise NotImplementedError
503
503
    def remove_tag(self, name, user, message=None, date=None):
        """
        Removes tag with the given ``name``.

        :param name: name of the tag to be removed
        :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
        :param message: message of the tag's removal commit
        :param date: date of tag's removal commit

        :raises TagDoesNotExistError: if tag with given name does not exist
        """
        raise NotImplementedError
516
516
    def get_diff(
            self, commit1, commit2, path=None, ignore_whitespace=False,
            context=3, path1=None):
        """
        Returns (git like) *diff*, as plain text. Shows changes introduced by
        `commit2` since `commit1`.

        :param commit1: Entry point from which diff is shown. Can be
          ``self.EMPTY_COMMIT`` - in this case, patch showing all
          the changes since empty state of the repository until `commit2`
        :param commit2: Until which commit changes should be shown.
        :param path: Can be set to a path of a file to create a diff of that
          file. If `path1` is also set, this value is only associated to
          `commit2`.
        :param ignore_whitespace: If set to ``True``, would not show whitespace
          changes. Defaults to ``False``.
        :param context: How many lines before/after changed lines should be
          shown. Defaults to ``3``.
        :param path1: Can be set to a path to associate with `commit1`. This
          parameter works only for backends which support diff generation for
          different paths. Other backends will raise a `ValueError` if `path1`
          is set and has a different value than `path`.
        """
        raise NotImplementedError
542
542
    def strip(self, commit_id, branch=None):
        """
        Strip given commit_id from the repository

        :param commit_id: commit to remove
        :param branch: optional branch scoping -- semantics backend specific
        """
        raise NotImplementedError
548
548
    def get_common_ancestor(self, commit_id1, commit_id2, repo2):
        """
        Return a latest common ancestor commit if one exists for this repo
        `commit_id1` vs `commit_id2` from `repo2`.

        :param commit_id1: Commit id from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        """
        raise NotImplementedError
560
560
    def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
        """
        Compare this repository's revision `commit_id1` with `commit_id2`.

        Returns a tuple(commits, ancestor) that would be merged from
        `commit_id2`. Doing a normal compare (``merge=False``), ``None``
        will be returned as ancestor.

        :param commit_id1: Commit id from this repository to use as a
          target for the comparison.
        :param commit_id2: Source commit id to use for comparison.
        :param repo2: Source repository to use for comparison.
        :param merge: If set to ``True`` will do a merge compare which also
          returns the common ancestor.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError
578
578
    def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
              user_name='', user_email='', message='', dry_run=False,
              use_rebase=False, close_branch=False):
        """
        Merge the revisions specified in `source_ref` from `source_repo`
        onto the `target_ref` of this repository.

        `source_ref` and `target_ref` are named tuples with the following
        fields `type`, `name` and `commit_id`.

        Returns a MergeResponse named tuple with the following fields
        'possible', 'executed', 'source_commit', 'target_commit',
        'merge_commit'.

        :param repo_id: `repo_id` target repo id.
        :param workspace_id: `workspace_id` unique identifier.
        :param target_ref: `target_ref` points to the commit on top of which
           the `source_ref` should be merged.
        :param source_repo: The repository that contains the commits to be
           merged.
        :param source_ref: `source_ref` points to the topmost commit from
           the `source_repo` which should be merged.
        :param user_name: Merge commit `user_name`.
        :param user_email: Merge commit `user_email`.
        :param message: Merge commit `message`.
        :param dry_run: If `True` the merge will not take place.
        :param use_rebase: If `True` commits from the source will be rebased
           on top of the target instead of being merged.
        :param close_branch: If `True` branch will be close before merging it
        """
        if dry_run:
            # dry runs may proceed without author info; fill in safe defaults
            message = message or settings.MERGE_DRY_RUN_MESSAGE
            user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
            user_name = user_name or settings.MERGE_DRY_RUN_USER
        else:
            # a real merge commit requires full author information
            if not user_name:
                raise ValueError('user_name cannot be empty')
            if not user_email:
                raise ValueError('user_email cannot be empty')
            if not message:
                raise ValueError('message cannot be empty')

        try:
            return self._merge_repo(
                repo_id, workspace_id, target_ref, source_repo,
                source_ref, message, user_name, user_email, dry_run=dry_run,
                use_rebase=use_rebase, close_branch=close_branch)
        except RepositoryError as exc:
            # never propagate backend failures; report them as a failed merge
            log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
            return MergeResponse(
                False, False, None, MergeFailureReason.UNKNOWN,
                metadata={'exception': str(exc)})
631
631
    def _merge_repo(self, repo_id, workspace_id, target_ref,
                    source_repo, source_ref, merge_message,
                    merger_name, merger_email, dry_run=False,
                    use_rebase=False, close_branch=False):
        """Internal implementation of merge; see `merge` for parameter docs."""
        raise NotImplementedError
638
638
    def _maybe_prepare_merge_workspace(
            self, repo_id, workspace_id, target_ref, source_ref):
        """
        Create the merge workspace.

        :param workspace_id: `workspace_id` unique identifier.
        """
        raise NotImplementedError
647
647
648 def _get_legacy_shadow_repository_path(self, workspace_id):
648 def _get_legacy_shadow_repository_path(self, workspace_id):
649 """
649 """
650 Legacy version that was used before. We still need it for
650 Legacy version that was used before. We still need it for
651 backward compat
651 backward compat
652 """
652 """
653 return os.path.join(
653 return os.path.join(
654 os.path.dirname(self.path),
654 os.path.dirname(self.path),
655 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
655 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
656
656
657 def _get_shadow_repository_path(self, repo_id, workspace_id):
657 def _get_shadow_repository_path(self, repo_id, workspace_id):
658 # The name of the shadow repository must start with '.', so it is
658 # The name of the shadow repository must start with '.', so it is
659 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
659 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
660 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
660 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
661 if os.path.exists(legacy_repository_path):
661 if os.path.exists(legacy_repository_path):
662 return legacy_repository_path
662 return legacy_repository_path
663 else:
663 else:
664 return os.path.join(
664 return os.path.join(
665 os.path.dirname(self.path),
665 os.path.dirname(self.path),
666 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
666 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
667
667
    def cleanup_merge_workspace(self, repo_id, workspace_id):
        """
        Remove merge workspace.

        This function MUST not fail in case there is no workspace associated to
        the given `workspace_id`.

        :param repo_id: target repository database id, part of the shadow
            repository directory name.
        :param workspace_id: `workspace_id` unique identifier.
        """
        shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
        # timestamped rename target so a concurrent merge can re-create the
        # workspace under the original name while this one is being deleted
        shadow_repository_path_del = '{}.{}.delete'.format(
            shadow_repository_path, time.time())

        # move the shadow repo, so it never conflicts with the one used.
        # we use this method because shutil.rmtree had some edge case problems
        # removing symlinked repositories
        if not os.path.isdir(shadow_repository_path):
            # no workspace on disk: nothing to clean up, and MUST not fail
            return

        shutil.move(shadow_repository_path, shadow_repository_path_del)
        try:
            shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
        except Exception:
            log.exception('Failed to gracefully remove shadow repo under %s',
                          shadow_repository_path_del)
            # best-effort fallback: ignore remaining errors, leftovers stay
            shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
694
694
695 # ========== #
695 # ========== #
696 # COMMIT API #
696 # COMMIT API #
697 # ========== #
697 # ========== #
698
698
    @LazyProperty
    def in_memory_commit(self):
        """
        Returns :class:`InMemoryCommit` object for this repository.

        Abstract; each backend must provide its own implementation.
        """
        raise NotImplementedError
705
705
706 # ======================== #
706 # ======================== #
707 # UTILITIES FOR SUBCLASSES #
707 # UTILITIES FOR SUBCLASSES #
708 # ======================== #
708 # ======================== #
709
709
710 def _validate_diff_commits(self, commit1, commit2):
710 def _validate_diff_commits(self, commit1, commit2):
711 """
711 """
712 Validates that the given commits are related to this repository.
712 Validates that the given commits are related to this repository.
713
713
714 Intended as a utility for sub classes to have a consistent validation
714 Intended as a utility for sub classes to have a consistent validation
715 of input parameters in methods like :meth:`get_diff`.
715 of input parameters in methods like :meth:`get_diff`.
716 """
716 """
717 self._validate_commit(commit1)
717 self._validate_commit(commit1)
718 self._validate_commit(commit2)
718 self._validate_commit(commit2)
719 if (isinstance(commit1, EmptyCommit) and
719 if (isinstance(commit1, EmptyCommit) and
720 isinstance(commit2, EmptyCommit)):
720 isinstance(commit2, EmptyCommit)):
721 raise ValueError("Cannot compare two empty commits")
721 raise ValueError("Cannot compare two empty commits")
722
722
723 def _validate_commit(self, commit):
723 def _validate_commit(self, commit):
724 if not isinstance(commit, BaseCommit):
724 if not isinstance(commit, BaseCommit):
725 raise TypeError(
725 raise TypeError(
726 "%s is not of type BaseCommit" % repr(commit))
726 "%s is not of type BaseCommit" % repr(commit))
727 if commit.repository != self and not isinstance(commit, EmptyCommit):
727 if commit.repository != self and not isinstance(commit, EmptyCommit):
728 raise ValueError(
728 raise ValueError(
729 "Commit %s must be a valid commit from this repository %s, "
729 "Commit %s must be a valid commit from this repository %s, "
730 "related to this repository instead %s." %
730 "related to this repository instead %s." %
731 (commit, self, commit.repository))
731 (commit, self, commit.repository))
732
732
    def _validate_commit_id(self, commit_id):
        # commit ids are hash/ref strings; numeric indices go through
        # _validate_commit_idx instead
        if not isinstance(commit_id, compat.string_types):
            raise TypeError("commit_id must be a string value")

    def _validate_commit_idx(self, commit_idx):
        # `long` accepted for py2 compatibility with very large indices
        if not isinstance(commit_idx, (int, long)):
            raise TypeError("commit_idx must be a numeric value")
740
740
741 def _validate_branch_name(self, branch_name):
741 def _validate_branch_name(self, branch_name):
742 if branch_name and branch_name not in self.branches_all:
742 if branch_name and branch_name not in self.branches_all:
743 msg = ("Branch %s not found in %s" % (branch_name, self))
743 msg = ("Branch %s not found in %s" % (branch_name, self))
744 raise BranchDoesNotExistError(msg)
744 raise BranchDoesNotExistError(msg)
745
745
746 #
746 #
747 # Supporting deprecated API parts
747 # Supporting deprecated API parts
748 # TODO: johbo: consider to move this into a mixin
748 # TODO: johbo: consider to move this into a mixin
749 #
749 #
750
750
    @property
    def EMPTY_CHANGESET(self):
        # deprecated alias kept for the old changeset-based API
        warnings.warn(
            "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
        return self.EMPTY_COMMIT_ID

    @property
    def revisions(self):
        # deprecated alias of `commit_ids`
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        return self.commit_ids

    @revisions.setter
    def revisions(self, value):
        # deprecated setter counterpart of the `revisions` alias
        warnings.warn("Use commits attribute instead", DeprecationWarning)
        self.commit_ids = value
766
766
767 def get_changeset(self, revision=None, pre_load=None):
767 def get_changeset(self, revision=None, pre_load=None):
768 warnings.warn("Use get_commit instead", DeprecationWarning)
768 warnings.warn("Use get_commit instead", DeprecationWarning)
769 commit_id = None
769 commit_id = None
770 commit_idx = None
770 commit_idx = None
771 if isinstance(revision, compat.string_types):
771 if isinstance(revision, compat.string_types):
772 commit_id = revision
772 commit_id = revision
773 else:
773 else:
774 commit_idx = revision
774 commit_idx = revision
775 return self.get_commit(
775 return self.get_commit(
776 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
776 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
777
777
    def get_changesets(
            self, start=None, end=None, start_date=None, end_date=None,
            branch_name=None, pre_load=None):
        # deprecated alias of get_commits; start/end may be ids or indices
        warnings.warn("Use get_commits instead", DeprecationWarning)
        start_id = self._revision_to_commit(start)
        end_id = self._revision_to_commit(end)
        return self.get_commits(
            start_id=start_id, end_id=end_id, start_date=start_date,
            end_date=end_date, branch_name=branch_name, pre_load=pre_load)
787
787
788 def _revision_to_commit(self, revision):
788 def _revision_to_commit(self, revision):
789 """
789 """
790 Translates a revision to a commit_id
790 Translates a revision to a commit_id
791
791
792 Helps to support the old changeset based API which allows to use
792 Helps to support the old changeset based API which allows to use
793 commit ids and commit indices interchangeable.
793 commit ids and commit indices interchangeable.
794 """
794 """
795 if revision is None:
795 if revision is None:
796 return revision
796 return revision
797
797
798 if isinstance(revision, compat.string_types):
798 if isinstance(revision, compat.string_types):
799 commit_id = revision
799 commit_id = revision
800 else:
800 else:
801 commit_id = self.commit_ids[revision]
801 commit_id = self.commit_ids[revision]
802 return commit_id
802 return commit_id
803
803
    @property
    def in_memory_changeset(self):
        # deprecated alias of `in_memory_commit`
        warnings.warn("Use in_memory_commit instead", DeprecationWarning)
        return self.in_memory_commit
808
808
    def get_path_permissions(self, username):
        """
        Returns a path permission checker or None if not supported

        :param username: session user name
        :return: an instance of BasePathPermissionChecker or None
        """
        # base implementation: path permissions are not supported
        return None
817
817
    def install_hooks(self, force=False):
        # delegated to the vcsserver via the remote proxy
        return self._remote.install_hooks(force)

    def get_hooks_info(self):
        # delegated to the vcsserver via the remote proxy
        return self._remote.get_hooks_info()
823
823
824
824
825 class BaseCommit(object):
825 class BaseCommit(object):
826 """
826 """
827 Each backend should implement it's commit representation.
827 Each backend should implement it's commit representation.
828
828
829 **Attributes**
829 **Attributes**
830
830
831 ``repository``
831 ``repository``
832 repository object within which commit exists
832 repository object within which commit exists
833
833
834 ``id``
834 ``id``
835 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
835 The commit id, may be ``raw_id`` or i.e. for mercurial's tip
836 just ``tip``.
836 just ``tip``.
837
837
838 ``raw_id``
838 ``raw_id``
839 raw commit representation (i.e. full 40 length sha for git
839 raw commit representation (i.e. full 40 length sha for git
840 backend)
840 backend)
841
841
842 ``short_id``
842 ``short_id``
843 shortened (if apply) version of ``raw_id``; it would be simple
843 shortened (if apply) version of ``raw_id``; it would be simple
844 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
844 shortcut for ``raw_id[:12]`` for git/mercurial backends or same
845 as ``raw_id`` for subversion
845 as ``raw_id`` for subversion
846
846
847 ``idx``
847 ``idx``
848 commit index
848 commit index
849
849
850 ``files``
850 ``files``
851 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
851 list of ``FileNode`` (``Node`` with NodeKind.FILE) objects
852
852
853 ``dirs``
853 ``dirs``
854 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
854 list of ``DirNode`` (``Node`` with NodeKind.DIR) objects
855
855
856 ``nodes``
856 ``nodes``
857 combined list of ``Node`` objects
857 combined list of ``Node`` objects
858
858
859 ``author``
859 ``author``
860 author of the commit, as unicode
860 author of the commit, as unicode
861
861
862 ``message``
862 ``message``
863 message of the commit, as unicode
863 message of the commit, as unicode
864
864
865 ``parents``
865 ``parents``
866 list of parent commits
866 list of parent commits
867
867
868 """
868 """
869
869
870 branch = None
870 branch = None
871 """
871 """
872 Depending on the backend this should be set to the branch name of the
872 Depending on the backend this should be set to the branch name of the
873 commit. Backends not supporting branches on commits should leave this
873 commit. Backends not supporting branches on commits should leave this
874 value as ``None``.
874 value as ``None``.
875 """
875 """
876
876
877 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
877 _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
878 """
878 """
879 This template is used to generate a default prefix for repository archives
879 This template is used to generate a default prefix for repository archives
880 if no prefix has been specified.
880 if no prefix has been specified.
881 """
881 """
882
882
    def __str__(self):
        # e.g. "<GitCommit at 3:abcdef012345>"
        return '<%s at %s:%s>' % (
            self.__class__.__name__, self.idx, self.short_id)

    def __repr__(self):
        return self.__str__()

    def __unicode__(self):
        # py2 unicode form: "<idx>:<short_id>"
        return u'%s:%s' % (self.idx, self.short_id)
892
892
893 def __eq__(self, other):
893 def __eq__(self, other):
894 same_instance = isinstance(other, self.__class__)
894 same_instance = isinstance(other, self.__class__)
895 return same_instance and self.raw_id == other.raw_id
895 return same_instance and self.raw_id == other.raw_id
896
896
897 def __json__(self):
897 def __json__(self):
898 parents = []
898 parents = []
899 try:
899 try:
900 for parent in self.parents:
900 for parent in self.parents:
901 parents.append({'raw_id': parent.raw_id})
901 parents.append({'raw_id': parent.raw_id})
902 except NotImplementedError:
902 except NotImplementedError:
903 # empty commit doesn't have parents implemented
903 # empty commit doesn't have parents implemented
904 pass
904 pass
905
905
906 return {
906 return {
907 'short_id': self.short_id,
907 'short_id': self.short_id,
908 'raw_id': self.raw_id,
908 'raw_id': self.raw_id,
909 'revision': self.idx,
909 'revision': self.idx,
910 'message': self.message,
910 'message': self.message,
911 'date': self.date,
911 'date': self.date,
912 'author': self.author,
912 'author': self.author,
913 'parents': parents,
913 'parents': parents,
914 'branch': self.branch
914 'branch': self.branch
915 }
915 }
916
916
917 def __getstate__(self):
917 def __getstate__(self):
918 d = self.__dict__.copy()
918 d = self.__dict__.copy()
919 d.pop('_remote', None)
919 d.pop('_remote', None)
920 d.pop('repository', None)
920 d.pop('repository', None)
921 return d
921 return d
922
922
    def _get_refs(self):
        # ref names grouped by kind; `bookmarks` only exists on hg commits,
        # hence the getattr fallback
        return {
            'branches': [self.branch] if self.branch else [],
            'bookmarks': getattr(self, 'bookmarks', []),
            'tags': self.tags
        }
929
929
    @LazyProperty
    def last(self):
        """
        ``True`` if this is last commit in repository, ``False``
        otherwise; trying to access this attribute while there is no
        commits would raise `EmptyRepositoryError`
        """
        if self.repository is None:
            # detached commit: there is no commit list to compare against
            raise CommitError("Cannot check if it's most recent commit")
        return self.raw_id == self.repository.commit_ids[-1]
940
940
    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        raise NotImplementedError

    @LazyProperty
    def first_parent(self):
        """
        Returns the first parent commit, or an :class:`EmptyCommit`
        when this commit has no parents.
        """
        return self.parents[0] if self.parents else EmptyCommit()

    @property
    def merge(self):
        """
        Returns boolean if commit is a merge.
        """
        return len(self.parents) > 1

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        raise NotImplementedError
968
968
    @LazyProperty
    def id(self):
        """
        Returns string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def short_id(self):
        """
        Returns shortened version of ``raw_id`` attribute, as string,
        identifying this commit, useful for presentation to users.
        """
        raise NotImplementedError

    @LazyProperty
    def idx(self):
        """
        Returns integer identifying this commit.
        """
        raise NotImplementedError
997
997
    @LazyProperty
    def committer(self):
        """
        Returns committer for this commit
        """
        raise NotImplementedError

    @LazyProperty
    def committer_name(self):
        """
        Returns committer name for this commit
        """

        return author_name(self.committer)

    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address for this commit
        """

        return author_email(self.committer)

    @LazyProperty
    def author(self):
        """
        Returns author for this commit
        """

        raise NotImplementedError

    @LazyProperty
    def author_name(self):
        """
        Returns author name for this commit
        """

        return author_name(self.author)

    @LazyProperty
    def author_email(self):
        """
        Returns author email address for this commit
        """

        return author_email(self.author)
1044
1044
    def get_file_mode(self, path):
        """
        Returns stat mode of the file at `path`.
        """
        raise NotImplementedError

    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink
        """
        raise NotImplementedError

    def get_file_content(self, path):
        """
        Returns content of the file at the given `path`.
        """
        raise NotImplementedError

    def get_file_content_streamed(self, path):
        """
        Returns a streaming response from vcsserver with the content of the
        file at the given `path`, avoiding loading large files into memory.
        """
        raise NotImplementedError

    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.
        """
        raise NotImplementedError
1068
1074
1069 def get_path_commit(self, path, pre_load=None):
1075 def get_path_commit(self, path, pre_load=None):
1070 """
1076 """
1071 Returns last commit of the file at the given `path`.
1077 Returns last commit of the file at the given `path`.
1072
1078
1073 :param pre_load: Optional. List of commit attributes to load.
1079 :param pre_load: Optional. List of commit attributes to load.
1074 """
1080 """
1075 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1081 commits = self.get_path_history(path, limit=1, pre_load=pre_load)
1076 if not commits:
1082 if not commits:
1077 raise RepositoryError(
1083 raise RepositoryError(
1078 'Failed to fetch history for path {}. '
1084 'Failed to fetch history for path {}. '
1079 'Please check if such path exists in your repository'.format(
1085 'Please check if such path exists in your repository'.format(
1080 path))
1086 path))
1081 return commits[0]
1087 return commits[0]
1082
1088
    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.

        :param limit: Optional. Allows to limit the size of the returned
           history. This is intended as a hint to the underlying backend, so
           that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line

        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError
1103
1109
    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.

        :raises ``CommitError``: if node at the given ``path`` is not
           instance of ``DirNode``
        """
        raise NotImplementedError

    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
           ``path``
        """
        raise NotImplementedError

    def get_largefile_node(self, path):
        """
        Returns the path to largefile from Mercurial/Git-lfs storage.
        or None if it's not a largefile node
        """
        # base implementation: largefiles are not supported
        return None
1129
1135
1130 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1136 def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
1131 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1137 prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
1132 """
1138 """
1133 Creates an archive containing the contents of the repository.
1139 Creates an archive containing the contents of the repository.
1134
1140
1135 :param archive_dest_path: path to the file which to create the archive.
1141 :param archive_dest_path: path to the file which to create the archive.
1136 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1142 :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
1137 :param prefix: name of root directory in archive.
1143 :param prefix: name of root directory in archive.
1138 Default is repository name and commit's short_id joined with dash:
1144 Default is repository name and commit's short_id joined with dash:
1139 ``"{repo_name}-{short_id}"``.
1145 ``"{repo_name}-{short_id}"``.
1140 :param write_metadata: write a metadata file into archive.
1146 :param write_metadata: write a metadata file into archive.
1141 :param mtime: custom modification time for archive creation, defaults
1147 :param mtime: custom modification time for archive creation, defaults
1142 to time.time() if not given.
1148 to time.time() if not given.
1143 :param archive_at_path: pack files at this path (default '/')
1149 :param archive_at_path: pack files at this path (default '/')
1144
1150
1145 :raise VCSError: If prefix has a problem.
1151 :raise VCSError: If prefix has a problem.
1146 """
1152 """
1147 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1153 allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
1148 if kind not in allowed_kinds:
1154 if kind not in allowed_kinds:
1149 raise ImproperArchiveTypeError(
1155 raise ImproperArchiveTypeError(
1150 'Archive kind (%s) not supported use one of %s' %
1156 'Archive kind (%s) not supported use one of %s' %
1151 (kind, allowed_kinds))
1157 (kind, allowed_kinds))
1152
1158
1153 prefix = self._validate_archive_prefix(prefix)
1159 prefix = self._validate_archive_prefix(prefix)
1154
1160
1155 mtime = mtime is not None or time.mktime(self.date.timetuple())
1161 mtime = mtime is not None or time.mktime(self.date.timetuple())
1156
1162
1157 file_info = []
1163 file_info = []
1158 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1164 cur_rev = self.repository.get_commit(commit_id=self.raw_id)
1159 for _r, _d, files in cur_rev.walk(archive_at_path):
1165 for _r, _d, files in cur_rev.walk(archive_at_path):
1160 for f in files:
1166 for f in files:
1161 f_path = os.path.join(prefix, f.path)
1167 f_path = os.path.join(prefix, f.path)
1162 file_info.append(
1168 file_info.append(
1163 (f_path, f.mode, f.is_link(), f.raw_bytes))
1169 (f_path, f.mode, f.is_link(), f.raw_bytes))
1164
1170
1165 if write_metadata:
1171 if write_metadata:
1166 metadata = [
1172 metadata = [
1167 ('repo_name', self.repository.name),
1173 ('repo_name', self.repository.name),
1168 ('commit_id', self.raw_id),
1174 ('commit_id', self.raw_id),
1169 ('mtime', mtime),
1175 ('mtime', mtime),
1170 ('branch', self.branch),
1176 ('branch', self.branch),
1171 ('tags', ','.join(self.tags)),
1177 ('tags', ','.join(self.tags)),
1172 ]
1178 ]
1173 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1179 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
1174 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1180 file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))
1175
1181
1176 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1182 connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)
1177
1183
1178 def _validate_archive_prefix(self, prefix):
1184 def _validate_archive_prefix(self, prefix):
1179 if prefix is None:
1185 if prefix is None:
1180 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1186 prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
1181 repo_name=safe_str(self.repository.name),
1187 repo_name=safe_str(self.repository.name),
1182 short_id=self.short_id)
1188 short_id=self.short_id)
1183 elif not isinstance(prefix, str):
1189 elif not isinstance(prefix, str):
1184 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1190 raise ValueError("prefix not a bytes object: %s" % repr(prefix))
1185 elif prefix.startswith('/'):
1191 elif prefix.startswith('/'):
1186 raise VCSError("Prefix cannot start with leading slash")
1192 raise VCSError("Prefix cannot start with leading slash")
1187 elif prefix.strip() == '':
1193 elif prefix.strip() == '':
1188 raise VCSError("Prefix cannot be empty")
1194 raise VCSError("Prefix cannot be empty")
1189 return prefix
1195 return prefix
1190
1196
    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit.
        """
        # the empty path denotes the repository root
        return self.get_node('')
1197
1203
    def next(self, branch=None):
        """
        Returns next commit from current, if branch is given it will return
        next commit belonging to this branch

        :param branch: show commits within the given named branch
        """
        # scan forward from the commit after this one
        indexes = xrange(self.idx + 1, self.repository.count())
        return self._find_next(indexes, branch)

    def prev(self, branch=None):
        """
        Returns previous commit from current, if branch is given it will
        return previous commit belonging to this branch

        :param branch: show commit within the given named branch
        """
        # scan backwards from the commit before this one
        indexes = xrange(self.idx - 1, -1, -1)
        return self._find_next(indexes, branch)

    def _find_next(self, indexes, branch=None):
        """
        Returns the first commit along `indexes` (optionally restricted to
        `branch`); raises CommitDoesNotExistError when none matches.
        """
        if branch and self.branch != branch:
            raise VCSError('Branch option used on commit not belonging '
                           'to that branch')

        for next_idx in indexes:
            commit = self.repository.get_commit(commit_idx=next_idx)
            if branch and branch != commit.branch:
                continue
            return commit
        raise CommitDoesNotExistError
1229
1235
1230 def diff(self, ignore_whitespace=True, context=3):
1236 def diff(self, ignore_whitespace=True, context=3):
1231 """
1237 """
1232 Returns a `Diff` object representing the change made by this commit.
1238 Returns a `Diff` object representing the change made by this commit.
1233 """
1239 """
1234 parent = self.first_parent
1240 parent = self.first_parent
1235 diff = self.repository.get_diff(
1241 diff = self.repository.get_diff(
1236 parent, self,
1242 parent, self,
1237 ignore_whitespace=ignore_whitespace,
1243 ignore_whitespace=ignore_whitespace,
1238 context=context)
1244 context=context)
1239 return diff
1245 return diff
1240
1246
1241 @LazyProperty
1247 @LazyProperty
1242 def added(self):
1248 def added(self):
1243 """
1249 """
1244 Returns list of added ``FileNode`` objects.
1250 Returns list of added ``FileNode`` objects.
1245 """
1251 """
1246 raise NotImplementedError
1252 raise NotImplementedError
1247
1253
1248 @LazyProperty
1254 @LazyProperty
1249 def changed(self):
1255 def changed(self):
1250 """
1256 """
1251 Returns list of modified ``FileNode`` objects.
1257 Returns list of modified ``FileNode`` objects.
1252 """
1258 """
1253 raise NotImplementedError
1259 raise NotImplementedError
1254
1260
1255 @LazyProperty
1261 @LazyProperty
1256 def removed(self):
1262 def removed(self):
1257 """
1263 """
1258 Returns list of removed ``FileNode`` objects.
1264 Returns list of removed ``FileNode`` objects.
1259 """
1265 """
1260 raise NotImplementedError
1266 raise NotImplementedError
1261
1267
1262 @LazyProperty
1268 @LazyProperty
1263 def size(self):
1269 def size(self):
1264 """
1270 """
1265 Returns total number of bytes from contents of all filenodes.
1271 Returns total number of bytes from contents of all filenodes.
1266 """
1272 """
1267 return sum((node.size for node in self.get_filenodes_generator()))
1273 return sum((node.size for node in self.get_filenodes_generator()))
1268
1274
1269 def walk(self, topurl=''):
1275 def walk(self, topurl=''):
1270 """
1276 """
1271 Similar to os.walk method. Insted of filesystem it walks through
1277 Similar to os.walk method. Insted of filesystem it walks through
1272 commit starting at given ``topurl``. Returns generator of tuples
1278 commit starting at given ``topurl``. Returns generator of tuples
1273 (topnode, dirnodes, filenodes).
1279 (topnode, dirnodes, filenodes).
1274 """
1280 """
1275 topnode = self.get_node(topurl)
1281 topnode = self.get_node(topurl)
1276 if not topnode.is_dir():
1282 if not topnode.is_dir():
1277 return
1283 return
1278 yield (topnode, topnode.dirs, topnode.files)
1284 yield (topnode, topnode.dirs, topnode.files)
1279 for dirnode in topnode.dirs:
1285 for dirnode in topnode.dirs:
1280 for tup in self.walk(dirnode.path):
1286 for tup in self.walk(dirnode.path):
1281 yield tup
1287 yield tup
1282
1288
1283 def get_filenodes_generator(self):
1289 def get_filenodes_generator(self):
1284 """
1290 """
1285 Returns generator that yields *all* file nodes.
1291 Returns generator that yields *all* file nodes.
1286 """
1292 """
1287 for topnode, dirs, files in self.walk():
1293 for topnode, dirs, files in self.walk():
1288 for node in files:
1294 for node in files:
1289 yield node
1295 yield node
1290
1296
1291 #
1297 #
1292 # Utilities for sub classes to support consistent behavior
1298 # Utilities for sub classes to support consistent behavior
1293 #
1299 #
1294
1300
1295 def no_node_at_path(self, path):
1301 def no_node_at_path(self, path):
1296 return NodeDoesNotExistError(
1302 return NodeDoesNotExistError(
1297 u"There is no file nor directory at the given path: "
1303 u"There is no file nor directory at the given path: "
1298 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1304 u"`%s` at commit %s" % (safe_unicode(path), self.short_id))
1299
1305
1300 def _fix_path(self, path):
1306 def _fix_path(self, path):
1301 """
1307 """
1302 Paths are stored without trailing slash so we need to get rid off it if
1308 Paths are stored without trailing slash so we need to get rid off it if
1303 needed.
1309 needed.
1304 """
1310 """
1305 return path.rstrip('/')
1311 return path.rstrip('/')
1306
1312
1307 #
1313 #
1308 # Deprecated API based on changesets
1314 # Deprecated API based on changesets
1309 #
1315 #
1310
1316
1311 @property
1317 @property
1312 def revision(self):
1318 def revision(self):
1313 warnings.warn("Use idx instead", DeprecationWarning)
1319 warnings.warn("Use idx instead", DeprecationWarning)
1314 return self.idx
1320 return self.idx
1315
1321
1316 @revision.setter
1322 @revision.setter
1317 def revision(self, value):
1323 def revision(self, value):
1318 warnings.warn("Use idx instead", DeprecationWarning)
1324 warnings.warn("Use idx instead", DeprecationWarning)
1319 self.idx = value
1325 self.idx = value
1320
1326
1321 def get_file_changeset(self, path):
1327 def get_file_changeset(self, path):
1322 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1328 warnings.warn("Use get_path_commit instead", DeprecationWarning)
1323 return self.get_path_commit(path)
1329 return self.get_path_commit(path)
1324
1330
1325
1331
class BaseChangesetClass(type):
    """
    Metaclass making ``isinstance(x, BaseChangeset)`` accept any
    ``BaseCommit`` instance (backwards-compatibility shim).
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseCommit)
1330
1336
1331
1337
class BaseChangeset(BaseCommit):
    """Deprecated alias of :class:`BaseCommit`; warns on instantiation."""

    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1340
1346
1341
1347
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
          latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
          marked as *added*
        """
        # Check if not already marked as *added* first
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
          marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
          marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
          commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
          be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
          be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
          ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # BUG FIX: previously reported `node.path` where `node` was the
            # stale inner-loop variable (often a node that is NOT missing);
            # report an actually-missing node instead.
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (missing.pop().path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
          would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
          ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
          branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1568
1574
1569
1575
class BaseInMemoryChangesetClass(type):
    """
    Metaclass making ``isinstance(x, BaseInMemoryChangeset)`` accept any
    ``BaseInMemoryCommit`` instance (backwards-compatibility shim).
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, BaseInMemoryCommit)
1574
1580
1575
1581
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """Deprecated alias of :class:`BaseInMemoryCommit`; warns on creation."""

    __metaclass__ = BaseInMemoryChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1584
1590
1585
1591
class EmptyCommit(BaseCommit):
    """
    An dummy empty commit. It's possible to pass hash when creating
    an EmptyCommit
    """

    def __init__(
            self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """

        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # resolve lazily to avoid an import cycle at module load time
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_path_commit(self, path):
        return self

    def get_file_content(self, path):
        return u''

    def get_file_content_streamed(self, path):
        # BUG FIX: original called self.get_file_content() with no argument,
        # raising TypeError (the method requires `path`).
        yield self.get_file_content(path)

    def get_file_size(self, path):
        return 0
1636
1645
1637
1646
class EmptyChangesetClass(type):
    """
    Metaclass making ``isinstance(x, EmptyChangeset)`` accept any
    ``EmptyCommit`` instance (backwards-compatibility shim).
    """

    def __instancecheck__(self, instance):
        return isinstance(instance, EmptyCommit)
1642
1651
1643
1652
class EmptyChangeset(EmptyCommit):
    """Deprecated alias of :class:`EmptyCommit`; warns on instantiation."""

    __metaclass__ = EmptyChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # CONSISTENCY FIX: was `super(EmptyCommit, cls)`, skipping this
        # class's own level in the MRO; siblings (BaseChangeset,
        # BaseInMemoryChangeset) pass their own class here.
        return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        """Deprecated alias for :attr:`idx`."""
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1672
1681
1673
1682
class EmptyRepository(BaseRepository):
    """Null-object repository: accepts any constructor args, yields empty diffs."""

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        pass

    def get_diff(self, *args, **kwargs):
        # imported lazily; only needed when a diff is actually requested
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        return GitDiff('')
1681
1690
1682
1691
class CollectionGenerator(object):
    """Lazy sequence of commits materialized on demand from ``commit_ids``."""

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # TODO: (oliver) this isn't currently hooked up
        self.collection_size = None
        self.pre_load = pre_load
        self.translate_tag = translate_tag

    def __len__(self):
        if self.collection_size is not None:
            return self.collection_size
        return len(self.commit_ids)

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __getslice__(self, i, j):
        """
        Returns an iterator of sliced repository
        """
        sliced_ids = self.commit_ids[i:j]
        return self.__class__(
            self.repo, sliced_ids, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (self.__len__())
1722
1731
1723
1732
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        self._values = {}

    def copy(self):
        """Return an independent copy; each section dict is duplicated."""
        clone = Config()
        for section_name, section_values in self._values.items():
            clone._values[section_name] = section_values.copy()
        return clone

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Iterate ``(option, value)`` pairs of ``section`` (empty if absent)."""
        return self._values.get(section, {}).iteritems()

    def get(self, section, option):
        """Return the value for ``option`` in ``section``, or ``None``."""
        return self._values.get(section, {}).get(option)

    def set(self, section, option, value):
        """Set ``option`` to ``value``, creating ``section`` if needed."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop all options of ``section`` (section itself remains)."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        return [
            (safe_str(section), safe_str(option), safe_str(value))
            for section in self._values
            for option, value in self._values[section].items()]
1769
1778
1770
1779
1771 class Diff(object):
1780 class Diff(object):
1772 """
1781 """
1773 Represents a diff result from a repository backend.
1782 Represents a diff result from a repository backend.
1774
1783
1775 Subclasses have to provide a backend specific value for
1784 Subclasses have to provide a backend specific value for
1776 :attr:`_header_re` and :attr:`_meta_re`.
1785 :attr:`_header_re` and :attr:`_meta_re`.
1777 """
1786 """
1778 _meta_re = None
1787 _meta_re = None
1779 _header_re = None
1788 _header_re = None
1780
1789
1781 def __init__(self, raw_diff):
1790 def __init__(self, raw_diff):
1782 self.raw = raw_diff
1791 self.raw = raw_diff
1783
1792
1784 def chunks(self):
1793 def chunks(self):
1785 """
1794 """
1786 split the diff in chunks of separate --git a/file b/file chunks
1795 split the diff in chunks of separate --git a/file b/file chunks
1787 to make diffs consistent we must prepend with \n, and make sure
1796 to make diffs consistent we must prepend with \n, and make sure
1788 we can detect last chunk as this was also has special rule
1797 we can detect last chunk as this was also has special rule
1789 """
1798 """
1790
1799
1791 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1800 diff_parts = ('\n' + self.raw).split('\ndiff --git')
1792 header = diff_parts[0]
1801 header = diff_parts[0]
1793
1802
1794 if self._meta_re:
1803 if self._meta_re:
1795 match = self._meta_re.match(header)
1804 match = self._meta_re.match(header)
1796
1805
1797 chunks = diff_parts[1:]
1806 chunks = diff_parts[1:]
1798 total_chunks = len(chunks)
1807 total_chunks = len(chunks)
1799
1808
1800 return (
1809 return (
1801 DiffChunk(chunk, self, cur_chunk == total_chunks)
1810 DiffChunk(chunk, self, cur_chunk == total_chunks)
1802 for cur_chunk, chunk in enumerate(chunks, start=1))
1811 for cur_chunk, chunk in enumerate(chunks, start=1))
1803
1812
1804
1813
1805 class DiffChunk(object):
1814 class DiffChunk(object):
1806
1815
1807 def __init__(self, chunk, diff, last_chunk):
1816 def __init__(self, chunk, diff, last_chunk):
1808 self._diff = diff
1817 self._diff = diff
1809
1818
1810 # since we split by \ndiff --git that part is lost from original diff
1819 # since we split by \ndiff --git that part is lost from original diff
1811 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1820 # we need to re-apply it at the end, EXCEPT ! if it's last chunk
1812 if not last_chunk:
1821 if not last_chunk:
1813 chunk += '\n'
1822 chunk += '\n'
1814
1823
1815 match = self._diff._header_re.match(chunk)
1824 match = self._diff._header_re.match(chunk)
1816 self.header = match.groupdict()
1825 self.header = match.groupdict()
1817 self.diff = chunk[match.end():]
1826 self.diff = chunk[match.end():]
1818 self.raw = chunk
1827 self.raw = chunk
1819
1828
1820
1829
1821 class BasePathPermissionChecker(object):
1830 class BasePathPermissionChecker(object):
1822
1831
1823 @staticmethod
1832 @staticmethod
1824 def create_from_patterns(includes, excludes):
1833 def create_from_patterns(includes, excludes):
1825 if includes and '*' in includes and not excludes:
1834 if includes and '*' in includes and not excludes:
1826 return AllPathPermissionChecker()
1835 return AllPathPermissionChecker()
1827 elif excludes and '*' in excludes:
1836 elif excludes and '*' in excludes:
1828 return NonePathPermissionChecker()
1837 return NonePathPermissionChecker()
1829 else:
1838 else:
1830 return PatternPathPermissionChecker(includes, excludes)
1839 return PatternPathPermissionChecker(includes, excludes)
1831
1840
1832 @property
1841 @property
1833 def has_full_access(self):
1842 def has_full_access(self):
1834 raise NotImplemented()
1843 raise NotImplemented()
1835
1844
1836 def has_access(self, path):
1845 def has_access(self, path):
1837 raise NotImplemented()
1846 raise NotImplemented()
1838
1847
1839
1848
1840 class AllPathPermissionChecker(BasePathPermissionChecker):
1849 class AllPathPermissionChecker(BasePathPermissionChecker):
1841
1850
1842 @property
1851 @property
1843 def has_full_access(self):
1852 def has_full_access(self):
1844 return True
1853 return True
1845
1854
1846 def has_access(self, path):
1855 def has_access(self, path):
1847 return True
1856 return True
1848
1857
1849
1858
1850 class NonePathPermissionChecker(BasePathPermissionChecker):
1859 class NonePathPermissionChecker(BasePathPermissionChecker):
1851
1860
1852 @property
1861 @property
1853 def has_full_access(self):
1862 def has_full_access(self):
1854 return False
1863 return False
1855
1864
1856 def has_access(self, path):
1865 def has_access(self, path):
1857 return False
1866 return False
1858
1867
1859
1868
1860 class PatternPathPermissionChecker(BasePathPermissionChecker):
1869 class PatternPathPermissionChecker(BasePathPermissionChecker):
1861
1870
1862 def __init__(self, includes, excludes):
1871 def __init__(self, includes, excludes):
1863 self.includes = includes
1872 self.includes = includes
1864 self.excludes = excludes
1873 self.excludes = excludes
1865 self.includes_re = [] if not includes else [
1874 self.includes_re = [] if not includes else [
1866 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1875 re.compile(fnmatch.translate(pattern)) for pattern in includes]
1867 self.excludes_re = [] if not excludes else [
1876 self.excludes_re = [] if not excludes else [
1868 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1877 re.compile(fnmatch.translate(pattern)) for pattern in excludes]
1869
1878
1870 @property
1879 @property
1871 def has_full_access(self):
1880 def has_full_access(self):
1872 return '*' in self.includes and not self.excludes
1881 return '*' in self.includes and not self.excludes
1873
1882
1874 def has_access(self, path):
1883 def has_access(self, path):
1875 for regex in self.excludes_re:
1884 for regex in self.excludes_re:
1876 if regex.match(path):
1885 if regex.match(path):
1877 return False
1886 return False
1878 for regex in self.includes_re:
1887 for regex in self.includes_re:
1879 if regex.match(path):
1888 if regex.match(path):
1880 return True
1889 return True
1881 return False
1890 return False
@@ -1,474 +1,479 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 GIT commit module
22 GIT commit module
23 """
23 """
24
24
25 import re
25 import re
26 import stat
26 import stat
27 from itertools import chain
27 from itertools import chain
28 from StringIO import StringIO
28 from StringIO import StringIO
29
29
30 from zope.cachedescriptors.property import Lazy as LazyProperty
30 from zope.cachedescriptors.property import Lazy as LazyProperty
31
31
32 from rhodecode.lib.datelib import utcdate_fromtimestamp
32 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 from rhodecode.lib.utils import safe_unicode, safe_str
33 from rhodecode.lib.utils import safe_unicode, safe_str
34 from rhodecode.lib.utils2 import safe_int
34 from rhodecode.lib.utils2 import safe_int
35 from rhodecode.lib.vcs.conf import settings
35 from rhodecode.lib.vcs.conf import settings
36 from rhodecode.lib.vcs.backends import base
36 from rhodecode.lib.vcs.backends import base
37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 from rhodecode.lib.vcs.nodes import (
38 from rhodecode.lib.vcs.nodes import (
39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 RemovedFileNodesGenerator, LargeFileNode)
41 RemovedFileNodesGenerator, LargeFileNode)
42 from rhodecode.lib.vcs.compat import configparser
42 from rhodecode.lib.vcs.compat import configparser
43
43
44
44
45 class GitCommit(base.BaseCommit):
45 class GitCommit(base.BaseCommit):
46 """
46 """
47 Represents state of the repository at single commit id.
47 Represents state of the repository at single commit id.
48 """
48 """
49
49
50 _filter_pre_load = [
50 _filter_pre_load = [
51 # done through a more complex tree walk on parents
51 # done through a more complex tree walk on parents
52 "affected_files",
52 "affected_files",
53 # done through subprocess not remote call
53 # done through subprocess not remote call
54 "children",
54 "children",
55 # done through a more complex tree walk on parents
55 # done through a more complex tree walk on parents
56 "status",
56 "status",
57 # mercurial specific property not supported here
57 # mercurial specific property not supported here
58 "_file_paths",
58 "_file_paths",
59 # mercurial specific property not supported here
59 # mercurial specific property not supported here
60 'obsolete',
60 'obsolete',
61 # mercurial specific property not supported here
61 # mercurial specific property not supported here
62 'phase',
62 'phase',
63 # mercurial specific property not supported here
63 # mercurial specific property not supported here
64 'hidden'
64 'hidden'
65 ]
65 ]
66
66
67 def __init__(self, repository, raw_id, idx, pre_load=None):
67 def __init__(self, repository, raw_id, idx, pre_load=None):
68 self.repository = repository
68 self.repository = repository
69 self._remote = repository._remote
69 self._remote = repository._remote
70 # TODO: johbo: Tweak of raw_id should not be necessary
70 # TODO: johbo: Tweak of raw_id should not be necessary
71 self.raw_id = safe_str(raw_id)
71 self.raw_id = safe_str(raw_id)
72 self.idx = idx
72 self.idx = idx
73
73
74 self._set_bulk_properties(pre_load)
74 self._set_bulk_properties(pre_load)
75
75
76 # caches
76 # caches
77 self._stat_modes = {} # stat info for paths
77 self._stat_modes = {} # stat info for paths
78 self._paths = {} # path processed with parse_tree
78 self._paths = {} # path processed with parse_tree
79 self.nodes = {}
79 self.nodes = {}
80 self._submodules = None
80 self._submodules = None
81
81
82 def _set_bulk_properties(self, pre_load):
82 def _set_bulk_properties(self, pre_load):
83
83
84 if not pre_load:
84 if not pre_load:
85 return
85 return
86 pre_load = [entry for entry in pre_load
86 pre_load = [entry for entry in pre_load
87 if entry not in self._filter_pre_load]
87 if entry not in self._filter_pre_load]
88 if not pre_load:
88 if not pre_load:
89 return
89 return
90
90
91 result = self._remote.bulk_request(self.raw_id, pre_load)
91 result = self._remote.bulk_request(self.raw_id, pre_load)
92 for attr, value in result.items():
92 for attr, value in result.items():
93 if attr in ["author", "message"]:
93 if attr in ["author", "message"]:
94 if value:
94 if value:
95 value = safe_unicode(value)
95 value = safe_unicode(value)
96 elif attr == "date":
96 elif attr == "date":
97 value = utcdate_fromtimestamp(*value)
97 value = utcdate_fromtimestamp(*value)
98 elif attr == "parents":
98 elif attr == "parents":
99 value = self._make_commits(value)
99 value = self._make_commits(value)
100 elif attr == "branch":
100 elif attr == "branch":
101 value = value[0] if value else None
101 value = value[0] if value else None
102 self.__dict__[attr] = value
102 self.__dict__[attr] = value
103
103
104 @LazyProperty
104 @LazyProperty
105 def _commit(self):
105 def _commit(self):
106 return self._remote[self.raw_id]
106 return self._remote[self.raw_id]
107
107
108 @LazyProperty
108 @LazyProperty
109 def _tree_id(self):
109 def _tree_id(self):
110 return self._remote[self._commit['tree']]['id']
110 return self._remote[self._commit['tree']]['id']
111
111
112 @LazyProperty
112 @LazyProperty
113 def id(self):
113 def id(self):
114 return self.raw_id
114 return self.raw_id
115
115
116 @LazyProperty
116 @LazyProperty
117 def short_id(self):
117 def short_id(self):
118 return self.raw_id[:12]
118 return self.raw_id[:12]
119
119
120 @LazyProperty
120 @LazyProperty
121 def message(self):
121 def message(self):
122 return safe_unicode(self._remote.message(self.id))
122 return safe_unicode(self._remote.message(self.id))
123
123
124 @LazyProperty
124 @LazyProperty
125 def committer(self):
125 def committer(self):
126 return safe_unicode(self._remote.author(self.id))
126 return safe_unicode(self._remote.author(self.id))
127
127
128 @LazyProperty
128 @LazyProperty
129 def author(self):
129 def author(self):
130 return safe_unicode(self._remote.author(self.id))
130 return safe_unicode(self._remote.author(self.id))
131
131
132 @LazyProperty
132 @LazyProperty
133 def date(self):
133 def date(self):
134 unix_ts, tz = self._remote.date(self.raw_id)
134 unix_ts, tz = self._remote.date(self.raw_id)
135 return utcdate_fromtimestamp(unix_ts, tz)
135 return utcdate_fromtimestamp(unix_ts, tz)
136
136
137 @LazyProperty
137 @LazyProperty
138 def status(self):
138 def status(self):
139 """
139 """
140 Returns modified, added, removed, deleted files for current commit
140 Returns modified, added, removed, deleted files for current commit
141 """
141 """
142 return self.changed, self.added, self.removed
142 return self.changed, self.added, self.removed
143
143
144 @LazyProperty
144 @LazyProperty
145 def tags(self):
145 def tags(self):
146 tags = [safe_unicode(name) for name,
146 tags = [safe_unicode(name) for name,
147 commit_id in self.repository.tags.iteritems()
147 commit_id in self.repository.tags.iteritems()
148 if commit_id == self.raw_id]
148 if commit_id == self.raw_id]
149 return tags
149 return tags
150
150
151 @LazyProperty
151 @LazyProperty
152 def commit_branches(self):
152 def commit_branches(self):
153 branches = []
153 branches = []
154 for name, commit_id in self.repository.branches.iteritems():
154 for name, commit_id in self.repository.branches.iteritems():
155 if commit_id == self.raw_id:
155 if commit_id == self.raw_id:
156 branches.append(name)
156 branches.append(name)
157 return branches
157 return branches
158
158
159 @LazyProperty
159 @LazyProperty
160 def branch(self):
160 def branch(self):
161 branches = self._remote.branch(self.raw_id)
161 branches = self._remote.branch(self.raw_id)
162
162
163 if branches:
163 if branches:
164 # actually commit can have multiple branches in git
164 # actually commit can have multiple branches in git
165 return safe_unicode(branches[0])
165 return safe_unicode(branches[0])
166
166
167 def _get_tree_id_for_path(self, path):
167 def _get_tree_id_for_path(self, path):
168 path = safe_str(path)
168 path = safe_str(path)
169 if path in self._paths:
169 if path in self._paths:
170 return self._paths[path]
170 return self._paths[path]
171
171
172 tree_id = self._tree_id
172 tree_id = self._tree_id
173
173
174 path = path.strip('/')
174 path = path.strip('/')
175 if path == '':
175 if path == '':
176 data = [tree_id, "tree"]
176 data = [tree_id, "tree"]
177 self._paths[''] = data
177 self._paths[''] = data
178 return data
178 return data
179
179
180 tree_id, tree_type, tree_mode = \
180 tree_id, tree_type, tree_mode = \
181 self._remote.tree_and_type_for_path(self.raw_id, path)
181 self._remote.tree_and_type_for_path(self.raw_id, path)
182 if tree_id is None:
182 if tree_id is None:
183 raise self.no_node_at_path(path)
183 raise self.no_node_at_path(path)
184
184
185 self._paths[path] = [tree_id, tree_type]
185 self._paths[path] = [tree_id, tree_type]
186 self._stat_modes[path] = tree_mode
186 self._stat_modes[path] = tree_mode
187
187
188 if path not in self._paths:
188 if path not in self._paths:
189 raise self.no_node_at_path(path)
189 raise self.no_node_at_path(path)
190
190
191 return self._paths[path]
191 return self._paths[path]
192
192
193 def _get_kind(self, path):
193 def _get_kind(self, path):
194 tree_id, type_ = self._get_tree_id_for_path(path)
194 tree_id, type_ = self._get_tree_id_for_path(path)
195 if type_ == 'blob':
195 if type_ == 'blob':
196 return NodeKind.FILE
196 return NodeKind.FILE
197 elif type_ == 'tree':
197 elif type_ == 'tree':
198 return NodeKind.DIR
198 return NodeKind.DIR
199 elif type_ == 'link':
199 elif type_ == 'link':
200 return NodeKind.SUBMODULE
200 return NodeKind.SUBMODULE
201 return None
201 return None
202
202
203 def _get_filectx(self, path):
203 def _get_filectx(self, path):
204 path = self._fix_path(path)
204 path = self._fix_path(path)
205 if self._get_kind(path) != NodeKind.FILE:
205 if self._get_kind(path) != NodeKind.FILE:
206 raise CommitError(
206 raise CommitError(
207 "File does not exist for commit %s at '%s'" % (self.raw_id, path))
207 "File does not exist for commit %s at '%s'" % (self.raw_id, path))
208 return path
208 return path
209
209
210 def _get_file_nodes(self):
210 def _get_file_nodes(self):
211 return chain(*(t[2] for t in self.walk()))
211 return chain(*(t[2] for t in self.walk()))
212
212
213 @LazyProperty
213 @LazyProperty
214 def parents(self):
214 def parents(self):
215 """
215 """
216 Returns list of parent commits.
216 Returns list of parent commits.
217 """
217 """
218 parent_ids = self._remote.parents(self.id)
218 parent_ids = self._remote.parents(self.id)
219 return self._make_commits(parent_ids)
219 return self._make_commits(parent_ids)
220
220
221 @LazyProperty
221 @LazyProperty
222 def children(self):
222 def children(self):
223 """
223 """
224 Returns list of child commits.
224 Returns list of child commits.
225 """
225 """
226
226
227 children = self._remote.children(self.raw_id)
227 children = self._remote.children(self.raw_id)
228 return self._make_commits(children)
228 return self._make_commits(children)
229
229
230 def _make_commits(self, commit_ids):
230 def _make_commits(self, commit_ids):
231 def commit_maker(_commit_id):
231 def commit_maker(_commit_id):
232 return self.repository.get_commit(commit_id=commit_id)
232 return self.repository.get_commit(commit_id=commit_id)
233
233
234 return [commit_maker(commit_id) for commit_id in commit_ids]
234 return [commit_maker(commit_id) for commit_id in commit_ids]
235
235
236 def get_file_mode(self, path):
236 def get_file_mode(self, path):
237 """
237 """
238 Returns stat mode of the file at the given `path`.
238 Returns stat mode of the file at the given `path`.
239 """
239 """
240 path = safe_str(path)
240 path = safe_str(path)
241 # ensure path is traversed
241 # ensure path is traversed
242 self._get_tree_id_for_path(path)
242 self._get_tree_id_for_path(path)
243 return self._stat_modes[path]
243 return self._stat_modes[path]
244
244
245 def is_link(self, path):
245 def is_link(self, path):
246 return stat.S_ISLNK(self.get_file_mode(path))
246 return stat.S_ISLNK(self.get_file_mode(path))
247
247
248 def get_file_content(self, path):
248 def get_file_content(self, path):
249 """
249 """
250 Returns content of the file at given `path`.
250 Returns content of the file at given `path`.
251 """
251 """
252 tree_id, _ = self._get_tree_id_for_path(path)
252 tree_id, _ = self._get_tree_id_for_path(path)
253 return self._remote.blob_as_pretty_string(tree_id)
253 return self._remote.blob_as_pretty_string(tree_id)
254
254
255 def get_file_content_streamed(self, path):
256 tree_id, _ = self._get_tree_id_for_path(path)
257 stream_method = getattr(self._remote, 'stream:blob_as_pretty_string')
258 return stream_method(tree_id)
259
255 def get_file_size(self, path):
260 def get_file_size(self, path):
256 """
261 """
257 Returns size of the file at given `path`.
262 Returns size of the file at given `path`.
258 """
263 """
259 tree_id, _ = self._get_tree_id_for_path(path)
264 tree_id, _ = self._get_tree_id_for_path(path)
260 return self._remote.blob_raw_length(tree_id)
265 return self._remote.blob_raw_length(tree_id)
261
266
262 def get_path_history(self, path, limit=None, pre_load=None):
267 def get_path_history(self, path, limit=None, pre_load=None):
263 """
268 """
264 Returns history of file as reversed list of `GitCommit` objects for
269 Returns history of file as reversed list of `GitCommit` objects for
265 which file at given `path` has been modified.
270 which file at given `path` has been modified.
266 """
271 """
267
272
268 path = self._get_filectx(path)
273 path = self._get_filectx(path)
269 hist = self._remote.node_history(self.raw_id, path, limit)
274 hist = self._remote.node_history(self.raw_id, path, limit)
270 return [
275 return [
271 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
276 self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
272 for commit_id in hist]
277 for commit_id in hist]
273
278
274 def get_file_annotate(self, path, pre_load=None):
279 def get_file_annotate(self, path, pre_load=None):
275 """
280 """
276 Returns a generator of four element tuples with
281 Returns a generator of four element tuples with
277 lineno, commit_id, commit lazy loader and line
282 lineno, commit_id, commit lazy loader and line
278 """
283 """
279
284
280 result = self._remote.node_annotate(self.raw_id, path)
285 result = self._remote.node_annotate(self.raw_id, path)
281
286
282 for ln_no, commit_id, content in result:
287 for ln_no, commit_id, content in result:
283 yield (
288 yield (
284 ln_no, commit_id,
289 ln_no, commit_id,
285 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
290 lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
286 content)
291 content)
287
292
288 def get_nodes(self, path):
293 def get_nodes(self, path):
289
294
290 if self._get_kind(path) != NodeKind.DIR:
295 if self._get_kind(path) != NodeKind.DIR:
291 raise CommitError(
296 raise CommitError(
292 "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
297 "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
293 path = self._fix_path(path)
298 path = self._fix_path(path)
294
299
295 tree_id, _ = self._get_tree_id_for_path(path)
300 tree_id, _ = self._get_tree_id_for_path(path)
296
301
297 dirnodes = []
302 dirnodes = []
298 filenodes = []
303 filenodes = []
299
304
300 # extracted tree ID gives us our files...
305 # extracted tree ID gives us our files...
301 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
306 for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
302 if type_ == 'link':
307 if type_ == 'link':
303 url = self._get_submodule_url('/'.join((path, name)))
308 url = self._get_submodule_url('/'.join((path, name)))
304 dirnodes.append(SubModuleNode(
309 dirnodes.append(SubModuleNode(
305 name, url=url, commit=id_, alias=self.repository.alias))
310 name, url=url, commit=id_, alias=self.repository.alias))
306 continue
311 continue
307
312
308 if path != '':
313 if path != '':
309 obj_path = '/'.join((path, name))
314 obj_path = '/'.join((path, name))
310 else:
315 else:
311 obj_path = name
316 obj_path = name
312 if obj_path not in self._stat_modes:
317 if obj_path not in self._stat_modes:
313 self._stat_modes[obj_path] = stat_
318 self._stat_modes[obj_path] = stat_
314
319
315 if type_ == 'tree':
320 if type_ == 'tree':
316 dirnodes.append(DirNode(obj_path, commit=self))
321 dirnodes.append(DirNode(obj_path, commit=self))
317 elif type_ == 'blob':
322 elif type_ == 'blob':
318 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
323 filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
319 else:
324 else:
320 raise CommitError(
325 raise CommitError(
321 "Requested object should be Tree or Blob, is %s", type_)
326 "Requested object should be Tree or Blob, is %s", type_)
322
327
323 nodes = dirnodes + filenodes
328 nodes = dirnodes + filenodes
324 for node in nodes:
329 for node in nodes:
325 if node.path not in self.nodes:
330 if node.path not in self.nodes:
326 self.nodes[node.path] = node
331 self.nodes[node.path] = node
327 nodes.sort()
332 nodes.sort()
328 return nodes
333 return nodes
329
334
330 def get_node(self, path, pre_load=None):
335 def get_node(self, path, pre_load=None):
331 if isinstance(path, unicode):
336 if isinstance(path, unicode):
332 path = path.encode('utf-8')
337 path = path.encode('utf-8')
333 path = self._fix_path(path)
338 path = self._fix_path(path)
334 if path not in self.nodes:
339 if path not in self.nodes:
335 try:
340 try:
336 tree_id, type_ = self._get_tree_id_for_path(path)
341 tree_id, type_ = self._get_tree_id_for_path(path)
337 except CommitError:
342 except CommitError:
338 raise NodeDoesNotExistError(
343 raise NodeDoesNotExistError(
339 "Cannot find one of parents' directories for a given "
344 "Cannot find one of parents' directories for a given "
340 "path: %s" % path)
345 "path: %s" % path)
341
346
342 if type_ == 'link':
347 if type_ == 'link':
343 url = self._get_submodule_url(path)
348 url = self._get_submodule_url(path)
344 node = SubModuleNode(path, url=url, commit=tree_id,
349 node = SubModuleNode(path, url=url, commit=tree_id,
345 alias=self.repository.alias)
350 alias=self.repository.alias)
346 elif type_ == 'tree':
351 elif type_ == 'tree':
347 if path == '':
352 if path == '':
348 node = RootNode(commit=self)
353 node = RootNode(commit=self)
349 else:
354 else:
350 node = DirNode(path, commit=self)
355 node = DirNode(path, commit=self)
351 elif type_ == 'blob':
356 elif type_ == 'blob':
352 node = FileNode(path, commit=self, pre_load=pre_load)
357 node = FileNode(path, commit=self, pre_load=pre_load)
353 self._stat_modes[path] = node.mode
358 self._stat_modes[path] = node.mode
354 else:
359 else:
355 raise self.no_node_at_path(path)
360 raise self.no_node_at_path(path)
356
361
357 # cache node
362 # cache node
358 self.nodes[path] = node
363 self.nodes[path] = node
359
364
360 return self.nodes[path]
365 return self.nodes[path]
361
366
362 def get_largefile_node(self, path):
367 def get_largefile_node(self, path):
363 tree_id, _ = self._get_tree_id_for_path(path)
368 tree_id, _ = self._get_tree_id_for_path(path)
364 pointer_spec = self._remote.is_large_file(tree_id)
369 pointer_spec = self._remote.is_large_file(tree_id)
365
370
366 if pointer_spec:
371 if pointer_spec:
367 # content of that file regular FileNode is the hash of largefile
372 # content of that file regular FileNode is the hash of largefile
368 file_id = pointer_spec.get('oid_hash')
373 file_id = pointer_spec.get('oid_hash')
369 if self._remote.in_largefiles_store(file_id):
374 if self._remote.in_largefiles_store(file_id):
370 lf_path = self._remote.store_path(file_id)
375 lf_path = self._remote.store_path(file_id)
371 return LargeFileNode(lf_path, commit=self, org_path=path)
376 return LargeFileNode(lf_path, commit=self, org_path=path)
372
377
373 @LazyProperty
378 @LazyProperty
374 def affected_files(self):
379 def affected_files(self):
375 """
380 """
376 Gets a fast accessible file changes for given commit
381 Gets a fast accessible file changes for given commit
377 """
382 """
378 added, modified, deleted = self._changes_cache
383 added, modified, deleted = self._changes_cache
379 return list(added.union(modified).union(deleted))
384 return list(added.union(modified).union(deleted))
380
385
381 @LazyProperty
386 @LazyProperty
382 def _changes_cache(self):
387 def _changes_cache(self):
383 added = set()
388 added = set()
384 modified = set()
389 modified = set()
385 deleted = set()
390 deleted = set()
386 _r = self._remote
391 _r = self._remote
387
392
388 parents = self.parents
393 parents = self.parents
389 if not self.parents:
394 if not self.parents:
390 parents = [base.EmptyCommit()]
395 parents = [base.EmptyCommit()]
391 for parent in parents:
396 for parent in parents:
392 if isinstance(parent, base.EmptyCommit):
397 if isinstance(parent, base.EmptyCommit):
393 oid = None
398 oid = None
394 else:
399 else:
395 oid = parent.raw_id
400 oid = parent.raw_id
396 changes = _r.tree_changes(oid, self.raw_id)
401 changes = _r.tree_changes(oid, self.raw_id)
397 for (oldpath, newpath), (_, _), (_, _) in changes:
402 for (oldpath, newpath), (_, _), (_, _) in changes:
398 if newpath and oldpath:
403 if newpath and oldpath:
399 modified.add(newpath)
404 modified.add(newpath)
400 elif newpath and not oldpath:
405 elif newpath and not oldpath:
401 added.add(newpath)
406 added.add(newpath)
402 elif not newpath and oldpath:
407 elif not newpath and oldpath:
403 deleted.add(oldpath)
408 deleted.add(oldpath)
404 return added, modified, deleted
409 return added, modified, deleted
405
410
406 def _get_paths_for_status(self, status):
411 def _get_paths_for_status(self, status):
407 """
412 """
408 Returns sorted list of paths for given ``status``.
413 Returns sorted list of paths for given ``status``.
409
414
410 :param status: one of: *added*, *modified* or *deleted*
415 :param status: one of: *added*, *modified* or *deleted*
411 """
416 """
412 added, modified, deleted = self._changes_cache
417 added, modified, deleted = self._changes_cache
413 return sorted({
418 return sorted({
414 'added': list(added),
419 'added': list(added),
415 'modified': list(modified),
420 'modified': list(modified),
416 'deleted': list(deleted)}[status]
421 'deleted': list(deleted)}[status]
417 )
422 )
418
423
419 @LazyProperty
424 @LazyProperty
420 def added(self):
425 def added(self):
421 """
426 """
422 Returns list of added ``FileNode`` objects.
427 Returns list of added ``FileNode`` objects.
423 """
428 """
424 if not self.parents:
429 if not self.parents:
425 return list(self._get_file_nodes())
430 return list(self._get_file_nodes())
426 return AddedFileNodesGenerator(
431 return AddedFileNodesGenerator(
427 [n for n in self._get_paths_for_status('added')], self)
432 [n for n in self._get_paths_for_status('added')], self)
428
433
429 @LazyProperty
434 @LazyProperty
430 def changed(self):
435 def changed(self):
431 """
436 """
432 Returns list of modified ``FileNode`` objects.
437 Returns list of modified ``FileNode`` objects.
433 """
438 """
434 if not self.parents:
439 if not self.parents:
435 return []
440 return []
436 return ChangedFileNodesGenerator(
441 return ChangedFileNodesGenerator(
437 [n for n in self._get_paths_for_status('modified')], self)
442 [n for n in self._get_paths_for_status('modified')], self)
438
443
439 @LazyProperty
444 @LazyProperty
440 def removed(self):
445 def removed(self):
441 """
446 """
442 Returns list of removed ``FileNode`` objects.
447 Returns list of removed ``FileNode`` objects.
443 """
448 """
444 if not self.parents:
449 if not self.parents:
445 return []
450 return []
446 return RemovedFileNodesGenerator(
451 return RemovedFileNodesGenerator(
447 [n for n in self._get_paths_for_status('deleted')], self)
452 [n for n in self._get_paths_for_status('deleted')], self)
448
453
449 def _get_submodule_url(self, submodule_path):
454 def _get_submodule_url(self, submodule_path):
450 git_modules_path = '.gitmodules'
455 git_modules_path = '.gitmodules'
451
456
452 if self._submodules is None:
457 if self._submodules is None:
453 self._submodules = {}
458 self._submodules = {}
454
459
455 try:
460 try:
456 submodules_node = self.get_node(git_modules_path)
461 submodules_node = self.get_node(git_modules_path)
457 except NodeDoesNotExistError:
462 except NodeDoesNotExistError:
458 return None
463 return None
459
464
460 content = submodules_node.content
465 content = submodules_node.content
461
466
462 # ConfigParser fails if there are whitespaces
467 # ConfigParser fails if there are whitespaces
463 content = '\n'.join(l.strip() for l in content.split('\n'))
468 content = '\n'.join(l.strip() for l in content.split('\n'))
464
469
465 parser = configparser.ConfigParser()
470 parser = configparser.ConfigParser()
466 parser.readfp(StringIO(content))
471 parser.readfp(StringIO(content))
467
472
468 for section in parser.sections():
473 for section in parser.sections():
469 path = parser.get(section, 'path')
474 path = parser.get(section, 'path')
470 url = parser.get(section, 'url')
475 url = parser.get(section, 'url')
471 if path and url:
476 if path and url:
472 self._submodules[path.strip('/')] = url
477 self._submodules[path.strip('/')] = url
473
478
474 return self._submodules.get(submodule_path.strip('/'))
479 return self._submodules.get(submodule_path.strip('/'))
@@ -1,380 +1,385 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 HG commit module
22 HG commit module
23 """
23 """
24
24
25 import os
25 import os
26
26
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from rhodecode.lib.datelib import utcdate_fromtimestamp
29 from rhodecode.lib.datelib import utcdate_fromtimestamp
30 from rhodecode.lib.utils import safe_str, safe_unicode
30 from rhodecode.lib.utils import safe_str, safe_unicode
31 from rhodecode.lib.vcs import path as vcspath
31 from rhodecode.lib.vcs import path as vcspath
32 from rhodecode.lib.vcs.backends import base
32 from rhodecode.lib.vcs.backends import base
33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
34 from rhodecode.lib.vcs.exceptions import CommitError
34 from rhodecode.lib.vcs.exceptions import CommitError
35 from rhodecode.lib.vcs.nodes import (
35 from rhodecode.lib.vcs.nodes import (
36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
38 LargeFileNode, LARGEFILE_PREFIX)
38 LargeFileNode, LARGEFILE_PREFIX)
39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
40
40
41
41
class MercurialCommit(base.BaseCommit):
    """
    Represents state of the repository at the single commit.
    """

    # attributes that must never be requested via the remote bulk call
    _filter_pre_load = [
        # git specific property not supported here
        "_commit",
    ]

    def __init__(self, repository, raw_id, idx, pre_load=None):
        raw_id = safe_str(raw_id)

        self.repository = repository
        self._remote = repository._remote

        self.raw_id = raw_id
        self.idx = idx

        self._set_bulk_properties(pre_load)

        # caches
        self.nodes = {}

    def _set_bulk_properties(self, pre_load):
        """
        Fetch the attributes named in ``pre_load`` with a single remote
        bulk call and store them directly in ``__dict__`` so the matching
        ``LazyProperty`` descriptors treat them as already computed.
        """
        if not pre_load:
            return
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        result = self._remote.bulk_request(self.raw_id, pre_load)
        for attr, value in result.items():
            if attr in ["author", "branch", "message"]:
                value = safe_unicode(value)
            elif attr == "affected_files":
                value = map(safe_unicode, value)
            elif attr == "date":
                value = utcdate_fromtimestamp(*value)
            elif attr in ["children", "parents"]:
                value = self._make_commits(value)
            elif attr in ["phase"]:
                value = self._get_phase_text(value)
            self.__dict__[attr] = value

    @LazyProperty
    def tags(self):
        # names of all repository tags pointing at this commit
        tags = [name for name, commit_id in self.repository.tags.iteritems()
                if commit_id == self.raw_id]
        return tags

    @LazyProperty
    def branch(self):
        # branch name this commit was made on
        return safe_unicode(self._remote.ctx_branch(self.raw_id))

    @LazyProperty
    def bookmarks(self):
        # names of all bookmarks pointing at this commit
        bookmarks = [
            name for name, commit_id in self.repository.bookmarks.iteritems()
            if commit_id == self.raw_id]
        return bookmarks

    @LazyProperty
    def message(self):
        return safe_unicode(self._remote.ctx_description(self.raw_id))

    @LazyProperty
    def committer(self):
        # mercurial does not distinguish committer from author
        return safe_unicode(self.author)

    @LazyProperty
    def author(self):
        return safe_unicode(self._remote.ctx_user(self.raw_id))

    @LazyProperty
    def date(self):
        return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id))

    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed, deleted files for current commit
        """
        return self._remote.ctx_status(self.raw_id)

    @LazyProperty
    def _file_paths(self):
        # flat list of all file paths present in this commit
        return self._remote.ctx_list(self.raw_id)

    @LazyProperty
    def _dir_paths(self):
        # all directories implied by the file paths; '' is the root dir
        p = list(set(get_dirs_for_path(*self._file_paths)))
        p.insert(0, '')
        return p

    @LazyProperty
    def _paths(self):
        return self._dir_paths + self._file_paths

    @LazyProperty
    def id(self):
        if self.last:
            return u'tip'
        return self.short_id

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    def _make_commits(self, indexes, pre_load=None):
        # negative indexes mark the null revision and are skipped
        return [self.repository.get_commit(commit_idx=idx, pre_load=pre_load)
                for idx in indexes if idx >= 0]

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parents = self._remote.ctx_parents(self.raw_id)
        return self._make_commits(parents)

    def _get_phase_text(self, phase_id):
        """Map a numeric mercurial phase id to its name, '' if unknown."""
        return {
            0: 'public',
            1: 'draft',
            2: 'secret',
        }.get(phase_id) or ''

    @LazyProperty
    def phase(self):
        phase_id = self._remote.ctx_phase(self.raw_id)
        phase_text = self._get_phase_text(phase_id)

        return safe_unicode(phase_text)

    @LazyProperty
    def obsolete(self):
        obsolete = self._remote.ctx_obsolete(self.raw_id)
        return obsolete

    @LazyProperty
    def hidden(self):
        hidden = self._remote.ctx_hidden(self.raw_id)
        return hidden

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        children = self._remote.ctx_children(self.raw_id)
        return self._make_commits(children)

    def _fix_path(self, path):
        """
        Mercurial keeps filenodes as str so we need to encode from unicode
        to str.
        """
        return safe_str(super(MercurialCommit, self)._fix_path(path))

    def _get_kind(self, path):
        """
        Returns the ``NodeKind`` for ``path`` or raises ``CommitError``
        when nothing exists at that path in this commit.
        """
        path = self._fix_path(path)
        if path in self._file_paths:
            return NodeKind.FILE
        elif path in self._dir_paths:
            return NodeKind.DIR
        else:
            raise CommitError(
                "Node does not exist at the given path '%s'" % (path, ))

    def _get_filectx(self, path):
        """Fix ``path`` and verify it points at a file, else raise."""
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise CommitError(
                "File does not exist for idx %s at '%s'" % (self.raw_id, path))
        return path

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at the given ``path``.
        """
        path = self._get_filectx(path)
        if 'x' in self._remote.fctx_flags(self.raw_id, path):
            return base.FILEMODE_EXECUTABLE
        else:
            return base.FILEMODE_DEFAULT

    def is_link(self, path):
        # 'l' flag marks a symlink in mercurial filectx flags
        path = self._get_filectx(path)
        return 'l' in self._remote.fctx_flags(self.raw_id, path)

    def get_file_content(self, path):
        """
        Returns content of the file at given ``path``.
        """
        path = self._get_filectx(path)
        return self._remote.fctx_node_data(self.raw_id, path)

    def get_file_content_streamed(self, path):
        """
        Returns a stream yielding chunks of the file content at ``path``,
        using the streaming variant of the remote call.
        """
        path = self._get_filectx(path)
        stream_method = getattr(self._remote, 'stream:fctx_node_data')
        return stream_method(self.raw_id, path)

    def get_file_size(self, path):
        """
        Returns size of the file at given ``path``.
        """
        path = self._get_filectx(path)
        return self._remote.fctx_size(self.raw_id, path)

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of `MercurialCommit` objects
        for which file at given ``path`` has been modified.
        """
        path = self._get_filectx(path)
        hist = self._remote.node_history(self.raw_id, path, limit)
        return [
            self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in hist]

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, commit_id, commit lazy loader and line
        """
        result = self._remote.fctx_annotate(self.raw_id, path)

        for ln_no, commit_id, content in result:
            yield (
                ln_no, commit_id,
                # bind commit_id as a default argument so every loader keeps
                # its own value; a plain closure would late-bind and resolve
                # all loaders to the last commit_id of the loop
                lambda commit_id=commit_id: self.repository.get_commit(
                    commit_id=commit_id, pre_load=pre_load),
                content)

    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``. If node at the given ``path``
        is not instance of ``DirNode``, CommitError would be raised.
        """

        if self._get_kind(path) != NodeKind.DIR:
            raise CommitError(
                "Directory does not exist for idx %s at '%s'" % (self.raw_id, path))
        path = self._fix_path(path)

        filenodes = [
            FileNode(f, commit=self) for f in self._file_paths
            if os.path.dirname(f) == path]
        # NOTE: the previous `path == '' and '' or [...]` idiom always fell
        # through to the list comprehension (the '' branch is falsy), so the
        # explicit form below is equivalent and readable
        dirs = [
            d for d in self._dir_paths
            if d and vcspath.dirname(d) == path]
        dirnodes = [
            DirNode(d, commit=self) for d in dirs
            if os.path.dirname(d) == path]

        alias = self.repository.alias
        for k, vals in self._submodules.iteritems():
            if vcspath.dirname(k) == path:
                loc = vals[0]
                commit = vals[1]
                dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias))

        nodes = dirnodes + filenodes
        for node in nodes:
            if node.path not in self.nodes:
                self.nodes[node.path] = node
        nodes.sort()

        return nodes

    def get_node(self, path, pre_load=None):
        """
        Returns `Node` object from the given `path`. If there is no node at
        the given `path`, `NodeDoesNotExistError` would be raised.
        """
        path = self._fix_path(path)

        if path not in self.nodes:
            if path in self._file_paths:
                node = FileNode(path, commit=self, pre_load=pre_load)
            elif path in self._dir_paths:
                if path == '':
                    node = RootNode(commit=self)
                else:
                    node = DirNode(path, commit=self)
            else:
                raise self.no_node_at_path(path)

            # cache node
            self.nodes[path] = node
        return self.nodes[path]

    def get_largefile_node(self, path):
        """
        Returns a ``LargeFileNode`` for ``path`` if it is a largefile
        pointer and the payload is available locally, otherwise ``None``.
        """
        pointer_spec = self._remote.is_large_file(path)
        if pointer_spec:
            # content of that file regular FileNode is the hash of largefile
            file_id = self.get_file_content(path).strip()

            if self._remote.in_largefiles_store(file_id):
                lf_path = self._remote.store_path(file_id)
                return LargeFileNode(lf_path, commit=self, org_path=path)
            elif self._remote.in_user_cache(file_id):
                lf_path = self._remote.store_path(file_id)
                # link the cached payload into the store before serving it
                self._remote.link(file_id, path)
                return LargeFileNode(lf_path, commit=self, org_path=path)

    @LazyProperty
    def _submodules(self):
        """
        Returns a dictionary with submodule information from substate file
        of hg repository.
        """
        return self._remote.ctx_substate(self.raw_id)

    @LazyProperty
    def affected_files(self):
        """
        Gets a fast accessible file changes for given commit
        """
        return self._remote.ctx_files(self.raw_id)

    @property
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        return AddedFileNodesGenerator([n for n in self.status[1]], self)

    @property
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        return ChangedFileNodesGenerator([n for n in self.status[0]], self)

    @property
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        return RemovedFileNodesGenerator([n for n in self.status[2]], self)
@@ -1,236 +1,241 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 SVN commit module
22 SVN commit module
23 """
23 """
24
24
25
25
26 import dateutil.parser
26 import dateutil.parser
27 from zope.cachedescriptors.property import Lazy as LazyProperty
27 from zope.cachedescriptors.property import Lazy as LazyProperty
28
28
29 from rhodecode.lib.utils import safe_str, safe_unicode
29 from rhodecode.lib.utils import safe_str, safe_unicode
30 from rhodecode.lib.vcs import nodes, path as vcspath
30 from rhodecode.lib.vcs import nodes, path as vcspath
31 from rhodecode.lib.vcs.backends import base
31 from rhodecode.lib.vcs.backends import base
32 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
32 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
33
33
34
34
35 _SVN_PROP_TRUE = '*'
35 _SVN_PROP_TRUE = '*'
36
36
37
37
38 class SubversionCommit(base.BaseCommit):
38 class SubversionCommit(base.BaseCommit):
39 """
39 """
40 Subversion specific implementation of commits
40 Subversion specific implementation of commits
41
41
42 .. attribute:: branch
42 .. attribute:: branch
43
43
44 The Subversion backend does not support to assign branches to
44 The Subversion backend does not support to assign branches to
45 specific commits. This attribute has always the value `None`.
45 specific commits. This attribute has always the value `None`.
46
46
47 """
47 """
48
48
49 def __init__(self, repository, commit_id):
49 def __init__(self, repository, commit_id):
50 self.repository = repository
50 self.repository = repository
51 self.idx = self.repository._get_commit_idx(commit_id)
51 self.idx = self.repository._get_commit_idx(commit_id)
52 self._svn_rev = self.idx + 1
52 self._svn_rev = self.idx + 1
53 self._remote = repository._remote
53 self._remote = repository._remote
54 # TODO: handling of raw_id should be a method on repository itself,
54 # TODO: handling of raw_id should be a method on repository itself,
55 # which knows how to translate commit index and commit id
55 # which knows how to translate commit index and commit id
56 self.raw_id = commit_id
56 self.raw_id = commit_id
57 self.short_id = commit_id
57 self.short_id = commit_id
58 self.id = 'r%s' % (commit_id, )
58 self.id = 'r%s' % (commit_id, )
59
59
60 # TODO: Implement the following placeholder attributes
60 # TODO: Implement the following placeholder attributes
61 self.nodes = {}
61 self.nodes = {}
62 self.tags = []
62 self.tags = []
63
63
64 @property
64 @property
65 def author(self):
65 def author(self):
66 return safe_unicode(self._properties.get('svn:author'))
66 return safe_unicode(self._properties.get('svn:author'))
67
67
68 @property
68 @property
69 def date(self):
69 def date(self):
70 return _date_from_svn_properties(self._properties)
70 return _date_from_svn_properties(self._properties)
71
71
72 @property
72 @property
73 def message(self):
73 def message(self):
74 return safe_unicode(self._properties.get('svn:log'))
74 return safe_unicode(self._properties.get('svn:log'))
75
75
76 @LazyProperty
76 @LazyProperty
77 def _properties(self):
77 def _properties(self):
78 return self._remote.revision_properties(self._svn_rev)
78 return self._remote.revision_properties(self._svn_rev)
79
79
80 @LazyProperty
80 @LazyProperty
81 def parents(self):
81 def parents(self):
82 parent_idx = self.idx - 1
82 parent_idx = self.idx - 1
83 if parent_idx >= 0:
83 if parent_idx >= 0:
84 parent = self.repository.get_commit(commit_idx=parent_idx)
84 parent = self.repository.get_commit(commit_idx=parent_idx)
85 return [parent]
85 return [parent]
86 return []
86 return []
87
87
88 @LazyProperty
88 @LazyProperty
89 def children(self):
89 def children(self):
90 child_idx = self.idx + 1
90 child_idx = self.idx + 1
91 if child_idx < len(self.repository.commit_ids):
91 if child_idx < len(self.repository.commit_ids):
92 child = self.repository.get_commit(commit_idx=child_idx)
92 child = self.repository.get_commit(commit_idx=child_idx)
93 return [child]
93 return [child]
94 return []
94 return []
95
95
96 def get_file_mode(self, path):
96 def get_file_mode(self, path):
97 # Note: Subversion flags files which are executable with a special
97 # Note: Subversion flags files which are executable with a special
98 # property `svn:executable` which is set to the value ``"*"``.
98 # property `svn:executable` which is set to the value ``"*"``.
99 if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE:
99 if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE:
100 return base.FILEMODE_EXECUTABLE
100 return base.FILEMODE_EXECUTABLE
101 else:
101 else:
102 return base.FILEMODE_DEFAULT
102 return base.FILEMODE_DEFAULT
103
103
104 def is_link(self, path):
104 def is_link(self, path):
105 # Note: Subversion has a flag for special files, the content of the
105 # Note: Subversion has a flag for special files, the content of the
106 # file contains the type of that file.
106 # file contains the type of that file.
107 if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE:
107 if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE:
108 return self.get_file_content(path).startswith('link')
108 return self.get_file_content(path).startswith('link')
109 return False
109 return False
110
110
111 def _get_file_property(self, path, name):
111 def _get_file_property(self, path, name):
112 file_properties = self._remote.node_properties(
112 file_properties = self._remote.node_properties(
113 safe_str(path), self._svn_rev)
113 safe_str(path), self._svn_rev)
114 return file_properties.get(name)
114 return file_properties.get(name)
115
115
116 def get_file_content(self, path):
116 def get_file_content(self, path):
117 path = self._fix_path(path)
117 path = self._fix_path(path)
118 return self._remote.get_file_content(safe_str(path), self._svn_rev)
118 return self._remote.get_file_content(safe_str(path), self._svn_rev)
119
119
120 def get_file_content_streamed(self, path):
121 path = self._fix_path(path)
122 stream_method = getattr(self._remote, 'stream:get_file_content')
123 return stream_method(safe_str(path), self._svn_rev)
124
120 def get_file_size(self, path):
125 def get_file_size(self, path):
121 path = self._fix_path(path)
126 path = self._fix_path(path)
122 return self._remote.get_file_size(safe_str(path), self._svn_rev)
127 return self._remote.get_file_size(safe_str(path), self._svn_rev)
123
128
124 def get_path_history(self, path, limit=None, pre_load=None):
129 def get_path_history(self, path, limit=None, pre_load=None):
125 path = safe_str(self._fix_path(path))
130 path = safe_str(self._fix_path(path))
126 history = self._remote.node_history(path, self._svn_rev, limit)
131 history = self._remote.node_history(path, self._svn_rev, limit)
127 return [
132 return [
128 self.repository.get_commit(commit_id=str(svn_rev))
133 self.repository.get_commit(commit_id=str(svn_rev))
129 for svn_rev in history]
134 for svn_rev in history]
130
135
131 def get_file_annotate(self, path, pre_load=None):
136 def get_file_annotate(self, path, pre_load=None):
132 result = self._remote.file_annotate(safe_str(path), self._svn_rev)
137 result = self._remote.file_annotate(safe_str(path), self._svn_rev)
133
138
134 for zero_based_line_no, svn_rev, content in result:
139 for zero_based_line_no, svn_rev, content in result:
135 commit_id = str(svn_rev)
140 commit_id = str(svn_rev)
136 line_no = zero_based_line_no + 1
141 line_no = zero_based_line_no + 1
137 yield (
142 yield (
138 line_no,
143 line_no,
139 commit_id,
144 commit_id,
140 lambda: self.repository.get_commit(commit_id=commit_id),
145 lambda: self.repository.get_commit(commit_id=commit_id),
141 content)
146 content)
142
147
    def get_node(self, path, pre_load=None):
        """
        Return the node object at ``path`` within this commit, caching the
        result in ``self.nodes``. The empty path maps to the repository
        root; unknown paths raise via ``no_node_at_path``.
        """
        path = self._fix_path(path)
        if path not in self.nodes:

            if path == '':
                node = nodes.RootNode(commit=self)
            else:
                # Ask the vcsserver whether the path is a file or directory.
                node_type = self._remote.get_node_type(
                    safe_str(path), self._svn_rev)
                if node_type == 'dir':
                    node = nodes.DirNode(path, commit=self)
                elif node_type == 'file':
                    node = nodes.FileNode(path, commit=self, pre_load=pre_load)
                else:
                    raise self.no_node_at_path(path)

            self.nodes[path] = node
        return self.nodes[path]
161
166
162 def get_nodes(self, path):
167 def get_nodes(self, path):
163 if self._get_kind(path) != nodes.NodeKind.DIR:
168 if self._get_kind(path) != nodes.NodeKind.DIR:
164 raise CommitError(
169 raise CommitError(
165 "Directory does not exist for commit %s at "
170 "Directory does not exist for commit %s at "
166 " '%s'" % (self.raw_id, path))
171 " '%s'" % (self.raw_id, path))
167 path = self._fix_path(path)
172 path = self._fix_path(path)
168
173
169 path_nodes = []
174 path_nodes = []
170 for name, kind in self._remote.get_nodes(
175 for name, kind in self._remote.get_nodes(
171 safe_str(path), revision=self._svn_rev):
176 safe_str(path), revision=self._svn_rev):
172 node_path = vcspath.join(path, name)
177 node_path = vcspath.join(path, name)
173 if kind == 'dir':
178 if kind == 'dir':
174 node = nodes.DirNode(node_path, commit=self)
179 node = nodes.DirNode(node_path, commit=self)
175 elif kind == 'file':
180 elif kind == 'file':
176 node = nodes.FileNode(node_path, commit=self)
181 node = nodes.FileNode(node_path, commit=self)
177 else:
182 else:
178 raise ValueError("Node kind %s not supported." % (kind, ))
183 raise ValueError("Node kind %s not supported." % (kind, ))
179 self.nodes[node_path] = node
184 self.nodes[node_path] = node
180 path_nodes.append(node)
185 path_nodes.append(node)
181
186
182 return path_nodes
187 return path_nodes
183
188
184 def _get_kind(self, path):
189 def _get_kind(self, path):
185 path = self._fix_path(path)
190 path = self._fix_path(path)
186 kind = self._remote.get_node_type(path, self._svn_rev)
191 kind = self._remote.get_node_type(path, self._svn_rev)
187 if kind == 'file':
192 if kind == 'file':
188 return nodes.NodeKind.FILE
193 return nodes.NodeKind.FILE
189 elif kind == 'dir':
194 elif kind == 'dir':
190 return nodes.NodeKind.DIR
195 return nodes.NodeKind.DIR
191 else:
196 else:
192 raise CommitError(
197 raise CommitError(
193 "Node does not exist at the given path '%s'" % (path, ))
198 "Node does not exist at the given path '%s'" % (path, ))
194
199
    @LazyProperty
    def _changes_cache(self):
        # Mapping of change-type key ('added'/'changed'/'removed') to the
        # list of affected file paths, fetched once per commit instance.
        return self._remote.revision_changes(self._svn_rev)
198
203
199 @LazyProperty
204 @LazyProperty
200 def affected_files(self):
205 def affected_files(self):
201 changed_files = set()
206 changed_files = set()
202 for files in self._changes_cache.itervalues():
207 for files in self._changes_cache.itervalues():
203 changed_files.update(files)
208 changed_files.update(files)
204 return list(changed_files)
209 return list(changed_files)
205
210
    @LazyProperty
    def id(self):
        # Generic identifier alias; mirrors raw_id for API parity with the
        # other commit backends.
        return self.raw_id
209
214
    @property
    def added(self):
        # Lazy generator over the FileNodes added in this commit.
        return nodes.AddedFileNodesGenerator(
            self._changes_cache['added'], self)
214
219
    @property
    def changed(self):
        # Lazy generator over the FileNodes modified in this commit.
        return nodes.ChangedFileNodesGenerator(
            self._changes_cache['changed'], self)
219
224
    @property
    def removed(self):
        # Lazy generator over the FileNodes removed in this commit.
        return nodes.RemovedFileNodesGenerator(
            self._changes_cache['removed'], self)
224
229
225
230
def _date_from_svn_properties(properties):
    """
    Parses the date out of given svn properties.

    :return: :class:`datetime.datetime` instance. The object is naive.
    """
    parsed = dateutil.parser.parse(properties.get('svn:date'))
    # Deliberately kept in the parsed (UTC) timezone rather than converted
    # to local time; only the tzinfo marker is stripped to make it naive.
    return parsed.replace(tzinfo=None)
@@ -1,318 +1,346 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Client for the VCSServer implemented based on HTTP.
22 Client for the VCSServer implemented based on HTTP.
23 """
23 """
24
24
25 import copy
25 import copy
26 import logging
26 import logging
27 import threading
27 import threading
28 import time
28 import time
29 import urllib2
29 import urllib2
30 import urlparse
30 import urlparse
31 import uuid
31 import uuid
32 import traceback
32 import traceback
33
33
34 import pycurl
34 import pycurl
35 import msgpack
35 import msgpack
36 import requests
36 import requests
37 from requests.packages.urllib3.util.retry import Retry
37 from requests.packages.urllib3.util.retry import Retry
38
38
39 import rhodecode
39 import rhodecode
40 from rhodecode.lib.system_info import get_cert_path
40 from rhodecode.lib.system_info import get_cert_path
41 from rhodecode.lib.vcs import exceptions, CurlSession
41 from rhodecode.lib.vcs import exceptions, CurlSession
42
42
43 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
44
44
45
45
# TODO: mikhail: Keep it in sync with vcsserver's
# HTTPApplication.ALLOWED_EXCEPTIONS
# Maps exception type names reported by the vcsserver to local exception
# classes; anything unlisted is re-raised as a plain Exception.
EXCEPTIONS_MAP = {
    'KeyError': KeyError,
    'URLError': urllib2.URLError,
}
52
52
53
53
class RepoMaker(object):
    """
    Factory for :class:`RemoteRepo` proxies bound to one vcsserver backend
    endpoint. Unknown attribute access is proxied as a backend-level remote
    call (no repository wire data attached).
    """

    def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory):
        self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint)
        self._session_factory = session_factory
        self.backend_type = backend_type

    def __call__(self, path, repo_id, config, with_wire=None):
        log.debug('%s RepoMaker call on %s', self.backend_type.upper(), path)
        return RemoteRepo(path, repo_id, config, self.url, self._session_factory(),
                          with_wire=with_wire)

    def __getattr__(self, name):
        # Any unknown attribute becomes a callable performing a remote call.
        def f(*args, **kwargs):
            return self._call(name, *args, **kwargs)
        return f

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        # Backend-level call: includes the backend type but no repo wire.
        payload = {
            'id': str(uuid.uuid4()),
            'method': name,
            'backend': self.backend_type,
            'params': {'args': args, 'kwargs': kwargs}
        }
        return _remote_call(
            self.url, payload, EXCEPTIONS_MAP, self._session_factory())
81
82
class ServiceConnection(object):
    """
    Connection to vcsserver service endpoints (no repository context).
    Unknown attribute access turns into a remote method call.
    """

    def __init__(self, server_and_port, backend_endpoint, session_factory):
        self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint)
        self._session_factory = session_factory

    def __getattr__(self, name):
        def f(*args, **kwargs):
            return self._call(name, *args, **kwargs)

        return f

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        payload = {
            'id': str(uuid.uuid4()),
            'method': name,
            'params': {'args': args, 'kwargs': kwargs}
        }
        return _remote_call(
            self.url, payload, EXCEPTIONS_MAP, self._session_factory())
103
104
class RemoteRepo(object):
    """
    Proxy to one repository on the vcsserver. Every unknown attribute
    access yields a callable performing a remote call with this
    repository's wire data attached.
    """

    def __init__(self, path, repo_id, config, url, session, with_wire=None):
        self.url = url
        self._session = session
        with_wire = with_wire or {}

        repo_state_uid = with_wire.get('repo_state_uid') or 'state'
        # Wire data identifying the repository; sent with every remote call.
        self._wire = {
            "path": path,  # repo path
            "repo_id": repo_id,
            "config": config,
            "repo_state_uid": repo_state_uid,
            "context": self._create_vcs_cache_context(path, repo_state_uid)
        }

        if with_wire:
            self._wire.update(with_wire)

        # NOTE(johbo): Trading complexity for performance. Avoiding the call to
        # log.debug brings a few percent gain even if is is not active.
        if log.isEnabledFor(logging.DEBUG):
            self._call_with_logging = True
        # NOTE(review): when DEBUG is off the attribute is never assigned, so
        # reading self._call_with_logging falls through to __getattr__ below
        # and returns a (truthy) function — the logging branch appears to run
        # regardless; confirm intent.

        self.cert_dir = get_cert_path(rhodecode.CONFIG.get('__file__'))

    def __getattr__(self, name):
        def f(*args, **kwargs):
            return self._call(name, *args, **kwargs)
        return f

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        # TODO: oliver: This is currently necessary pre-call since the
        # config object is being changed for hooking scenarios
        wire = copy.deepcopy(self._wire)
        wire["config"] = wire["config"].serialize()
        wire["config"].append(('vcs', 'ssl_dir', self.cert_dir))

        payload = {
            'id': str(uuid.uuid4()),
            'method': name,
            'params': {'wire': wire, 'args': args, 'kwargs': kwargs}
        }

        if self._call_with_logging:
            start = time.time()
            context_uid = wire.get('context')
            log.debug('Calling %s@%s with args:%.10240r. wire_context: %s',
                      self.url, name, args, context_uid)
        result = _remote_call(self.url, payload, EXCEPTIONS_MAP, self._session)
        if self._call_with_logging:
            log.debug('Call %s@%s took: %.4fs. wire_context: %s',
                      self.url, name, time.time()-start, context_uid)
        return result

    def __getitem__(self, key):
        # repo[rev] is sugar for the remote revision(rev) call.
        return self.revision(key)

    def _create_vcs_cache_context(self, *args):
        """
        Creates a unique string which is passed to the VCSServer on every
        remote call. It is used as cache key in the VCSServer.
        """
        hash_key = '-'.join(map(str, args))
        return str(uuid.uuid5(uuid.NAMESPACE_URL, hash_key))

    def invalidate_vcs_cache(self):
        """
        This invalidates the context which is sent to the VCSServer on every
        call to a remote method. It forces the VCSServer to create a fresh
        repository instance on the next call to a remote method.
        """
        self._wire['context'] = str(uuid.uuid4())
179
180
class RemoteObject(object):
    """
    Generic attribute-access proxy for an object exposed by the vcsserver
    at ``url``.
    """

    def __init__(self, url, session):
        self._url = url
        self._session = session

        # johbo: Trading complexity for performance. Avoiding the call to
        # log.debug brings a few percent gain even if is is not active.
        if log.isEnabledFor(logging.DEBUG):
            self._call = self._call_with_logging

    def __getattr__(self, name):
        def f(*args, **kwargs):
            return self._call(name, *args, **kwargs)
        return f

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        payload = {
            'id': str(uuid.uuid4()),
            'method': name,
            'params': {'args': args, 'kwargs': kwargs}
        }
        return _remote_call(self._url, payload, EXCEPTIONS_MAP, self._session)

    def _call_with_logging(self, name, *args, **kwargs):
        log.debug('Calling %s@%s', self._url, name)
        # Go through the class-level _call to avoid recursing into the
        # instance attribute set in __init__.
        return RemoteObject._call(self, name, *args, **kwargs)
209
210
def _remote_call(url, payload, exceptions_map, session):
    """
    POST a msgpack-encoded ``payload`` to the vcsserver at ``url`` and
    return the decoded ``result`` value. Errors reported by the server are
    re-raised locally as the type looked up in ``exceptions_map``.
    """
    try:
        response = session.post(url, data=msgpack.packb(payload))
    except pycurl.error as e:
        msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc())
        raise exceptions.HttpVCSCommunicationError(msg)
    except Exception as e:
        message = getattr(e, 'message', '')
        if 'Failed to connect' in message:
            # gevent doesn't return proper pycurl errors
            raise exceptions.HttpVCSCommunicationError(e)
        else:
            raise

    if response.status_code >= 400:
        log.error('Call to %s returned non 200 HTTP code: %s',
                  url, response.status_code)
        raise exceptions.HttpVCSCommunicationError(repr(response.content))

    try:
        response = msgpack.unpackb(response.content)
    except Exception:
        log.exception('Failed to decode response %r', response.content)
        raise

    error = response.get('error')
    if error:
        type_ = error.get('type', 'Exception')
        exc = exceptions_map.get(type_, Exception)
        exc = exc(error.get('message'))
        try:
            # Marker consumed by the map_vcs_exceptions decorator.
            exc._vcs_kind = error['_vcs_kind']
        except KeyError:
            pass

        try:
            # Attach remote traceback details for debugging when provided.
            exc._vcs_server_traceback = error['traceback']
            exc._vcs_server_org_exc_name = error['org_exc']
            exc._vcs_server_org_exc_tb = error['org_exc_tb']
        except KeyError:
            pass

        raise exc
    return response.get('result')
255
98
256
99
def _streaming_remote_call(url, payload, exceptions_map, session, chunk_size):
    """
    POST a msgpack-encoded ``payload`` to the streaming endpoint at ``url``
    and return an iterator over raw response chunks of ``chunk_size`` bytes.

    Unlike :func:`_remote_call` the body is not msgpack-decoded, so server
    side errors are only detectable through the HTTP status code; the
    ``exceptions_map`` parameter is kept for signature parity and is unused.
    """
    try:
        response = session.post(url, data=msgpack.packb(payload))
    except pycurl.error as e:
        msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc())
        raise exceptions.HttpVCSCommunicationError(msg)
    except Exception as e:
        message = getattr(e, 'message', '')
        if 'Failed to connect' in message:
            # gevent doesn't return proper pycurl errors
            raise exceptions.HttpVCSCommunicationError(e)
        else:
            raise

    if response.status_code >= 400:
        log.error('Call to %s returned non 200 HTTP code: %s',
                  url, response.status_code)
        raise exceptions.HttpVCSCommunicationError(repr(response.content))

    return response.iter_content(chunk_size=chunk_size)
120
121
class ServiceConnection(object):
    """
    Connection to vcsserver service endpoints (no repository context).
    Unknown attribute access turns into a remote method call.
    """

    def __init__(self, server_and_port, backend_endpoint, session_factory):
        self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint)
        self._session_factory = session_factory

    def __getattr__(self, name):
        def service_remote_attr(*args, **kwargs):
            return self._call(name, *args, **kwargs)

        return service_remote_attr

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        message = {
            'id': str(uuid.uuid4()),
            'method': name,
            'params': {'args': args, 'kwargs': kwargs}
        }
        session = self._session_factory()
        return _remote_call(self.url, message, EXCEPTIONS_MAP, session)
142
143
class RemoteVCSMaker(object):
    """
    Factory producing :class:`RemoteRepo` proxies for one backend type.
    Also proxies backend-level remote calls via attribute access.
    """

    def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory):
        base = 'http://%s' % server_and_port
        # Regular calls go to the backend endpoint; streaming calls to the
        # matching '/stream' sibling endpoint.
        self.url = urlparse.urljoin(base, backend_endpoint)
        self.stream_url = urlparse.urljoin(base, backend_endpoint + '/stream')

        self._session_factory = session_factory
        self.backend_type = backend_type

    def __call__(self, path, repo_id, config, with_wire=None):
        log.debug('%s RepoMaker call on %s', self.backend_type.upper(), path)
        return RemoteRepo(path, repo_id, config, self, with_wire=with_wire)

    def __getattr__(self, name):
        def remote_attr(*args, **kwargs):
            return self._call(name, *args, **kwargs)
        return remote_attr

    @exceptions.map_vcs_exceptions
    def _call(self, func_name, *args, **kwargs):
        request = {
            'id': str(uuid.uuid4()),
            'method': func_name,
            'backend': self.backend_type,
            'params': {'args': args, 'kwargs': kwargs}
        }
        return _remote_call(
            self.url, request, EXCEPTIONS_MAP, self._session_factory())
172
173
class RemoteRepo(object):
    """
    Proxy to one repository on the vcsserver.

    Attribute access returns a callable performing a remote call with this
    repository's wire data attached. Attribute names starting with
    ``stream:`` use the streaming endpoint and return an iterator of raw
    response chunks instead of a decoded result.
    """

    # Chunk size requested from, and read back off, the streaming endpoint.
    CHUNK_SIZE = 16384

    def __init__(self, path, repo_id, config, remote_maker, with_wire=None):
        self.url = remote_maker.url
        self.stream_url = remote_maker.stream_url
        self._session = remote_maker._session_factory()

        with_wire = with_wire or {}

        repo_state_uid = with_wire.get('repo_state_uid') or 'state'
        # Wire data identifying the repository; sent with every remote call.
        self._wire = {
            "path": path,  # repo path
            "repo_id": repo_id,
            "config": config,
            "repo_state_uid": repo_state_uid,
            "context": self._create_vcs_cache_context(path, repo_state_uid)
        }

        if with_wire:
            self._wire.update(with_wire)

        # NOTE(johbo): Trading complexity for performance. Avoiding the call to
        # log.debug brings a few percent gain even if is is not active.
        # Assign a real boolean unconditionally: if the attribute were only
        # set when DEBUG is enabled, reading it with DEBUG disabled would
        # fall through to __getattr__, which returns a (truthy) remote-call
        # function — making the logging checks always true.
        self._call_with_logging = log.isEnabledFor(logging.DEBUG)

        self.cert_dir = get_cert_path(rhodecode.CONFIG.get('__file__'))

    def __getattr__(self, name):

        if name.startswith('stream:'):
            def repo_remote_attr(*args, **kwargs):
                return self._call_stream(name, *args, **kwargs)
        else:
            def repo_remote_attr(*args, **kwargs):
                return self._call(name, *args, **kwargs)

        return repo_remote_attr

    def _base_call(self, name, *args, **kwargs):
        """Build the ``(context_uid, payload)`` pair shared by both call paths."""
        # TODO: oliver: This is currently necessary pre-call since the
        # config object is being changed for hooking scenarios
        wire = copy.deepcopy(self._wire)
        wire["config"] = wire["config"].serialize()
        wire["config"].append(('vcs', 'ssl_dir', self.cert_dir))

        payload = {
            'id': str(uuid.uuid4()),
            'method': name,
            'params': {'wire': wire, 'args': args, 'kwargs': kwargs}
        }

        context_uid = wire.get('context')
        return context_uid, payload

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        """Perform a regular (fully buffered, msgpack-decoded) remote call."""
        context_uid, payload = self._base_call(name, *args, **kwargs)
        url = self.url

        start = time.time()
        if self._call_with_logging:
            log.debug('Calling %s@%s with args:%.10240r. wire_context: %s',
                      url, name, args, context_uid)

        result = _remote_call(url, payload, EXCEPTIONS_MAP, self._session)
        if self._call_with_logging:
            log.debug('Call %s@%s took: %.4fs. wire_context: %s',
                      url, name, time.time()-start, context_uid)
        return result

    @exceptions.map_vcs_exceptions
    def _call_stream(self, name, *args, **kwargs):
        """Perform a streaming remote call; returns an iterator of raw chunks."""
        context_uid, payload = self._base_call(name, *args, **kwargs)
        payload['chunk_size'] = self.CHUNK_SIZE
        url = self.stream_url

        start = time.time()
        if self._call_with_logging:
            log.debug('Calling %s@%s with args:%.10240r. wire_context: %s',
                      url, name, args, context_uid)

        result = _streaming_remote_call(url, payload, EXCEPTIONS_MAP, self._session,
                                        self.CHUNK_SIZE)

        if self._call_with_logging:
            log.debug('Call %s@%s took: %.4fs. wire_context: %s',
                      url, name, time.time()-start, context_uid)
        return result

    def __getitem__(self, key):
        # repo[rev] is sugar for the remote revision(rev) call.
        return self.revision(key)

    def _create_vcs_cache_context(self, *args):
        """
        Creates a unique string which is passed to the VCSServer on every
        remote call. It is used as cache key in the VCSServer.
        """
        hash_key = '-'.join(map(str, args))
        return str(uuid.uuid5(uuid.NAMESPACE_URL, hash_key))

    def invalidate_vcs_cache(self):
        """
        This invalidates the context which is sent to the VCSServer on every
        call to a remote method. It forces the VCSServer to create a fresh
        repository instance on the next call to a remote method.
        """
        self._wire['context'] = str(uuid.uuid4())
283
284
class VcsHttpProxy(object):
    """
    Forwards a WSGI-style environment plus request body to the vcsserver
    and streams the response back as ``(chunk_iterator, status, headers)``.
    """

    # Size of the chunks read from the streamed response body.
    CHUNK_SIZE = 16384

    def __init__(self, server_and_port, backend_endpoint):
        # Retry transient connection failures a few times before giving up.
        retries = Retry(total=5, connect=None, read=None, redirect=None)

        adapter = requests.adapters.HTTPAdapter(max_retries=retries)
        self.base_url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint)
        self.session = requests.Session()
        self.session.mount('http://', adapter)

    def handle(self, environment, input_data, *args, **kwargs):
        """
        Send ``environment``/``input_data`` to the vcsserver and return a
        ``(chunk_iterator, status, headers)`` tuple.
        """
        data = {
            'environment': environment,
            'input_data': input_data,
            'args': args,
            'kwargs': kwargs
        }
        result = self.session.post(
            self.base_url, msgpack.packb(data), stream=True)
        return self._get_result(result)

    def _deserialize_and_raise(self, error):
        # Re-raise a server-reported error locally, carrying the _vcs_kind
        # marker when the server provided one.
        exception = Exception(error['message'])
        try:
            exception._vcs_kind = error['_vcs_kind']
        except KeyError:
            pass
        raise exception

    def _iterate(self, result):
        # Incrementally decode the msgpack stream from the response body.
        unpacker = msgpack.Unpacker()
        for line in result.iter_content(chunk_size=self.CHUNK_SIZE):
            unpacker.feed(line)
            for chunk in unpacker:
                yield chunk

    def _get_result(self, result):
        # Stream protocol: first an error marker, then status, then headers,
        # then the body chunks.
        iterator = self._iterate(result)
        error = iterator.next()
        if error:
            self._deserialize_and_raise(error)

        status = iterator.next()
        headers = iterator.next()

        return iterator, status, headers
305
333
306
334
class ThreadlocalSessionFactory(object):
    """
    Creates one CurlSession per thread on demand.
    """

    def __init__(self):
        self._thread_local = threading.local()

    def __call__(self):
        local = self._thread_local
        try:
            return local.curl_session
        except AttributeError:
            # First call on this thread: lazily create its session.
            local.curl_session = CurlSession()
            return local.curl_session
@@ -1,850 +1,870 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2014-2019 RhodeCode GmbH
3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 """
21 """
22 Module holding everything related to vcs nodes, with vcs2 architecture.
22 Module holding everything related to vcs nodes, with vcs2 architecture.
23 """
23 """
24
24
25 import os
25 import os
26 import stat
26 import stat
27
27
28 from zope.cachedescriptors.property import Lazy as LazyProperty
28 from zope.cachedescriptors.property import Lazy as LazyProperty
29
29
30 import rhodecode
30 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
31 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
31 from rhodecode.lib.utils import safe_unicode, safe_str
32 from rhodecode.lib.utils import safe_unicode, safe_str
32 from rhodecode.lib.utils2 import md5
33 from rhodecode.lib.utils2 import md5
33 from rhodecode.lib.vcs import path as vcspath
34 from rhodecode.lib.vcs import path as vcspath
34 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
35 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
35 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
36 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
36 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
37 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
37
38
38 LARGEFILE_PREFIX = '.hglf'
39 LARGEFILE_PREFIX = '.hglf'
39
40
40
41
41 class NodeKind:
42 class NodeKind:
42 SUBMODULE = -1
43 SUBMODULE = -1
43 DIR = 1
44 DIR = 1
44 FILE = 2
45 FILE = 2
45 LARGEFILE = 3
46 LARGEFILE = 3
46
47
47
48
48 class NodeState:
49 class NodeState:
49 ADDED = u'added'
50 ADDED = u'added'
50 CHANGED = u'changed'
51 CHANGED = u'changed'
51 NOT_CHANGED = u'not changed'
52 NOT_CHANGED = u'not changed'
52 REMOVED = u'removed'
53 REMOVED = u'removed'
53
54
54
55
55 class NodeGeneratorBase(object):
56 class NodeGeneratorBase(object):
56 """
57 """
57 Base class for removed added and changed filenodes, it's a lazy generator
58 Base class for removed added and changed filenodes, it's a lazy generator
58 class that will create filenodes only on iteration or call
59 class that will create filenodes only on iteration or call
59
60
60 The len method doesn't need to create filenodes at all
61 The len method doesn't need to create filenodes at all
61 """
62 """
62
63
63 def __init__(self, current_paths, cs):
64 def __init__(self, current_paths, cs):
64 self.cs = cs
65 self.cs = cs
65 self.current_paths = current_paths
66 self.current_paths = current_paths
66
67
67 def __call__(self):
68 def __call__(self):
68 return [n for n in self]
69 return [n for n in self]
69
70
70 def __getslice__(self, i, j):
71 def __getslice__(self, i, j):
71 for p in self.current_paths[i:j]:
72 for p in self.current_paths[i:j]:
72 yield self.cs.get_node(p)
73 yield self.cs.get_node(p)
73
74
74 def __len__(self):
75 def __len__(self):
75 return len(self.current_paths)
76 return len(self.current_paths)
76
77
77 def __iter__(self):
78 def __iter__(self):
78 for p in self.current_paths:
79 for p in self.current_paths:
79 yield self.cs.get_node(p)
80 yield self.cs.get_node(p)
80
81
81
82
82 class AddedFileNodesGenerator(NodeGeneratorBase):
83 class AddedFileNodesGenerator(NodeGeneratorBase):
83 """
84 """
84 Class holding added files for current commit
85 Class holding added files for current commit
85 """
86 """
86
87
87
88
88 class ChangedFileNodesGenerator(NodeGeneratorBase):
89 class ChangedFileNodesGenerator(NodeGeneratorBase):
89 """
90 """
90 Class holding changed files for current commit
91 Class holding changed files for current commit
91 """
92 """
92
93
93
94
94 class RemovedFileNodesGenerator(NodeGeneratorBase):
95 class RemovedFileNodesGenerator(NodeGeneratorBase):
95 """
96 """
96 Class holding removed files for current commit
97 Class holding removed files for current commit
97 """
98 """
98 def __iter__(self):
99 def __iter__(self):
99 for p in self.current_paths:
100 for p in self.current_paths:
100 yield RemovedFileNode(path=p)
101 yield RemovedFileNode(path=p)
101
102
102 def __getslice__(self, i, j):
103 def __getslice__(self, i, j):
103 for p in self.current_paths[i:j]:
104 for p in self.current_paths[i:j]:
104 yield RemovedFileNode(path=p)
105 yield RemovedFileNode(path=p)
105
106
106
107
107 class Node(object):
108 class Node(object):
108 """
109 """
109 Simplest class representing file or directory on repository. SCM backends
110 Simplest class representing file or directory on repository. SCM backends
110 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
111 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
111 directly.
112 directly.
112
113
113 Node's ``path`` cannot start with slash as we operate on *relative* paths
114 Node's ``path`` cannot start with slash as we operate on *relative* paths
114 only. Moreover, every single node is identified by the ``path`` attribute,
115 only. Moreover, every single node is identified by the ``path`` attribute,
115 so it cannot end with slash, too. Otherwise, path could lead to mistakes.
116 so it cannot end with slash, too. Otherwise, path could lead to mistakes.
116 """
117 """
117 RTLO_MARKER = u"\u202E" # RTLO marker allows swapping text, and certain
118 RTLO_MARKER = u"\u202E" # RTLO marker allows swapping text, and certain
118 # security attacks could be used with this
119 # security attacks could be used with this
119 commit = None
120 commit = None
120
121
121 def __init__(self, path, kind):
122 def __init__(self, path, kind):
122 self._validate_path(path) # can throw exception if path is invalid
123 self._validate_path(path) # can throw exception if path is invalid
123 self.path = safe_str(path.rstrip('/')) # we store paths as str
124 self.path = safe_str(path.rstrip('/')) # we store paths as str
124 if path == '' and kind != NodeKind.DIR:
125 if path == '' and kind != NodeKind.DIR:
125 raise NodeError("Only DirNode and its subclasses may be "
126 raise NodeError("Only DirNode and its subclasses may be "
126 "initialized with empty path")
127 "initialized with empty path")
127 self.kind = kind
128 self.kind = kind
128
129
129 if self.is_root() and not self.is_dir():
130 if self.is_root() and not self.is_dir():
130 raise NodeError("Root node cannot be FILE kind")
131 raise NodeError("Root node cannot be FILE kind")
131
132
132 def _validate_path(self, path):
133 def _validate_path(self, path):
133 if path.startswith('/'):
134 if path.startswith('/'):
134 raise NodeError(
135 raise NodeError(
135 "Cannot initialize Node objects with slash at "
136 "Cannot initialize Node objects with slash at "
136 "the beginning as only relative paths are supported. "
137 "the beginning as only relative paths are supported. "
137 "Got %s" % (path,))
138 "Got %s" % (path,))
138
139
139 @LazyProperty
140 @LazyProperty
140 def parent(self):
141 def parent(self):
141 parent_path = self.get_parent_path()
142 parent_path = self.get_parent_path()
142 if parent_path:
143 if parent_path:
143 if self.commit:
144 if self.commit:
144 return self.commit.get_node(parent_path)
145 return self.commit.get_node(parent_path)
145 return DirNode(parent_path)
146 return DirNode(parent_path)
146 return None
147 return None
147
148
148 @LazyProperty
149 @LazyProperty
149 def unicode_path(self):
150 def unicode_path(self):
150 return safe_unicode(self.path)
151 return safe_unicode(self.path)
151
152
152 @LazyProperty
153 @LazyProperty
153 def has_rtlo(self):
154 def has_rtlo(self):
154 """Detects if a path has right-to-left-override marker"""
155 """Detects if a path has right-to-left-override marker"""
155 return self.RTLO_MARKER in self.unicode_path
156 return self.RTLO_MARKER in self.unicode_path
156
157
157 @LazyProperty
158 @LazyProperty
158 def unicode_path_safe(self):
159 def unicode_path_safe(self):
159 """
160 """
160 Special SAFE representation of path without the right-to-left-override.
161 Special SAFE representation of path without the right-to-left-override.
161 This should be only used for "showing" the file, cannot be used for any
162 This should be only used for "showing" the file, cannot be used for any
162 urls etc.
163 urls etc.
163 """
164 """
164 return safe_unicode(self.path).replace(self.RTLO_MARKER, '')
165 return safe_unicode(self.path).replace(self.RTLO_MARKER, '')
165
166
166 @LazyProperty
167 @LazyProperty
167 def dir_path(self):
168 def dir_path(self):
168 """
169 """
169 Returns name of the directory from full path of this vcs node. Empty
170 Returns name of the directory from full path of this vcs node. Empty
170 string is returned if there's no directory in the path
171 string is returned if there's no directory in the path
171 """
172 """
172 _parts = self.path.rstrip('/').rsplit('/', 1)
173 _parts = self.path.rstrip('/').rsplit('/', 1)
173 if len(_parts) == 2:
174 if len(_parts) == 2:
174 return safe_unicode(_parts[0])
175 return safe_unicode(_parts[0])
175 return u''
176 return u''
176
177
177 @LazyProperty
178 @LazyProperty
178 def name(self):
179 def name(self):
179 """
180 """
180 Returns name of the node so if its path
181 Returns name of the node so if its path
181 then only last part is returned.
182 then only last part is returned.
182 """
183 """
183 return safe_unicode(self.path.rstrip('/').split('/')[-1])
184 return safe_unicode(self.path.rstrip('/').split('/')[-1])
184
185
185 @property
186 @property
186 def kind(self):
187 def kind(self):
187 return self._kind
188 return self._kind
188
189
189 @kind.setter
190 @kind.setter
190 def kind(self, kind):
191 def kind(self, kind):
191 if hasattr(self, '_kind'):
192 if hasattr(self, '_kind'):
192 raise NodeError("Cannot change node's kind")
193 raise NodeError("Cannot change node's kind")
193 else:
194 else:
194 self._kind = kind
195 self._kind = kind
195 # Post setter check (path's trailing slash)
196 # Post setter check (path's trailing slash)
196 if self.path.endswith('/'):
197 if self.path.endswith('/'):
197 raise NodeError("Node's path cannot end with slash")
198 raise NodeError("Node's path cannot end with slash")
198
199
199 def __cmp__(self, other):
200 def __cmp__(self, other):
200 """
201 """
201 Comparator using name of the node, needed for quick list sorting.
202 Comparator using name of the node, needed for quick list sorting.
202 """
203 """
203
204
204 kind_cmp = cmp(self.kind, other.kind)
205 kind_cmp = cmp(self.kind, other.kind)
205 if kind_cmp:
206 if kind_cmp:
206 if isinstance(self, SubModuleNode):
207 if isinstance(self, SubModuleNode):
207 # we make submodules equal to dirnode for "sorting" purposes
208 # we make submodules equal to dirnode for "sorting" purposes
208 return NodeKind.DIR
209 return NodeKind.DIR
209 return kind_cmp
210 return kind_cmp
210 return cmp(self.name, other.name)
211 return cmp(self.name, other.name)
211
212
212 def __eq__(self, other):
213 def __eq__(self, other):
213 for attr in ['name', 'path', 'kind']:
214 for attr in ['name', 'path', 'kind']:
214 if getattr(self, attr) != getattr(other, attr):
215 if getattr(self, attr) != getattr(other, attr):
215 return False
216 return False
216 if self.is_file():
217 if self.is_file():
217 if self.content != other.content:
218 if self.content != other.content:
218 return False
219 return False
219 else:
220 else:
220 # For DirNode's check without entering each dir
221 # For DirNode's check without entering each dir
221 self_nodes_paths = list(sorted(n.path for n in self.nodes))
222 self_nodes_paths = list(sorted(n.path for n in self.nodes))
222 other_nodes_paths = list(sorted(n.path for n in self.nodes))
223 other_nodes_paths = list(sorted(n.path for n in self.nodes))
223 if self_nodes_paths != other_nodes_paths:
224 if self_nodes_paths != other_nodes_paths:
224 return False
225 return False
225 return True
226 return True
226
227
227 def __ne__(self, other):
228 def __ne__(self, other):
228 return not self.__eq__(other)
229 return not self.__eq__(other)
229
230
230 def __repr__(self):
231 def __repr__(self):
231 return '<%s %r>' % (self.__class__.__name__, self.path)
232 return '<%s %r>' % (self.__class__.__name__, self.path)
232
233
233 def __str__(self):
234 def __str__(self):
234 return self.__repr__()
235 return self.__repr__()
235
236
236 def __unicode__(self):
237 def __unicode__(self):
237 return self.name
238 return self.name
238
239
239 def get_parent_path(self):
240 def get_parent_path(self):
240 """
241 """
241 Returns node's parent path or empty string if node is root.
242 Returns node's parent path or empty string if node is root.
242 """
243 """
243 if self.is_root():
244 if self.is_root():
244 return ''
245 return ''
245 return vcspath.dirname(self.path.rstrip('/')) + '/'
246 return vcspath.dirname(self.path.rstrip('/')) + '/'
246
247
247 def is_file(self):
248 def is_file(self):
248 """
249 """
249 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
250 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
250 otherwise.
251 otherwise.
251 """
252 """
252 return self.kind == NodeKind.FILE
253 return self.kind == NodeKind.FILE
253
254
254 def is_dir(self):
255 def is_dir(self):
255 """
256 """
256 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
257 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
257 otherwise.
258 otherwise.
258 """
259 """
259 return self.kind == NodeKind.DIR
260 return self.kind == NodeKind.DIR
260
261
261 def is_root(self):
262 def is_root(self):
262 """
263 """
263 Returns ``True`` if node is a root node and ``False`` otherwise.
264 Returns ``True`` if node is a root node and ``False`` otherwise.
264 """
265 """
265 return self.kind == NodeKind.DIR and self.path == ''
266 return self.kind == NodeKind.DIR and self.path == ''
266
267
267 def is_submodule(self):
268 def is_submodule(self):
268 """
269 """
269 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
270 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
270 otherwise.
271 otherwise.
271 """
272 """
272 return self.kind == NodeKind.SUBMODULE
273 return self.kind == NodeKind.SUBMODULE
273
274
274 def is_largefile(self):
275 def is_largefile(self):
275 """
276 """
276 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
277 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
277 otherwise
278 otherwise
278 """
279 """
279 return self.kind == NodeKind.LARGEFILE
280 return self.kind == NodeKind.LARGEFILE
280
281
281 def is_link(self):
282 def is_link(self):
282 if self.commit:
283 if self.commit:
283 return self.commit.is_link(self.path)
284 return self.commit.is_link(self.path)
284 return False
285 return False
285
286
286 @LazyProperty
287 @LazyProperty
287 def added(self):
288 def added(self):
288 return self.state is NodeState.ADDED
289 return self.state is NodeState.ADDED
289
290
290 @LazyProperty
291 @LazyProperty
291 def changed(self):
292 def changed(self):
292 return self.state is NodeState.CHANGED
293 return self.state is NodeState.CHANGED
293
294
294 @LazyProperty
295 @LazyProperty
295 def not_changed(self):
296 def not_changed(self):
296 return self.state is NodeState.NOT_CHANGED
297 return self.state is NodeState.NOT_CHANGED
297
298
298 @LazyProperty
299 @LazyProperty
299 def removed(self):
300 def removed(self):
300 return self.state is NodeState.REMOVED
301 return self.state is NodeState.REMOVED
301
302
302
303
303 class FileNode(Node):
304 class FileNode(Node):
304 """
305 """
305 Class representing file nodes.
306 Class representing file nodes.
306
307
307 :attribute: path: path to the node, relative to repository's root
308 :attribute: path: path to the node, relative to repository's root
308 :attribute: content: if given arbitrary sets content of the file
309 :attribute: content: if given arbitrary sets content of the file
309 :attribute: commit: if given, first time content is accessed, callback
310 :attribute: commit: if given, first time content is accessed, callback
310 :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
311 :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
311 """
312 """
312 _filter_pre_load = []
313 _filter_pre_load = []
313
314
314 def __init__(self, path, content=None, commit=None, mode=None, pre_load=None):
315 def __init__(self, path, content=None, commit=None, mode=None, pre_load=None):
315 """
316 """
316 Only one of ``content`` and ``commit`` may be given. Passing both
317 Only one of ``content`` and ``commit`` may be given. Passing both
317 would raise ``NodeError`` exception.
318 would raise ``NodeError`` exception.
318
319
319 :param path: relative path to the node
320 :param path: relative path to the node
320 :param content: content may be passed to constructor
321 :param content: content may be passed to constructor
321 :param commit: if given, will use it to lazily fetch content
322 :param commit: if given, will use it to lazily fetch content
322 :param mode: ST_MODE (i.e. 0100644)
323 :param mode: ST_MODE (i.e. 0100644)
323 """
324 """
324 if content and commit:
325 if content and commit:
325 raise NodeError("Cannot use both content and commit")
326 raise NodeError("Cannot use both content and commit")
326 super(FileNode, self).__init__(path, kind=NodeKind.FILE)
327 super(FileNode, self).__init__(path, kind=NodeKind.FILE)
327 self.commit = commit
328 self.commit = commit
328 self._content = content
329 self._content = content
329 self._mode = mode or FILEMODE_DEFAULT
330 self._mode = mode or FILEMODE_DEFAULT
330
331
331 self._set_bulk_properties(pre_load)
332 self._set_bulk_properties(pre_load)
332
333
333 def _set_bulk_properties(self, pre_load):
334 def _set_bulk_properties(self, pre_load):
334 if not pre_load:
335 if not pre_load:
335 return
336 return
336 pre_load = [entry for entry in pre_load
337 pre_load = [entry for entry in pre_load
337 if entry not in self._filter_pre_load]
338 if entry not in self._filter_pre_load]
338 if not pre_load:
339 if not pre_load:
339 return
340 return
340
341
341 for attr_name in pre_load:
342 for attr_name in pre_load:
342 result = getattr(self, attr_name)
343 result = getattr(self, attr_name)
343 if callable(result):
344 if callable(result):
344 result = result()
345 result = result()
345 self.__dict__[attr_name] = result
346 self.__dict__[attr_name] = result
346
347
347 @LazyProperty
348 @LazyProperty
348 def mode(self):
349 def mode(self):
349 """
350 """
350 Returns lazily mode of the FileNode. If `commit` is not set, would
351 Returns lazily mode of the FileNode. If `commit` is not set, would
351 use value given at initialization or `FILEMODE_DEFAULT` (default).
352 use value given at initialization or `FILEMODE_DEFAULT` (default).
352 """
353 """
353 if self.commit:
354 if self.commit:
354 mode = self.commit.get_file_mode(self.path)
355 mode = self.commit.get_file_mode(self.path)
355 else:
356 else:
356 mode = self._mode
357 mode = self._mode
357 return mode
358 return mode
358
359
359 @LazyProperty
360 @LazyProperty
360 def raw_bytes(self):
361 def raw_bytes(self):
361 """
362 """
362 Returns lazily the raw bytes of the FileNode.
363 Returns lazily the raw bytes of the FileNode.
363 """
364 """
364 if self.commit:
365 if self.commit:
365 if self._content is None:
366 if self._content is None:
366 self._content = self.commit.get_file_content(self.path)
367 self._content = self.commit.get_file_content(self.path)
367 content = self._content
368 content = self._content
368 else:
369 else:
369 content = self._content
370 content = self._content
370 return content
371 return content
371
372
373 def stream_bytes(self):
374 """
375 Returns an iterator that will stream the content of the file directly from
376 vcsserver without loading it to memory.
377 """
378 if self.commit:
379 return self.commit.get_file_content_streamed(self.path)
380 raise NodeError(
381 "Cannot retrieve message of the file without related "
382 "commit attribute")
383
372 @LazyProperty
384 @LazyProperty
373 def md5(self):
385 def md5(self):
374 """
386 """
375 Returns md5 of the file node.
387 Returns md5 of the file node.
376 """
388 """
377 return md5(self.raw_bytes)
389 return md5(self.raw_bytes)
378
390
379 def metadata_uncached(self):
391 def metadata_uncached(self):
380 """
392 """
381 Returns md5, binary flag of the file node, without any cache usage.
393 Returns md5, binary flag of the file node, without any cache usage.
382 """
394 """
383
395
384 content = self.content_uncached()
396 content = self.content_uncached()
385
397
386 is_binary = content and '\0' in content
398 is_binary = content and '\0' in content
387 size = 0
399 size = 0
388 if content:
400 if content:
389 size = len(content)
401 size = len(content)
390
402
391 return is_binary, md5(content), size, content
403 return is_binary, md5(content), size, content
392
404
393 def content_uncached(self):
405 def content_uncached(self):
394 """
406 """
395 Returns lazily content of the FileNode. If possible, would try to
407 Returns lazily content of the FileNode. If possible, would try to
396 decode content from UTF-8.
408 decode content from UTF-8.
397 """
409 """
398 if self.commit:
410 if self.commit:
399 content = self.commit.get_file_content(self.path)
411 content = self.commit.get_file_content(self.path)
400 else:
412 else:
401 content = self._content
413 content = self._content
402 return content
414 return content
403
415
404 @LazyProperty
416 @LazyProperty
405 def content(self):
417 def content(self):
406 """
418 """
407 Returns lazily content of the FileNode. If possible, would try to
419 Returns lazily content of the FileNode. If possible, would try to
408 decode content from UTF-8.
420 decode content from UTF-8.
409 """
421 """
410 content = self.raw_bytes
422 content = self.raw_bytes
411
423
412 if self.is_binary:
424 if self.is_binary:
413 return content
425 return content
414 return safe_unicode(content)
426 return safe_unicode(content)
415
427
416 @LazyProperty
428 @LazyProperty
417 def size(self):
429 def size(self):
418 if self.commit:
430 if self.commit:
419 return self.commit.get_file_size(self.path)
431 return self.commit.get_file_size(self.path)
420 raise NodeError(
432 raise NodeError(
421 "Cannot retrieve size of the file without related "
433 "Cannot retrieve size of the file without related "
422 "commit attribute")
434 "commit attribute")
423
435
424 @LazyProperty
436 @LazyProperty
425 def message(self):
437 def message(self):
426 if self.commit:
438 if self.commit:
427 return self.last_commit.message
439 return self.last_commit.message
428 raise NodeError(
440 raise NodeError(
429 "Cannot retrieve message of the file without related "
441 "Cannot retrieve message of the file without related "
430 "commit attribute")
442 "commit attribute")
431
443
432 @LazyProperty
444 @LazyProperty
433 def last_commit(self):
445 def last_commit(self):
434 if self.commit:
446 if self.commit:
435 pre_load = ["author", "date", "message", "parents"]
447 pre_load = ["author", "date", "message", "parents"]
436 return self.commit.get_path_commit(self.path, pre_load=pre_load)
448 return self.commit.get_path_commit(self.path, pre_load=pre_load)
437 raise NodeError(
449 raise NodeError(
438 "Cannot retrieve last commit of the file without "
450 "Cannot retrieve last commit of the file without "
439 "related commit attribute")
451 "related commit attribute")
440
452
441 def get_mimetype(self):
453 def get_mimetype(self):
442 """
454 """
443 Mimetype is calculated based on the file's content. If ``_mimetype``
455 Mimetype is calculated based on the file's content. If ``_mimetype``
444 attribute is available, it will be returned (backends which store
456 attribute is available, it will be returned (backends which store
445 mimetypes or can easily recognize them, should set this private
457 mimetypes or can easily recognize them, should set this private
446 attribute to indicate that type should *NOT* be calculated).
458 attribute to indicate that type should *NOT* be calculated).
447 """
459 """
448
460
449 if hasattr(self, '_mimetype'):
461 if hasattr(self, '_mimetype'):
450 if (isinstance(self._mimetype, (tuple, list,)) and
462 if (isinstance(self._mimetype, (tuple, list,)) and
451 len(self._mimetype) == 2):
463 len(self._mimetype) == 2):
452 return self._mimetype
464 return self._mimetype
453 else:
465 else:
454 raise NodeError('given _mimetype attribute must be an 2 '
466 raise NodeError('given _mimetype attribute must be an 2 '
455 'element list or tuple')
467 'element list or tuple')
456
468
457 db = get_mimetypes_db()
469 db = get_mimetypes_db()
458 mtype, encoding = db.guess_type(self.name)
470 mtype, encoding = db.guess_type(self.name)
459
471
460 if mtype is None:
472 if mtype is None:
461 if self.is_binary:
473 if self.is_binary:
462 mtype = 'application/octet-stream'
474 mtype = 'application/octet-stream'
463 encoding = None
475 encoding = None
464 else:
476 else:
465 mtype = 'text/plain'
477 mtype = 'text/plain'
466 encoding = None
478 encoding = None
467
479
468 # try with pygments
480 # try with pygments
469 try:
481 try:
470 from pygments.lexers import get_lexer_for_filename
482 from pygments.lexers import get_lexer_for_filename
471 mt = get_lexer_for_filename(self.name).mimetypes
483 mt = get_lexer_for_filename(self.name).mimetypes
472 except Exception:
484 except Exception:
473 mt = None
485 mt = None
474
486
475 if mt:
487 if mt:
476 mtype = mt[0]
488 mtype = mt[0]
477
489
478 return mtype, encoding
490 return mtype, encoding
479
491
480 @LazyProperty
492 @LazyProperty
481 def mimetype(self):
493 def mimetype(self):
482 """
494 """
483 Wrapper around full mimetype info. It returns only type of fetched
495 Wrapper around full mimetype info. It returns only type of fetched
484 mimetype without the encoding part. use get_mimetype function to fetch
496 mimetype without the encoding part. use get_mimetype function to fetch
485 full set of (type,encoding)
497 full set of (type,encoding)
486 """
498 """
487 return self.get_mimetype()[0]
499 return self.get_mimetype()[0]
488
500
489 @LazyProperty
501 @LazyProperty
490 def mimetype_main(self):
502 def mimetype_main(self):
491 return self.mimetype.split('/')[0]
503 return self.mimetype.split('/')[0]
492
504
493 @classmethod
505 @classmethod
494 def get_lexer(cls, filename, content=None):
506 def get_lexer(cls, filename, content=None):
495 from pygments import lexers
507 from pygments import lexers
496
508
497 extension = filename.split('.')[-1]
509 extension = filename.split('.')[-1]
498 lexer = None
510 lexer = None
499
511
500 try:
512 try:
501 lexer = lexers.guess_lexer_for_filename(
513 lexer = lexers.guess_lexer_for_filename(
502 filename, content, stripnl=False)
514 filename, content, stripnl=False)
503 except lexers.ClassNotFound:
515 except lexers.ClassNotFound:
504 lexer = None
516 lexer = None
505
517
506 # try our EXTENSION_MAP
518 # try our EXTENSION_MAP
507 if not lexer:
519 if not lexer:
508 try:
520 try:
509 lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
521 lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
510 if lexer_class:
522 if lexer_class:
511 lexer = lexers.get_lexer_by_name(lexer_class[0])
523 lexer = lexers.get_lexer_by_name(lexer_class[0])
512 except lexers.ClassNotFound:
524 except lexers.ClassNotFound:
513 lexer = None
525 lexer = None
514
526
515 if not lexer:
527 if not lexer:
516 lexer = lexers.TextLexer(stripnl=False)
528 lexer = lexers.TextLexer(stripnl=False)
517
529
518 return lexer
530 return lexer
519
531
520 @LazyProperty
532 @LazyProperty
521 def lexer(self):
533 def lexer(self):
522 """
534 """
523 Returns pygment's lexer class. Would try to guess lexer taking file's
535 Returns pygment's lexer class. Would try to guess lexer taking file's
524 content, name and mimetype.
536 content, name and mimetype.
525 """
537 """
526 return self.get_lexer(self.name, self.content)
538 return self.get_lexer(self.name, self.content)
527
539
528 @LazyProperty
540 @LazyProperty
529 def lexer_alias(self):
541 def lexer_alias(self):
530 """
542 """
531 Returns first alias of the lexer guessed for this file.
543 Returns first alias of the lexer guessed for this file.
532 """
544 """
533 return self.lexer.aliases[0]
545 return self.lexer.aliases[0]
534
546
535 @LazyProperty
547 @LazyProperty
536 def history(self):
548 def history(self):
537 """
549 """
538 Returns a list of commit for this file in which the file was changed
550 Returns a list of commit for this file in which the file was changed
539 """
551 """
540 if self.commit is None:
552 if self.commit is None:
541 raise NodeError('Unable to get commit for this FileNode')
553 raise NodeError('Unable to get commit for this FileNode')
542 return self.commit.get_path_history(self.path)
554 return self.commit.get_path_history(self.path)
543
555
544 @LazyProperty
556 @LazyProperty
545 def annotate(self):
557 def annotate(self):
546 """
558 """
547 Returns a list of three element tuples with lineno, commit and line
559 Returns a list of three element tuples with lineno, commit and line
548 """
560 """
549 if self.commit is None:
561 if self.commit is None:
550 raise NodeError('Unable to get commit for this FileNode')
562 raise NodeError('Unable to get commit for this FileNode')
551 pre_load = ["author", "date", "message", "parents"]
563 pre_load = ["author", "date", "message", "parents"]
552 return self.commit.get_file_annotate(self.path, pre_load=pre_load)
564 return self.commit.get_file_annotate(self.path, pre_load=pre_load)
553
565
554 @LazyProperty
566 @LazyProperty
555 def state(self):
567 def state(self):
556 if not self.commit:
568 if not self.commit:
557 raise NodeError(
569 raise NodeError(
558 "Cannot check state of the node if it's not "
570 "Cannot check state of the node if it's not "
559 "linked with commit")
571 "linked with commit")
560 elif self.path in (node.path for node in self.commit.added):
572 elif self.path in (node.path for node in self.commit.added):
561 return NodeState.ADDED
573 return NodeState.ADDED
562 elif self.path in (node.path for node in self.commit.changed):
574 elif self.path in (node.path for node in self.commit.changed):
563 return NodeState.CHANGED
575 return NodeState.CHANGED
564 else:
576 else:
565 return NodeState.NOT_CHANGED
577 return NodeState.NOT_CHANGED
566
578
567 @LazyProperty
579 @LazyProperty
568 def is_binary(self):
580 def is_binary(self):
569 """
581 """
570 Returns True if file has binary content.
582 Returns True if file has binary content.
571 """
583 """
572 _bin = self.raw_bytes and '\0' in self.raw_bytes
584 _bin = self.raw_bytes and '\0' in self.raw_bytes
573 return _bin
585 return _bin
574
586
575 @LazyProperty
587 @LazyProperty
576 def extension(self):
588 def extension(self):
577 """Returns filenode extension"""
589 """Returns filenode extension"""
578 return self.name.split('.')[-1]
590 return self.name.split('.')[-1]
579
591
580 @property
592 @property
581 def is_executable(self):
593 def is_executable(self):
582 """
594 """
583 Returns ``True`` if file has executable flag turned on.
595 Returns ``True`` if file has executable flag turned on.
584 """
596 """
585 return bool(self.mode & stat.S_IXUSR)
597 return bool(self.mode & stat.S_IXUSR)
586
598
587 def get_largefile_node(self):
599 def get_largefile_node(self):
588 """
600 """
589 Try to return a Mercurial FileNode from this node. It does internal
601 Try to return a Mercurial FileNode from this node. It does internal
590 checks inside largefile store, if that file exist there it will
602 checks inside largefile store, if that file exist there it will
591 create special instance of LargeFileNode which can get content from
603 create special instance of LargeFileNode which can get content from
592 LF store.
604 LF store.
593 """
605 """
594 if self.commit:
606 if self.commit:
595 return self.commit.get_largefile_node(self.path)
607 return self.commit.get_largefile_node(self.path)
596
608
597 def lines(self, count_empty=False):
609 def lines(self, count_empty=False):
598 all_lines, empty_lines = 0, 0
610 all_lines, empty_lines = 0, 0
599
611
600 if not self.is_binary:
612 if not self.is_binary:
601 content = self.content
613 content = self.content
602 if count_empty:
614 if count_empty:
603 all_lines = 0
615 all_lines = 0
604 empty_lines = 0
616 empty_lines = 0
605 for line in content.splitlines(True):
617 for line in content.splitlines(True):
606 if line == '\n':
618 if line == '\n':
607 empty_lines += 1
619 empty_lines += 1
608 all_lines += 1
620 all_lines += 1
609
621
610 return all_lines, all_lines - empty_lines
622 return all_lines, all_lines - empty_lines
611 else:
623 else:
612 # fast method
624 # fast method
613 empty_lines = all_lines = content.count('\n')
625 empty_lines = all_lines = content.count('\n')
614 if all_lines == 0 and content:
626 if all_lines == 0 and content:
615 # one-line without a newline
627 # one-line without a newline
616 empty_lines = all_lines = 1
628 empty_lines = all_lines = 1
617
629
618 return all_lines, empty_lines
630 return all_lines, empty_lines
619
631
620 def __repr__(self):
632 def __repr__(self):
621 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
633 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
622 getattr(self.commit, 'short_id', ''))
634 getattr(self.commit, 'short_id', ''))
623
635
624
636
class RemovedFileNode(FileNode):
    """
    Dummy FileNode class - trying to access any public attribute except path,
    name, kind or state (or methods/attributes checking those two) would raise
    RemovedFileNodeError.
    """
    ALLOWED_ATTRIBUTES = [
        'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
        'added', 'changed', 'not_changed', 'removed'
    ]

    def __init__(self, path):
        """
        :param path: relative path to the node
        """
        super(RemovedFileNode, self).__init__(path=path)

    def __getattribute__(self, attr):
        # private attributes and the explicit whitelist resolve normally;
        # anything else signals that the file no longer exists
        is_allowed = (
            attr.startswith('_')
            or attr in RemovedFileNode.ALLOWED_ATTRIBUTES)
        if not is_allowed:
            raise RemovedFileNodeError(
                "Cannot access attribute %s on RemovedFileNode" % attr)
        return super(RemovedFileNode, self).__getattribute__(attr)

    @LazyProperty
    def state(self):
        # a removed file is always in the REMOVED state
        return NodeState.REMOVED
651
663
652
664
class DirNode(Node):
    """
    DirNode stores list of files and directories within this node.
    Nodes may be used standalone but within repository context they
    lazily fetch data within same repository's commit.
    """

    def __init__(self, path, nodes=(), commit=None):
        """
        Only one of ``nodes`` and ``commit`` may be given. Passing both
        would raise ``NodeError`` exception.

        :param path: relative path to the node
        :param nodes: content may be passed to constructor
        :param commit: if given, will use it to lazily fetch content
        """
        if nodes and commit:
            raise NodeError("Cannot use both nodes and commit")
        super(DirNode, self).__init__(path, NodeKind.DIR)
        self.commit = commit
        self._nodes = nodes

    @LazyProperty
    def content(self):
        # directories have no content by definition
        raise NodeError(
            "%s represents a dir and has no `content` attribute" % self)

    @LazyProperty
    def nodes(self):
        """
        Sorted list of child nodes, fetched lazily from the attached commit
        or taken from the nodes passed to the constructor.
        """
        if self.commit:
            nodes = self.commit.get_nodes(self.path)
        else:
            nodes = self._nodes
        # side effect relied upon by get_node(): path -> node lookup map
        self._nodes_dict = dict((node.path, node) for node in nodes)
        return sorted(nodes)

    @LazyProperty
    def files(self):
        # file children only
        return sorted((node for node in self.nodes if node.is_file()))

    @LazyProperty
    def dirs(self):
        # directory children only
        return sorted((node for node in self.nodes if node.is_dir()))

    def __iter__(self):
        for node in self.nodes:
            yield node

    def get_node(self, path):
        """
        Returns node from within this particular ``DirNode``, so it is now
        allowed to fetch, i.e. node located at 'docs/api/index.rst' from node
        'docs'. In order to access deeper nodes one must fetch nodes between
        them first - this would work::

           docs = root.get_node('docs')
           docs.get_node('api').get_node('index.rst')

        :param: path - relative to the current node

        .. note::
           To access lazily (as in example above) node have to be initialized
           with related commit object - without it node is out of
           context and may know nothing about anything else than nearest
           (located at same level) nodes.
        """
        try:
            path = path.rstrip('/')
            if path == '':
                raise NodeError("Cannot retrieve node without path")
            self.nodes  # access nodes first in order to set _nodes_dict
            paths = path.split('/')
            if len(paths) == 1:
                # direct child: qualify with our own path unless we are root
                if not self.is_root():
                    path = '/'.join((self.path, paths[0]))
                else:
                    path = paths[0]
                return self._nodes_dict[path]
            elif len(paths) > 1:
                # deeper lookup: recurse one path segment at a time
                if self.commit is None:
                    raise NodeError(
                        "Cannot access deeper nodes without commit")
                else:
                    path1, path2 = paths[0], '/'.join(paths[1:])
                    return self.get_node(path1).get_node(path2)
            else:
                raise KeyError
        except KeyError:
            # unify missing-node failures under NodeError
            raise NodeError("Node does not exist at %s" % path)

    @LazyProperty
    def state(self):
        raise NodeError("Cannot access state of DirNode")

    @LazyProperty
    def size(self):
        # total size of all files anywhere below this directory
        size = 0
        for root, dirs, files in self.commit.walk(self.path):
            for f in files:
                size += f.size

        return size

    @LazyProperty
    def last_commit(self):
        # last commit that touched this path; requires an attached commit
        if self.commit:
            pre_load = ["author", "date", "message", "parents"]
            return self.commit.get_path_commit(self.path, pre_load=pre_load)
        raise NodeError(
            "Cannot retrieve last commit of the file without "
            "related commit attribute")

    def __repr__(self):
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
                                 getattr(self.commit, 'short_id', ''))
768
780
769
781
class RootNode(DirNode):
    """
    DirNode being the root node of the repository.
    """

    def __init__(self, nodes=(), commit=None):
        # the root node always lives at the empty path
        super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)

    def __repr__(self):
        return '<%s>' % self.__class__.__name__
780
792
781
793
class SubModuleNode(Node):
    """
    represents a SubModule of Git or SubRepo of Mercurial
    """
    is_binary = False
    size = 0

    def __init__(self, name, url=None, commit=None, alias=None):
        self.path = name
        self.kind = NodeKind.SUBMODULE
        self.alias = alias
        # we have to use EmptyCommit here since this can point to svn/git/hg
        # submodules we cannot get from repository
        self.commit = EmptyCommit(str(commit), alias=alias)
        self.url = url or self._extract_submodule_url()

    def __repr__(self):
        short_id = getattr(self.commit, 'short_id', '')
        return '<%s %r @ %s>' % (self.__class__.__name__, self.path, short_id)

    def _extract_submodule_url(self):
        # TODO: find a way to parse gits submodule file and extract the
        # linking URL
        return self.path

    @LazyProperty
    def name(self):
        """
        Returns name of the node so if its path
        then only last part is returned.
        """
        basename = self.path.rstrip('/').rsplit('/', 1)[-1]
        return u'%s @ %s' % (safe_unicode(basename), self.commit.short_id)
816
828
817
829
class LargeFileNode(FileNode):
    """
    FileNode for a largefile: ``path`` points at the file inside the
    largefile store (system absolute), while ``org_path`` keeps the
    original in-repository path.
    """

    def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
        self.path = path
        self.org_path = org_path
        self.kind = NodeKind.LARGEFILE
        self.alias = alias

    def _validate_path(self, path):
        """
        we override check since the LargeFileNode path is system absolute
        """
        pass

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.path)

    @LazyProperty
    def size(self):
        # size in bytes of the file inside the largefile store
        return os.stat(self.path).st_size

    @LazyProperty
    def raw_bytes(self):
        # full content read in one go; use stream_bytes() for large files
        with open(self.path, 'rb') as f:
            content = f.read()
        return content

    @LazyProperty
    def name(self):
        """
        Overwrites name to be the org lf path
        """
        return self.org_path

    def stream_bytes(self, chunk_size=16 * 1024):
        """
        Yield the file content chunk by chunk without loading it all
        into memory.

        :param chunk_size: size in bytes of each yielded chunk; defaults
            to 16kB, which matches the previously hard-coded value.
        """
        with open(self.path, 'rb') as stream:
            while True:
                data = stream.read(chunk_size)
                if not data:
                    break
                yield data
@@ -1,1902 +1,1826 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import hashlib
23 import hashlib
24 import os
24 import os
25 import re
25 import re
26 import pprint
26 import pprint
27 import shutil
27 import shutil
28 import socket
28 import socket
29 import subprocess32
29 import subprocess32
30 import time
30 import time
31 import uuid
31 import uuid
32 import dateutil.tz
32 import dateutil.tz
33 import functools
33 import functools
34
34
35 import mock
35 import mock
36 import pyramid.testing
36 import pyramid.testing
37 import pytest
37 import pytest
38 import colander
38 import colander
39 import requests
39 import requests
40 import pyramid.paster
40 import pyramid.paster
41
41
42 import rhodecode
42 import rhodecode
43 from rhodecode.lib.utils2 import AttributeDict
43 from rhodecode.lib.utils2 import AttributeDict
44 from rhodecode.model.changeset_status import ChangesetStatusModel
44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 from rhodecode.model.comment import CommentsModel
45 from rhodecode.model.comment import CommentsModel
46 from rhodecode.model.db import (
46 from rhodecode.model.db import (
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 from rhodecode.model.meta import Session
49 from rhodecode.model.meta import Session
50 from rhodecode.model.pull_request import PullRequestModel
50 from rhodecode.model.pull_request import PullRequestModel
51 from rhodecode.model.repo import RepoModel
51 from rhodecode.model.repo import RepoModel
52 from rhodecode.model.repo_group import RepoGroupModel
52 from rhodecode.model.repo_group import RepoGroupModel
53 from rhodecode.model.user import UserModel
53 from rhodecode.model.user import UserModel
54 from rhodecode.model.settings import VcsSettingsModel
54 from rhodecode.model.settings import VcsSettingsModel
55 from rhodecode.model.user_group import UserGroupModel
55 from rhodecode.model.user_group import UserGroupModel
56 from rhodecode.model.integration import IntegrationModel
56 from rhodecode.model.integration import IntegrationModel
57 from rhodecode.integrations import integration_type_registry
57 from rhodecode.integrations import integration_type_registry
58 from rhodecode.integrations.types.base import IntegrationTypeBase
58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 from rhodecode.lib.utils import repo2db_mapper
59 from rhodecode.lib.utils import repo2db_mapper
60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 from rhodecode.lib.vcs.backends import get_backend
60 from rhodecode.lib.vcs.backends import get_backend
62 from rhodecode.lib.vcs.nodes import FileNode
61 from rhodecode.lib.vcs.nodes import FileNode
63 from rhodecode.tests import (
62 from rhodecode.tests import (
64 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
63 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
64 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 TEST_USER_REGULAR_PASS)
65 TEST_USER_REGULAR_PASS)
67 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
66 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 from rhodecode.tests.fixture import Fixture
67 from rhodecode.tests.fixture import Fixture
69 from rhodecode.config import utils as config_utils
68 from rhodecode.config import utils as config_utils
70
69
71 def _split_comma(value):
70 def _split_comma(value):
72 return value.split(',')
71 return value.split(',')
73
72
74
73
def pytest_addoption(parser):
    """
    Register the RhodeCode-specific command line options for the test run.
    """
    parser.addoption(
        '--keep-tmp-path', action='store_true',
        help="Keep the test temporary directories")
    parser.addoption(
        '--backends', action='store', type=_split_comma,
        default=['git', 'hg', 'svn'],
        help="Select which backends to test for backend specific tests.")
    parser.addoption(
        '--dbs', action='store', type=_split_comma,
        default=['sqlite'],
        help="Select which database to test for database specific tests. "
             "Possible options are sqlite,postgres,mysql")
    parser.addoption(
        '--appenlight', '--ae', action='store_true',
        help="Track statistics in appenlight.")
    parser.addoption(
        '--appenlight-api-key', '--ae-key',
        help="API key for Appenlight.")
    parser.addoption(
        '--appenlight-url', '--ae-url',
        default="https://ae.rhodecode.com",
        help="Appenlight service URL, defaults to https://ae.rhodecode.com")
    parser.addoption(
        '--sqlite-connection-string', action='store',
        default='', help="Connection string for the dbs tests with SQLite")
    parser.addoption(
        '--postgres-connection-string', action='store',
        default='', help="Connection string for the dbs tests with Postgres")
    parser.addoption(
        '--mysql-connection-string', action='store',
        default='', help="Connection string for the dbs tests with MySQL")
    parser.addoption(
        '--repeat', type=int, default=100,
        help="Number of repetitions in performance tests.")
110
109
111
110
def pytest_configure(config):
    """
    Pytest configuration hook.

    The import below is kept for its side effects only; the ``patches``
    name is not used further here.
    """
    # NOTE(review): presumably importing this module applies monkey patches
    # on import — confirm against rhodecode.config.patches
    from rhodecode.config import patches
114
113
115
114
def pytest_collection_modifyitems(session, config, items):
    """
    Drop collected items whose object carries ``__test__ = False``.
    """
    # nottest marked, compare nose, used for transition from nose to pytest
    items[:] = [item for item in items if getattr(item.obj, '__test__', True)]
121
120
122
121
def pytest_generate_tests(metafunc):
    """
    Parametrize tests over the backends selected via ``--backends``.

    Skips the test entirely when none of the backends it supports were
    requested on the command line.
    """
    # Support test generation based on --backend parameter
    if 'backend_alias' in metafunc.fixturenames:
        backends = get_backends_from_metafunc(metafunc)
        if not backends:
            pytest.skip("Not enabled for any of selected backends")
        metafunc.parametrize('backend_alias', backends, scope=None)
    elif hasattr(metafunc.function, 'backends'):
        backends = get_backends_from_metafunc(metafunc)
        if not backends:
            pytest.skip("Not enabled for any of selected backends")
135
134
136
135
def get_backends_from_metafunc(metafunc):
    """
    Intersect the backends requested via ``--backends`` with the backends a
    test supports (declared through ``pytest.mark.backends`` or a legacy
    ``backend_alias`` class attribute).
    """
    requested = set(metafunc.config.getoption('--backends'))
    if hasattr(metafunc.function, 'backends'):
        # Supported backends by this test function, created from
        # pytest.mark.backends
        supported = metafunc.definition.get_closest_marker('backends').args
    elif hasattr(metafunc.cls, 'backend_alias'):
        # Support class attribute "backend_alias", this is mainly
        # for legacy reasons for tests not yet using pytest.mark.backends
        supported = [metafunc.cls.backend_alias]
    else:
        supported = metafunc.config.getoption('--backends')
    return requested.intersection(supported)
150
149
151
150
@pytest.fixture(scope='session', autouse=True)
def activate_example_rcextensions(request):
    """
    Patch in an example rcextensions module which verifies passed in kwargs.
    """
    from rhodecode.config import rcextensions

    # swap the global extensions module for the example one; restored below
    old_extensions = rhodecode.EXTENSIONS
    rhodecode.EXTENSIONS = rcextensions
    # recorded calls, inspected through the capture_rcextensions fixture
    rhodecode.EXTENSIONS.calls = collections.defaultdict(list)

    @request.addfinalizer
    def cleanup():
        # restore the original extensions module after the session
        rhodecode.EXTENSIONS = old_extensions
166
165
167
166
@pytest.fixture
def capture_rcextensions():
    """
    Returns the recorded calls to entry points in rcextensions.
    """
    calls = rhodecode.EXTENSIONS.calls
    calls.clear()
    # Note: At this moment, it is still the empty dict, but that will
    # be filled during the test run and since it is a reference this
    # is enough to make it work.
    return calls
179
178
180
179
@pytest.fixture(scope='session')
def http_environ_session():
    """
    Allow to use "http_environ" in session scope.
    """
    return plain_http_environ()
187
186
188
187
def plain_http_host_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    host, port = 'example.com', 80
    return '%s:%d' % (host, port)
194
193
195
194
@pytest.fixture
def http_host_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    return plain_http_host_stub()
202
201
203
202
def plain_http_host_only_stub():
    """
    Value of HTTP_HOST in the test run, without the port part.
    """
    return plain_http_host_stub().partition(':')[0]
209
208
210
209
@pytest.fixture
def http_host_only_stub():
    """
    Value of HTTP_HOST in the test run.
    """
    return plain_http_host_only_stub()
217
216
218
217
def plain_http_environ():
    """
    HTTP extra environ keys.

    Used by the test application and as well for setting up the pylons
    environment. In the case of the fixture "app" it should be possible
    to override this for a specific test case.
    """
    return {
        'SERVER_NAME': plain_http_host_only_stub(),
        # port part of the host stub, e.g. '80' from 'example.com:80'
        'SERVER_PORT': plain_http_host_stub().split(':')[1],
        'HTTP_HOST': plain_http_host_stub(),
        'HTTP_USER_AGENT': 'rc-test-agent',
        'REQUEST_METHOD': 'GET'
    }
234
233
235
234
236 @pytest.fixture
235 @pytest.fixture
237 def http_environ():
236 def http_environ():
238 """
237 """
239 HTTP extra environ keys.
238 HTTP extra environ keys.
240
239
241 User by the test application and as well for setting up the pylons
240 User by the test application and as well for setting up the pylons
242 environment. In the case of the fixture "app" it should be possible
241 environment. In the case of the fixture "app" it should be possible
243 to override this for a specific test case.
242 to override this for a specific test case.
244 """
243 """
245 return plain_http_environ()
244 return plain_http_environ()
246
245
247
246
248 @pytest.fixture(scope='session')
247 @pytest.fixture(scope='session')
249 def baseapp(ini_config, vcsserver, http_environ_session):
248 def baseapp(ini_config, vcsserver, http_environ_session):
250 from rhodecode.lib.pyramid_utils import get_app_config
249 from rhodecode.lib.pyramid_utils import get_app_config
251 from rhodecode.config.middleware import make_pyramid_app
250 from rhodecode.config.middleware import make_pyramid_app
252
251
253 print("Using the RhodeCode configuration:{}".format(ini_config))
252 print("Using the RhodeCode configuration:{}".format(ini_config))
254 pyramid.paster.setup_logging(ini_config)
253 pyramid.paster.setup_logging(ini_config)
255
254
256 settings = get_app_config(ini_config)
255 settings = get_app_config(ini_config)
257 app = make_pyramid_app({'__file__': ini_config}, **settings)
256 app = make_pyramid_app({'__file__': ini_config}, **settings)
258
257
259 return app
258 return app
260
259
261
260
262 @pytest.fixture(scope='function')
261 @pytest.fixture(scope='function')
263 def app(request, config_stub, baseapp, http_environ):
262 def app(request, config_stub, baseapp, http_environ):
264 app = CustomTestApp(
263 app = CustomTestApp(
265 baseapp,
264 baseapp,
266 extra_environ=http_environ)
265 extra_environ=http_environ)
267 if request.cls:
266 if request.cls:
268 request.cls.app = app
267 request.cls.app = app
269 return app
268 return app
270
269
271
270
272 @pytest.fixture(scope='session')
271 @pytest.fixture(scope='session')
273 def app_settings(baseapp, ini_config):
272 def app_settings(baseapp, ini_config):
274 """
273 """
275 Settings dictionary used to create the app.
274 Settings dictionary used to create the app.
276
275
277 Parses the ini file and passes the result through the sanitize and apply
276 Parses the ini file and passes the result through the sanitize and apply
278 defaults mechanism in `rhodecode.config.middleware`.
277 defaults mechanism in `rhodecode.config.middleware`.
279 """
278 """
280 return baseapp.config.get_settings()
279 return baseapp.config.get_settings()
281
280
282
281
283 @pytest.fixture(scope='session')
282 @pytest.fixture(scope='session')
284 def db_connection(ini_settings):
283 def db_connection(ini_settings):
285 # Initialize the database connection.
284 # Initialize the database connection.
286 config_utils.initialize_database(ini_settings)
285 config_utils.initialize_database(ini_settings)
287
286
288
287
289 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
288 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
290
289
291
290
292 def _autologin_user(app, *args):
291 def _autologin_user(app, *args):
293 session = login_user_session(app, *args)
292 session = login_user_session(app, *args)
294 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
293 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
295 return LoginData(csrf_token, session['rhodecode_user'])
294 return LoginData(csrf_token, session['rhodecode_user'])
296
295
297
296
298 @pytest.fixture
297 @pytest.fixture
299 def autologin_user(app):
298 def autologin_user(app):
300 """
299 """
301 Utility fixture which makes sure that the admin user is logged in
300 Utility fixture which makes sure that the admin user is logged in
302 """
301 """
303 return _autologin_user(app)
302 return _autologin_user(app)
304
303
305
304
306 @pytest.fixture
305 @pytest.fixture
307 def autologin_regular_user(app):
306 def autologin_regular_user(app):
308 """
307 """
309 Utility fixture which makes sure that the regular user is logged in
308 Utility fixture which makes sure that the regular user is logged in
310 """
309 """
311 return _autologin_user(
310 return _autologin_user(
312 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
311 app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
313
312
314
313
315 @pytest.fixture(scope='function')
314 @pytest.fixture(scope='function')
316 def csrf_token(request, autologin_user):
315 def csrf_token(request, autologin_user):
317 return autologin_user.csrf_token
316 return autologin_user.csrf_token
318
317
319
318
320 @pytest.fixture(scope='function')
319 @pytest.fixture(scope='function')
321 def xhr_header(request):
320 def xhr_header(request):
322 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
321 return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
323
322
324
323
325 @pytest.fixture
324 @pytest.fixture
326 def real_crypto_backend(monkeypatch):
325 def real_crypto_backend(monkeypatch):
327 """
326 """
328 Switch the production crypto backend on for this test.
327 Switch the production crypto backend on for this test.
329
328
330 During the test run the crypto backend is replaced with a faster
329 During the test run the crypto backend is replaced with a faster
331 implementation based on the MD5 algorithm.
330 implementation based on the MD5 algorithm.
332 """
331 """
333 monkeypatch.setattr(rhodecode, 'is_test', False)
332 monkeypatch.setattr(rhodecode, 'is_test', False)
334
333
335
334
336 @pytest.fixture(scope='class')
335 @pytest.fixture(scope='class')
337 def index_location(request, baseapp):
336 def index_location(request, baseapp):
338 index_location = baseapp.config.get_settings()['search.location']
337 index_location = baseapp.config.get_settings()['search.location']
339 if request.cls:
338 if request.cls:
340 request.cls.index_location = index_location
339 request.cls.index_location = index_location
341 return index_location
340 return index_location
342
341
343
342
344 @pytest.fixture(scope='session', autouse=True)
343 @pytest.fixture(scope='session', autouse=True)
345 def tests_tmp_path(request):
344 def tests_tmp_path(request):
346 """
345 """
347 Create temporary directory to be used during the test session.
346 Create temporary directory to be used during the test session.
348 """
347 """
349 if not os.path.exists(TESTS_TMP_PATH):
348 if not os.path.exists(TESTS_TMP_PATH):
350 os.makedirs(TESTS_TMP_PATH)
349 os.makedirs(TESTS_TMP_PATH)
351
350
352 if not request.config.getoption('--keep-tmp-path'):
351 if not request.config.getoption('--keep-tmp-path'):
353 @request.addfinalizer
352 @request.addfinalizer
354 def remove_tmp_path():
353 def remove_tmp_path():
355 shutil.rmtree(TESTS_TMP_PATH)
354 shutil.rmtree(TESTS_TMP_PATH)
356
355
357 return TESTS_TMP_PATH
356 return TESTS_TMP_PATH
358
357
359
358
360 @pytest.fixture
359 @pytest.fixture
361 def test_repo_group(request):
360 def test_repo_group(request):
362 """
361 """
363 Create a temporary repository group, and destroy it after
362 Create a temporary repository group, and destroy it after
364 usage automatically
363 usage automatically
365 """
364 """
366 fixture = Fixture()
365 fixture = Fixture()
367 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
366 repogroupid = 'test_repo_group_%s' % str(time.time()).replace('.', '')
368 repo_group = fixture.create_repo_group(repogroupid)
367 repo_group = fixture.create_repo_group(repogroupid)
369
368
370 def _cleanup():
369 def _cleanup():
371 fixture.destroy_repo_group(repogroupid)
370 fixture.destroy_repo_group(repogroupid)
372
371
373 request.addfinalizer(_cleanup)
372 request.addfinalizer(_cleanup)
374 return repo_group
373 return repo_group
375
374
376
375
377 @pytest.fixture
376 @pytest.fixture
378 def test_user_group(request):
377 def test_user_group(request):
379 """
378 """
380 Create a temporary user group, and destroy it after
379 Create a temporary user group, and destroy it after
381 usage automatically
380 usage automatically
382 """
381 """
383 fixture = Fixture()
382 fixture = Fixture()
384 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
383 usergroupid = 'test_user_group_%s' % str(time.time()).replace('.', '')
385 user_group = fixture.create_user_group(usergroupid)
384 user_group = fixture.create_user_group(usergroupid)
386
385
387 def _cleanup():
386 def _cleanup():
388 fixture.destroy_user_group(user_group)
387 fixture.destroy_user_group(user_group)
389
388
390 request.addfinalizer(_cleanup)
389 request.addfinalizer(_cleanup)
391 return user_group
390 return user_group
392
391
393
392
394 @pytest.fixture(scope='session')
393 @pytest.fixture(scope='session')
395 def test_repo(request):
394 def test_repo(request):
396 container = TestRepoContainer()
395 container = TestRepoContainer()
397 request.addfinalizer(container._cleanup)
396 request.addfinalizer(container._cleanup)
398 return container
397 return container
399
398
400
399
401 class TestRepoContainer(object):
400 class TestRepoContainer(object):
402 """
401 """
403 Container for test repositories which are used read only.
402 Container for test repositories which are used read only.
404
403
405 Repositories will be created on demand and re-used during the lifetime
404 Repositories will be created on demand and re-used during the lifetime
406 of this object.
405 of this object.
407
406
408 Usage to get the svn test repository "minimal"::
407 Usage to get the svn test repository "minimal"::
409
408
410 test_repo = TestContainer()
409 test_repo = TestContainer()
411 repo = test_repo('minimal', 'svn')
410 repo = test_repo('minimal', 'svn')
412
411
413 """
412 """
414
413
415 dump_extractors = {
414 dump_extractors = {
416 'git': utils.extract_git_repo_from_dump,
415 'git': utils.extract_git_repo_from_dump,
417 'hg': utils.extract_hg_repo_from_dump,
416 'hg': utils.extract_hg_repo_from_dump,
418 'svn': utils.extract_svn_repo_from_dump,
417 'svn': utils.extract_svn_repo_from_dump,
419 }
418 }
420
419
421 def __init__(self):
420 def __init__(self):
422 self._cleanup_repos = []
421 self._cleanup_repos = []
423 self._fixture = Fixture()
422 self._fixture = Fixture()
424 self._repos = {}
423 self._repos = {}
425
424
426 def __call__(self, dump_name, backend_alias, config=None):
425 def __call__(self, dump_name, backend_alias, config=None):
427 key = (dump_name, backend_alias)
426 key = (dump_name, backend_alias)
428 if key not in self._repos:
427 if key not in self._repos:
429 repo = self._create_repo(dump_name, backend_alias, config)
428 repo = self._create_repo(dump_name, backend_alias, config)
430 self._repos[key] = repo.repo_id
429 self._repos[key] = repo.repo_id
431 return Repository.get(self._repos[key])
430 return Repository.get(self._repos[key])
432
431
433 def _create_repo(self, dump_name, backend_alias, config):
432 def _create_repo(self, dump_name, backend_alias, config):
434 repo_name = '%s-%s' % (backend_alias, dump_name)
433 repo_name = '%s-%s' % (backend_alias, dump_name)
435 backend = get_backend(backend_alias)
434 backend = get_backend(backend_alias)
436 dump_extractor = self.dump_extractors[backend_alias]
435 dump_extractor = self.dump_extractors[backend_alias]
437 repo_path = dump_extractor(dump_name, repo_name)
436 repo_path = dump_extractor(dump_name, repo_name)
438
437
439 vcs_repo = backend(repo_path, config=config)
438 vcs_repo = backend(repo_path, config=config)
440 repo2db_mapper({repo_name: vcs_repo})
439 repo2db_mapper({repo_name: vcs_repo})
441
440
442 repo = RepoModel().get_by_repo_name(repo_name)
441 repo = RepoModel().get_by_repo_name(repo_name)
443 self._cleanup_repos.append(repo_name)
442 self._cleanup_repos.append(repo_name)
444 return repo
443 return repo
445
444
446 def _cleanup(self):
445 def _cleanup(self):
447 for repo_name in reversed(self._cleanup_repos):
446 for repo_name in reversed(self._cleanup_repos):
448 self._fixture.destroy_repo(repo_name)
447 self._fixture.destroy_repo(repo_name)
449
448
450
449
451 def backend_base(request, backend_alias, baseapp, test_repo):
450 def backend_base(request, backend_alias, baseapp, test_repo):
452 if backend_alias not in request.config.getoption('--backends'):
451 if backend_alias not in request.config.getoption('--backends'):
453 pytest.skip("Backend %s not selected." % (backend_alias, ))
452 pytest.skip("Backend %s not selected." % (backend_alias, ))
454
453
455 utils.check_xfail_backends(request.node, backend_alias)
454 utils.check_xfail_backends(request.node, backend_alias)
456 utils.check_skip_backends(request.node, backend_alias)
455 utils.check_skip_backends(request.node, backend_alias)
457
456
458 repo_name = 'vcs_test_%s' % (backend_alias, )
457 repo_name = 'vcs_test_%s' % (backend_alias, )
459 backend = Backend(
458 backend = Backend(
460 alias=backend_alias,
459 alias=backend_alias,
461 repo_name=repo_name,
460 repo_name=repo_name,
462 test_name=request.node.name,
461 test_name=request.node.name,
463 test_repo_container=test_repo)
462 test_repo_container=test_repo)
464 request.addfinalizer(backend.cleanup)
463 request.addfinalizer(backend.cleanup)
465 return backend
464 return backend
466
465
467
466
468 @pytest.fixture
467 @pytest.fixture
469 def backend(request, backend_alias, baseapp, test_repo):
468 def backend(request, backend_alias, baseapp, test_repo):
470 """
469 """
471 Parametrized fixture which represents a single backend implementation.
470 Parametrized fixture which represents a single backend implementation.
472
471
473 It respects the option `--backends` to focus the test run on specific
472 It respects the option `--backends` to focus the test run on specific
474 backend implementations.
473 backend implementations.
475
474
476 It also supports `pytest.mark.xfail_backends` to mark tests as failing
475 It also supports `pytest.mark.xfail_backends` to mark tests as failing
477 for specific backends. This is intended as a utility for incremental
476 for specific backends. This is intended as a utility for incremental
478 development of a new backend implementation.
477 development of a new backend implementation.
479 """
478 """
480 return backend_base(request, backend_alias, baseapp, test_repo)
479 return backend_base(request, backend_alias, baseapp, test_repo)
481
480
482
481
483 @pytest.fixture
482 @pytest.fixture
484 def backend_git(request, baseapp, test_repo):
483 def backend_git(request, baseapp, test_repo):
485 return backend_base(request, 'git', baseapp, test_repo)
484 return backend_base(request, 'git', baseapp, test_repo)
486
485
487
486
488 @pytest.fixture
487 @pytest.fixture
489 def backend_hg(request, baseapp, test_repo):
488 def backend_hg(request, baseapp, test_repo):
490 return backend_base(request, 'hg', baseapp, test_repo)
489 return backend_base(request, 'hg', baseapp, test_repo)
491
490
492
491
493 @pytest.fixture
492 @pytest.fixture
494 def backend_svn(request, baseapp, test_repo):
493 def backend_svn(request, baseapp, test_repo):
495 return backend_base(request, 'svn', baseapp, test_repo)
494 return backend_base(request, 'svn', baseapp, test_repo)
496
495
497
496
498 @pytest.fixture
497 @pytest.fixture
499 def backend_random(backend_git):
498 def backend_random(backend_git):
500 """
499 """
501 Use this to express that your tests need "a backend.
500 Use this to express that your tests need "a backend.
502
501
503 A few of our tests need a backend, so that we can run the code. This
502 A few of our tests need a backend, so that we can run the code. This
504 fixture is intended to be used for such cases. It will pick one of the
503 fixture is intended to be used for such cases. It will pick one of the
505 backends and run the tests.
504 backends and run the tests.
506
505
507 The fixture `backend` would run the test multiple times for each
506 The fixture `backend` would run the test multiple times for each
508 available backend which is a pure waste of time if the test is
507 available backend which is a pure waste of time if the test is
509 independent of the backend type.
508 independent of the backend type.
510 """
509 """
511 # TODO: johbo: Change this to pick a random backend
510 # TODO: johbo: Change this to pick a random backend
512 return backend_git
511 return backend_git
513
512
514
513
515 @pytest.fixture
514 @pytest.fixture
516 def backend_stub(backend_git):
515 def backend_stub(backend_git):
517 """
516 """
518 Use this to express that your tests need a backend stub
517 Use this to express that your tests need a backend stub
519
518
520 TODO: mikhail: Implement a real stub logic instead of returning
519 TODO: mikhail: Implement a real stub logic instead of returning
521 a git backend
520 a git backend
522 """
521 """
523 return backend_git
522 return backend_git
524
523
525
524
526 @pytest.fixture
525 @pytest.fixture
527 def repo_stub(backend_stub):
526 def repo_stub(backend_stub):
528 """
527 """
529 Use this to express that your tests need a repository stub
528 Use this to express that your tests need a repository stub
530 """
529 """
531 return backend_stub.create_repo()
530 return backend_stub.create_repo()
532
531
533
532
534 class Backend(object):
533 class Backend(object):
535 """
534 """
536 Represents the test configuration for one supported backend
535 Represents the test configuration for one supported backend
537
536
538 Provides easy access to different test repositories based on
537 Provides easy access to different test repositories based on
539 `__getitem__`. Such repositories will only be created once per test
538 `__getitem__`. Such repositories will only be created once per test
540 session.
539 session.
541 """
540 """
542
541
543 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
542 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
544 _master_repo = None
543 _master_repo = None
545 _commit_ids = {}
544 _commit_ids = {}
546
545
547 def __init__(self, alias, repo_name, test_name, test_repo_container):
546 def __init__(self, alias, repo_name, test_name, test_repo_container):
548 self.alias = alias
547 self.alias = alias
549 self.repo_name = repo_name
548 self.repo_name = repo_name
550 self._cleanup_repos = []
549 self._cleanup_repos = []
551 self._test_name = test_name
550 self._test_name = test_name
552 self._test_repo_container = test_repo_container
551 self._test_repo_container = test_repo_container
553 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
552 # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
554 # Fixture will survive in the end.
553 # Fixture will survive in the end.
555 self._fixture = Fixture()
554 self._fixture = Fixture()
556
555
557 def __getitem__(self, key):
556 def __getitem__(self, key):
558 return self._test_repo_container(key, self.alias)
557 return self._test_repo_container(key, self.alias)
559
558
560 def create_test_repo(self, key, config=None):
559 def create_test_repo(self, key, config=None):
561 return self._test_repo_container(key, self.alias, config)
560 return self._test_repo_container(key, self.alias, config)
562
561
563 @property
562 @property
564 def repo(self):
563 def repo(self):
565 """
564 """
566 Returns the "current" repository. This is the vcs_test repo or the
565 Returns the "current" repository. This is the vcs_test repo or the
567 last repo which has been created with `create_repo`.
566 last repo which has been created with `create_repo`.
568 """
567 """
569 from rhodecode.model.db import Repository
568 from rhodecode.model.db import Repository
570 return Repository.get_by_repo_name(self.repo_name)
569 return Repository.get_by_repo_name(self.repo_name)
571
570
572 @property
571 @property
573 def default_branch_name(self):
572 def default_branch_name(self):
574 VcsRepository = get_backend(self.alias)
573 VcsRepository = get_backend(self.alias)
575 return VcsRepository.DEFAULT_BRANCH_NAME
574 return VcsRepository.DEFAULT_BRANCH_NAME
576
575
577 @property
576 @property
578 def default_head_id(self):
577 def default_head_id(self):
579 """
578 """
580 Returns the default head id of the underlying backend.
579 Returns the default head id of the underlying backend.
581
580
582 This will be the default branch name in case the backend does have a
581 This will be the default branch name in case the backend does have a
583 default branch. In the other cases it will point to a valid head
582 default branch. In the other cases it will point to a valid head
584 which can serve as the base to create a new commit on top of it.
583 which can serve as the base to create a new commit on top of it.
585 """
584 """
586 vcsrepo = self.repo.scm_instance()
585 vcsrepo = self.repo.scm_instance()
587 head_id = (
586 head_id = (
588 vcsrepo.DEFAULT_BRANCH_NAME or
587 vcsrepo.DEFAULT_BRANCH_NAME or
589 vcsrepo.commit_ids[-1])
588 vcsrepo.commit_ids[-1])
590 return head_id
589 return head_id
591
590
592 @property
591 @property
593 def commit_ids(self):
592 def commit_ids(self):
594 """
593 """
595 Returns the list of commits for the last created repository
594 Returns the list of commits for the last created repository
596 """
595 """
597 return self._commit_ids
596 return self._commit_ids
598
597
599 def create_master_repo(self, commits):
598 def create_master_repo(self, commits):
600 """
599 """
601 Create a repository and remember it as a template.
600 Create a repository and remember it as a template.
602
601
603 This allows to easily create derived repositories to construct
602 This allows to easily create derived repositories to construct
604 more complex scenarios for diff, compare and pull requests.
603 more complex scenarios for diff, compare and pull requests.
605
604
606 Returns a commit map which maps from commit message to raw_id.
605 Returns a commit map which maps from commit message to raw_id.
607 """
606 """
608 self._master_repo = self.create_repo(commits=commits)
607 self._master_repo = self.create_repo(commits=commits)
609 return self._commit_ids
608 return self._commit_ids
610
609
611 def create_repo(
610 def create_repo(
612 self, commits=None, number_of_commits=0, heads=None,
611 self, commits=None, number_of_commits=0, heads=None,
613 name_suffix=u'', bare=False, **kwargs):
612 name_suffix=u'', bare=False, **kwargs):
614 """
613 """
615 Create a repository and record it for later cleanup.
614 Create a repository and record it for later cleanup.
616
615
617 :param commits: Optional. A sequence of dict instances.
616 :param commits: Optional. A sequence of dict instances.
618 Will add a commit per entry to the new repository.
617 Will add a commit per entry to the new repository.
619 :param number_of_commits: Optional. If set to a number, this number of
618 :param number_of_commits: Optional. If set to a number, this number of
620 commits will be added to the new repository.
619 commits will be added to the new repository.
621 :param heads: Optional. Can be set to a sequence of of commit
620 :param heads: Optional. Can be set to a sequence of of commit
622 names which shall be pulled in from the master repository.
621 names which shall be pulled in from the master repository.
623 :param name_suffix: adds special suffix to generated repo name
622 :param name_suffix: adds special suffix to generated repo name
624 :param bare: set a repo as bare (no checkout)
623 :param bare: set a repo as bare (no checkout)
625 """
624 """
626 self.repo_name = self._next_repo_name() + name_suffix
625 self.repo_name = self._next_repo_name() + name_suffix
627 repo = self._fixture.create_repo(
626 repo = self._fixture.create_repo(
628 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
627 self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
629 self._cleanup_repos.append(repo.repo_name)
628 self._cleanup_repos.append(repo.repo_name)
630
629
631 commits = commits or [
630 commits = commits or [
632 {'message': 'Commit %s of %s' % (x, self.repo_name)}
631 {'message': 'Commit %s of %s' % (x, self.repo_name)}
633 for x in range(number_of_commits)]
632 for x in range(number_of_commits)]
634 vcs_repo = repo.scm_instance()
633 vcs_repo = repo.scm_instance()
635 vcs_repo.count()
634 vcs_repo.count()
636 self._add_commits_to_repo(vcs_repo, commits)
635 self._add_commits_to_repo(vcs_repo, commits)
637 if heads:
636 if heads:
638 self.pull_heads(repo, heads)
637 self.pull_heads(repo, heads)
639
638
640 return repo
639 return repo
641
640
642 def pull_heads(self, repo, heads):
641 def pull_heads(self, repo, heads):
643 """
642 """
644 Make sure that repo contains all commits mentioned in `heads`
643 Make sure that repo contains all commits mentioned in `heads`
645 """
644 """
646 vcsmaster = self._master_repo.scm_instance()
645 vcsmaster = self._master_repo.scm_instance()
647 vcsrepo = repo.scm_instance()
646 vcsrepo = repo.scm_instance()
648 vcsrepo.config.clear_section('hooks')
647 vcsrepo.config.clear_section('hooks')
649 commit_ids = [self._commit_ids[h] for h in heads]
648 commit_ids = [self._commit_ids[h] for h in heads]
650 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
649 vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)
651
650
652 def create_fork(self):
651 def create_fork(self):
653 repo_to_fork = self.repo_name
652 repo_to_fork = self.repo_name
654 self.repo_name = self._next_repo_name()
653 self.repo_name = self._next_repo_name()
655 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
654 repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
656 self._cleanup_repos.append(self.repo_name)
655 self._cleanup_repos.append(self.repo_name)
657 return repo
656 return repo
658
657
659 def new_repo_name(self, suffix=u''):
658 def new_repo_name(self, suffix=u''):
660 self.repo_name = self._next_repo_name() + suffix
659 self.repo_name = self._next_repo_name() + suffix
661 self._cleanup_repos.append(self.repo_name)
660 self._cleanup_repos.append(self.repo_name)
662 return self.repo_name
661 return self.repo_name
663
662
664 def _next_repo_name(self):
663 def _next_repo_name(self):
665 return u"%s_%s" % (
664 return u"%s_%s" % (
666 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
665 self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))
667
666
668 def ensure_file(self, filename, content='Test content\n'):
667 def ensure_file(self, filename, content='Test content\n'):
669 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
668 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
670 commits = [
669 commits = [
671 {'added': [
670 {'added': [
672 FileNode(filename, content=content),
671 FileNode(filename, content=content),
673 ]},
672 ]},
674 ]
673 ]
675 self._add_commits_to_repo(self.repo.scm_instance(), commits)
674 self._add_commits_to_repo(self.repo.scm_instance(), commits)
676
675
677 def enable_downloads(self):
676 def enable_downloads(self):
678 repo = self.repo
677 repo = self.repo
679 repo.enable_downloads = True
678 repo.enable_downloads = True
680 Session().add(repo)
679 Session().add(repo)
681 Session().commit()
680 Session().commit()
682
681
683 def cleanup(self):
682 def cleanup(self):
684 for repo_name in reversed(self._cleanup_repos):
683 for repo_name in reversed(self._cleanup_repos):
685 self._fixture.destroy_repo(repo_name)
684 self._fixture.destroy_repo(repo_name)
686
685
687 def _add_commits_to_repo(self, repo, commits):
686 def _add_commits_to_repo(self, repo, commits):
688 commit_ids = _add_commits_to_repo(repo, commits)
687 commit_ids = _add_commits_to_repo(repo, commits)
689 if not commit_ids:
688 if not commit_ids:
690 return
689 return
691 self._commit_ids = commit_ids
690 self._commit_ids = commit_ids
692
691
693 # Creating refs for Git to allow fetching them from remote repository
692 # Creating refs for Git to allow fetching them from remote repository
694 if self.alias == 'git':
693 if self.alias == 'git':
695 refs = {}
694 refs = {}
696 for message in self._commit_ids:
695 for message in self._commit_ids:
697 # TODO: mikhail: do more special chars replacements
696 # TODO: mikhail: do more special chars replacements
698 ref_name = 'refs/test-refs/{}'.format(
697 ref_name = 'refs/test-refs/{}'.format(
699 message.replace(' ', ''))
698 message.replace(' ', ''))
700 refs[ref_name] = self._commit_ids[message]
699 refs[ref_name] = self._commit_ids[message]
701 self._create_refs(repo, refs)
700 self._create_refs(repo, refs)
702
701
703 def _create_refs(self, repo, refs):
702 def _create_refs(self, repo, refs):
704 for ref_name in refs:
703 for ref_name in refs:
705 repo.set_refs(ref_name, refs[ref_name])
704 repo.set_refs(ref_name, refs[ref_name])
706
705
707
706
708 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
707 def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
709 if backend_alias not in request.config.getoption('--backends'):
708 if backend_alias not in request.config.getoption('--backends'):
710 pytest.skip("Backend %s not selected." % (backend_alias, ))
709 pytest.skip("Backend %s not selected." % (backend_alias, ))
711
710
712 utils.check_xfail_backends(request.node, backend_alias)
711 utils.check_xfail_backends(request.node, backend_alias)
713 utils.check_skip_backends(request.node, backend_alias)
712 utils.check_skip_backends(request.node, backend_alias)
714
713
715 repo_name = 'vcs_test_%s' % (backend_alias, )
714 repo_name = 'vcs_test_%s' % (backend_alias, )
716 repo_path = os.path.join(tests_tmp_path, repo_name)
715 repo_path = os.path.join(tests_tmp_path, repo_name)
717 backend = VcsBackend(
716 backend = VcsBackend(
718 alias=backend_alias,
717 alias=backend_alias,
719 repo_path=repo_path,
718 repo_path=repo_path,
720 test_name=request.node.name,
719 test_name=request.node.name,
721 test_repo_container=test_repo)
720 test_repo_container=test_repo)
722 request.addfinalizer(backend.cleanup)
721 request.addfinalizer(backend.cleanup)
723 return backend
722 return backend
724
723
725
724
726 @pytest.fixture
725 @pytest.fixture
727 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
726 def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
728 """
727 """
729 Parametrized fixture which represents a single vcs backend implementation.
728 Parametrized fixture which represents a single vcs backend implementation.
730
729
731 See the fixture `backend` for more details. This one implements the same
730 See the fixture `backend` for more details. This one implements the same
732 concept, but on vcs level. So it does not provide model instances etc.
731 concept, but on vcs level. So it does not provide model instances etc.
733
732
734 Parameters are generated dynamically, see :func:`pytest_generate_tests`
733 Parameters are generated dynamically, see :func:`pytest_generate_tests`
735 for how this works.
734 for how this works.
736 """
735 """
737 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
736 return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
738
737
739
738
740 @pytest.fixture
739 @pytest.fixture
741 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
740 def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
742 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
741 return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
743
742
744
743
745 @pytest.fixture
744 @pytest.fixture
746 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
745 def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
747 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
746 return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
748
747
749
748
750 @pytest.fixture
749 @pytest.fixture
751 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
750 def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
752 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
751 return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
753
752
754
753
755 @pytest.fixture
754 @pytest.fixture
756 def vcsbackend_stub(vcsbackend_git):
755 def vcsbackend_stub(vcsbackend_git):
757 """
756 """
758 Use this to express that your test just needs a stub of a vcsbackend.
757 Use this to express that your test just needs a stub of a vcsbackend.
759
758
760 Plan is to eventually implement an in-memory stub to speed tests up.
759 Plan is to eventually implement an in-memory stub to speed tests up.
761 """
760 """
762 return vcsbackend_git
761 return vcsbackend_git
763
762
764
763
765 class VcsBackend(object):
764 class VcsBackend(object):
766 """
765 """
767 Represents the test configuration for one supported vcs backend.
766 Represents the test configuration for one supported vcs backend.
768 """
767 """
769
768
770 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
769 invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
771
770
772 def __init__(self, alias, repo_path, test_name, test_repo_container):
771 def __init__(self, alias, repo_path, test_name, test_repo_container):
773 self.alias = alias
772 self.alias = alias
774 self._repo_path = repo_path
773 self._repo_path = repo_path
775 self._cleanup_repos = []
774 self._cleanup_repos = []
776 self._test_name = test_name
775 self._test_name = test_name
777 self._test_repo_container = test_repo_container
776 self._test_repo_container = test_repo_container
778
777
779 def __getitem__(self, key):
778 def __getitem__(self, key):
780 return self._test_repo_container(key, self.alias).scm_instance()
779 return self._test_repo_container(key, self.alias).scm_instance()
781
780
782 @property
781 @property
783 def repo(self):
782 def repo(self):
784 """
783 """
785 Returns the "current" repository. This is the vcs_test repo of the last
784 Returns the "current" repository. This is the vcs_test repo of the last
786 repo which has been created.
785 repo which has been created.
787 """
786 """
788 Repository = get_backend(self.alias)
787 Repository = get_backend(self.alias)
789 return Repository(self._repo_path)
788 return Repository(self._repo_path)
790
789
791 @property
790 @property
792 def backend(self):
791 def backend(self):
793 """
792 """
794 Returns the backend implementation class.
793 Returns the backend implementation class.
795 """
794 """
796 return get_backend(self.alias)
795 return get_backend(self.alias)
797
796
798 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
797 def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
799 bare=False):
798 bare=False):
800 repo_name = self._next_repo_name()
799 repo_name = self._next_repo_name()
801 self._repo_path = get_new_dir(repo_name)
800 self._repo_path = get_new_dir(repo_name)
802 repo_class = get_backend(self.alias)
801 repo_class = get_backend(self.alias)
803 src_url = None
802 src_url = None
804 if _clone_repo:
803 if _clone_repo:
805 src_url = _clone_repo.path
804 src_url = _clone_repo.path
806 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
805 repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
807 self._cleanup_repos.append(repo)
806 self._cleanup_repos.append(repo)
808
807
809 commits = commits or [
808 commits = commits or [
810 {'message': 'Commit %s of %s' % (x, repo_name)}
809 {'message': 'Commit %s of %s' % (x, repo_name)}
811 for x in xrange(number_of_commits)]
810 for x in xrange(number_of_commits)]
812 _add_commits_to_repo(repo, commits)
811 _add_commits_to_repo(repo, commits)
813 return repo
812 return repo
814
813
815 def clone_repo(self, repo):
814 def clone_repo(self, repo):
816 return self.create_repo(_clone_repo=repo)
815 return self.create_repo(_clone_repo=repo)
817
816
818 def cleanup(self):
817 def cleanup(self):
819 for repo in self._cleanup_repos:
818 for repo in self._cleanup_repos:
820 shutil.rmtree(repo.path)
819 shutil.rmtree(repo.path)
821
820
822 def new_repo_path(self):
821 def new_repo_path(self):
823 repo_name = self._next_repo_name()
822 repo_name = self._next_repo_name()
824 self._repo_path = get_new_dir(repo_name)
823 self._repo_path = get_new_dir(repo_name)
825 return self._repo_path
824 return self._repo_path
826
825
827 def _next_repo_name(self):
826 def _next_repo_name(self):
828 return "%s_%s" % (
827 return "%s_%s" % (
829 self.invalid_repo_name.sub('_', self._test_name),
828 self.invalid_repo_name.sub('_', self._test_name),
830 len(self._cleanup_repos))
829 len(self._cleanup_repos))
831
830
832 def add_file(self, repo, filename, content='Test content\n'):
831 def add_file(self, repo, filename, content='Test content\n'):
833 imc = repo.in_memory_commit
832 imc = repo.in_memory_commit
834 imc.add(FileNode(filename, content=content))
833 imc.add(FileNode(filename, content=content))
835 imc.commit(
834 imc.commit(
836 message=u'Automatic commit from vcsbackend fixture',
835 message=u'Automatic commit from vcsbackend fixture',
837 author=u'Automatic <automatic@rhodecode.com>')
836 author=u'Automatic <automatic@rhodecode.com>')
838
837
839 def ensure_file(self, filename, content='Test content\n'):
838 def ensure_file(self, filename, content='Test content\n'):
840 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
839 assert self._cleanup_repos, "Avoid writing into vcs_test repos"
841 self.add_file(self.repo, filename, content)
840 self.add_file(self.repo, filename, content)
842
841
843
842
844 def _add_commits_to_repo(vcs_repo, commits):
843 def _add_commits_to_repo(vcs_repo, commits):
845 commit_ids = {}
844 commit_ids = {}
846 if not commits:
845 if not commits:
847 return commit_ids
846 return commit_ids
848
847
849 imc = vcs_repo.in_memory_commit
848 imc = vcs_repo.in_memory_commit
850 commit = None
849 commit = None
851
850
852 for idx, commit in enumerate(commits):
851 for idx, commit in enumerate(commits):
853 message = unicode(commit.get('message', 'Commit %s' % idx))
852 message = unicode(commit.get('message', 'Commit %s' % idx))
854
853
855 for node in commit.get('added', []):
854 for node in commit.get('added', []):
856 imc.add(FileNode(node.path, content=node.content))
855 imc.add(FileNode(node.path, content=node.content))
857 for node in commit.get('changed', []):
856 for node in commit.get('changed', []):
858 imc.change(FileNode(node.path, content=node.content))
857 imc.change(FileNode(node.path, content=node.content))
859 for node in commit.get('removed', []):
858 for node in commit.get('removed', []):
860 imc.remove(FileNode(node.path))
859 imc.remove(FileNode(node.path))
861
860
862 parents = [
861 parents = [
863 vcs_repo.get_commit(commit_id=commit_ids[p])
862 vcs_repo.get_commit(commit_id=commit_ids[p])
864 for p in commit.get('parents', [])]
863 for p in commit.get('parents', [])]
865
864
866 operations = ('added', 'changed', 'removed')
865 operations = ('added', 'changed', 'removed')
867 if not any((commit.get(o) for o in operations)):
866 if not any((commit.get(o) for o in operations)):
868 imc.add(FileNode('file_%s' % idx, content=message))
867 imc.add(FileNode('file_%s' % idx, content=message))
869
868
870 commit = imc.commit(
869 commit = imc.commit(
871 message=message,
870 message=message,
872 author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
871 author=unicode(commit.get('author', 'Automatic <automatic@rhodecode.com>')),
873 date=commit.get('date'),
872 date=commit.get('date'),
874 branch=commit.get('branch'),
873 branch=commit.get('branch'),
875 parents=parents)
874 parents=parents)
876
875
877 commit_ids[commit.message] = commit.raw_id
876 commit_ids[commit.message] = commit.raw_id
878
877
879 return commit_ids
878 return commit_ids
880
879
881
880
882 @pytest.fixture
881 @pytest.fixture
883 def reposerver(request):
882 def reposerver(request):
884 """
883 """
885 Allows to serve a backend repository
884 Allows to serve a backend repository
886 """
885 """
887
886
888 repo_server = RepoServer()
887 repo_server = RepoServer()
889 request.addfinalizer(repo_server.cleanup)
888 request.addfinalizer(repo_server.cleanup)
890 return repo_server
889 return repo_server
891
890
892
891
893 class RepoServer(object):
892 class RepoServer(object):
894 """
893 """
895 Utility to serve a local repository for the duration of a test case.
894 Utility to serve a local repository for the duration of a test case.
896
895
897 Supports only Subversion so far.
896 Supports only Subversion so far.
898 """
897 """
899
898
900 url = None
899 url = None
901
900
902 def __init__(self):
901 def __init__(self):
903 self._cleanup_servers = []
902 self._cleanup_servers = []
904
903
905 def serve(self, vcsrepo):
904 def serve(self, vcsrepo):
906 if vcsrepo.alias != 'svn':
905 if vcsrepo.alias != 'svn':
907 raise TypeError("Backend %s not supported" % vcsrepo.alias)
906 raise TypeError("Backend %s not supported" % vcsrepo.alias)
908
907
909 proc = subprocess32.Popen(
908 proc = subprocess32.Popen(
910 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
909 ['svnserve', '-d', '--foreground', '--listen-host', 'localhost',
911 '--root', vcsrepo.path])
910 '--root', vcsrepo.path])
912 self._cleanup_servers.append(proc)
911 self._cleanup_servers.append(proc)
913 self.url = 'svn://localhost'
912 self.url = 'svn://localhost'
914
913
915 def cleanup(self):
914 def cleanup(self):
916 for proc in self._cleanup_servers:
915 for proc in self._cleanup_servers:
917 proc.terminate()
916 proc.terminate()
918
917
919
918
920 @pytest.fixture
919 @pytest.fixture
921 def pr_util(backend, request, config_stub):
920 def pr_util(backend, request, config_stub):
922 """
921 """
923 Utility for tests of models and for functional tests around pull requests.
922 Utility for tests of models and for functional tests around pull requests.
924
923
925 It gives an instance of :class:`PRTestUtility` which provides various
924 It gives an instance of :class:`PRTestUtility` which provides various
926 utility methods around one pull request.
925 utility methods around one pull request.
927
926
928 This fixture uses `backend` and inherits its parameterization.
927 This fixture uses `backend` and inherits its parameterization.
929 """
928 """
930
929
931 util = PRTestUtility(backend)
930 util = PRTestUtility(backend)
932 request.addfinalizer(util.cleanup)
931 request.addfinalizer(util.cleanup)
933
932
934 return util
933 return util
935
934
936
935
937 class PRTestUtility(object):
936 class PRTestUtility(object):
938
937
939 pull_request = None
938 pull_request = None
940 pull_request_id = None
939 pull_request_id = None
941 mergeable_patcher = None
940 mergeable_patcher = None
942 mergeable_mock = None
941 mergeable_mock = None
943 notification_patcher = None
942 notification_patcher = None
944
943
945 def __init__(self, backend):
944 def __init__(self, backend):
946 self.backend = backend
945 self.backend = backend
947
946
948 def create_pull_request(
947 def create_pull_request(
949 self, commits=None, target_head=None, source_head=None,
948 self, commits=None, target_head=None, source_head=None,
950 revisions=None, approved=False, author=None, mergeable=False,
949 revisions=None, approved=False, author=None, mergeable=False,
951 enable_notifications=True, name_suffix=u'', reviewers=None,
950 enable_notifications=True, name_suffix=u'', reviewers=None,
952 title=u"Test", description=u"Description"):
951 title=u"Test", description=u"Description"):
953 self.set_mergeable(mergeable)
952 self.set_mergeable(mergeable)
954 if not enable_notifications:
953 if not enable_notifications:
955 # mock notification side effect
954 # mock notification side effect
956 self.notification_patcher = mock.patch(
955 self.notification_patcher = mock.patch(
957 'rhodecode.model.notification.NotificationModel.create')
956 'rhodecode.model.notification.NotificationModel.create')
958 self.notification_patcher.start()
957 self.notification_patcher.start()
959
958
960 if not self.pull_request:
959 if not self.pull_request:
961 if not commits:
960 if not commits:
962 commits = [
961 commits = [
963 {'message': 'c1'},
962 {'message': 'c1'},
964 {'message': 'c2'},
963 {'message': 'c2'},
965 {'message': 'c3'},
964 {'message': 'c3'},
966 ]
965 ]
967 target_head = 'c1'
966 target_head = 'c1'
968 source_head = 'c2'
967 source_head = 'c2'
969 revisions = ['c2']
968 revisions = ['c2']
970
969
971 self.commit_ids = self.backend.create_master_repo(commits)
970 self.commit_ids = self.backend.create_master_repo(commits)
972 self.target_repository = self.backend.create_repo(
971 self.target_repository = self.backend.create_repo(
973 heads=[target_head], name_suffix=name_suffix)
972 heads=[target_head], name_suffix=name_suffix)
974 self.source_repository = self.backend.create_repo(
973 self.source_repository = self.backend.create_repo(
975 heads=[source_head], name_suffix=name_suffix)
974 heads=[source_head], name_suffix=name_suffix)
976 self.author = author or UserModel().get_by_username(
975 self.author = author or UserModel().get_by_username(
977 TEST_USER_ADMIN_LOGIN)
976 TEST_USER_ADMIN_LOGIN)
978
977
979 model = PullRequestModel()
978 model = PullRequestModel()
980 self.create_parameters = {
979 self.create_parameters = {
981 'created_by': self.author,
980 'created_by': self.author,
982 'source_repo': self.source_repository.repo_name,
981 'source_repo': self.source_repository.repo_name,
983 'source_ref': self._default_branch_reference(source_head),
982 'source_ref': self._default_branch_reference(source_head),
984 'target_repo': self.target_repository.repo_name,
983 'target_repo': self.target_repository.repo_name,
985 'target_ref': self._default_branch_reference(target_head),
984 'target_ref': self._default_branch_reference(target_head),
986 'revisions': [self.commit_ids[r] for r in revisions],
985 'revisions': [self.commit_ids[r] for r in revisions],
987 'reviewers': reviewers or self._get_reviewers(),
986 'reviewers': reviewers or self._get_reviewers(),
988 'title': title,
987 'title': title,
989 'description': description,
988 'description': description,
990 }
989 }
991 self.pull_request = model.create(**self.create_parameters)
990 self.pull_request = model.create(**self.create_parameters)
992 assert model.get_versions(self.pull_request) == []
991 assert model.get_versions(self.pull_request) == []
993
992
994 self.pull_request_id = self.pull_request.pull_request_id
993 self.pull_request_id = self.pull_request.pull_request_id
995
994
996 if approved:
995 if approved:
997 self.approve()
996 self.approve()
998
997
999 Session().add(self.pull_request)
998 Session().add(self.pull_request)
1000 Session().commit()
999 Session().commit()
1001
1000
1002 return self.pull_request
1001 return self.pull_request
1003
1002
1004 def approve(self):
1003 def approve(self):
1005 self.create_status_votes(
1004 self.create_status_votes(
1006 ChangesetStatus.STATUS_APPROVED,
1005 ChangesetStatus.STATUS_APPROVED,
1007 *self.pull_request.reviewers)
1006 *self.pull_request.reviewers)
1008
1007
1009 def close(self):
1008 def close(self):
1010 PullRequestModel().close_pull_request(self.pull_request, self.author)
1009 PullRequestModel().close_pull_request(self.pull_request, self.author)
1011
1010
1012 def _default_branch_reference(self, commit_message):
1011 def _default_branch_reference(self, commit_message):
1013 reference = '%s:%s:%s' % (
1012 reference = '%s:%s:%s' % (
1014 'branch',
1013 'branch',
1015 self.backend.default_branch_name,
1014 self.backend.default_branch_name,
1016 self.commit_ids[commit_message])
1015 self.commit_ids[commit_message])
1017 return reference
1016 return reference
1018
1017
1019 def _get_reviewers(self):
1018 def _get_reviewers(self):
1020 return [
1019 return [
1021 (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
1020 (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
1022 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
1021 (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
1023 ]
1022 ]
1024
1023
1025 def update_source_repository(self, head=None):
1024 def update_source_repository(self, head=None):
1026 heads = [head or 'c3']
1025 heads = [head or 'c3']
1027 self.backend.pull_heads(self.source_repository, heads=heads)
1026 self.backend.pull_heads(self.source_repository, heads=heads)
1028
1027
1029 def add_one_commit(self, head=None):
1028 def add_one_commit(self, head=None):
1030 self.update_source_repository(head=head)
1029 self.update_source_repository(head=head)
1031 old_commit_ids = set(self.pull_request.revisions)
1030 old_commit_ids = set(self.pull_request.revisions)
1032 PullRequestModel().update_commits(self.pull_request)
1031 PullRequestModel().update_commits(self.pull_request)
1033 commit_ids = set(self.pull_request.revisions)
1032 commit_ids = set(self.pull_request.revisions)
1034 new_commit_ids = commit_ids - old_commit_ids
1033 new_commit_ids = commit_ids - old_commit_ids
1035 assert len(new_commit_ids) == 1
1034 assert len(new_commit_ids) == 1
1036 return new_commit_ids.pop()
1035 return new_commit_ids.pop()
1037
1036
1038 def remove_one_commit(self):
1037 def remove_one_commit(self):
1039 assert len(self.pull_request.revisions) == 2
1038 assert len(self.pull_request.revisions) == 2
1040 source_vcs = self.source_repository.scm_instance()
1039 source_vcs = self.source_repository.scm_instance()
1041 removed_commit_id = source_vcs.commit_ids[-1]
1040 removed_commit_id = source_vcs.commit_ids[-1]
1042
1041
1043 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1042 # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
1044 # remove the if once that's sorted out.
1043 # remove the if once that's sorted out.
1045 if self.backend.alias == "git":
1044 if self.backend.alias == "git":
1046 kwargs = {'branch_name': self.backend.default_branch_name}
1045 kwargs = {'branch_name': self.backend.default_branch_name}
1047 else:
1046 else:
1048 kwargs = {}
1047 kwargs = {}
1049 source_vcs.strip(removed_commit_id, **kwargs)
1048 source_vcs.strip(removed_commit_id, **kwargs)
1050
1049
1051 PullRequestModel().update_commits(self.pull_request)
1050 PullRequestModel().update_commits(self.pull_request)
1052 assert len(self.pull_request.revisions) == 1
1051 assert len(self.pull_request.revisions) == 1
1053 return removed_commit_id
1052 return removed_commit_id
1054
1053
1055 def create_comment(self, linked_to=None):
1054 def create_comment(self, linked_to=None):
1056 comment = CommentsModel().create(
1055 comment = CommentsModel().create(
1057 text=u"Test comment",
1056 text=u"Test comment",
1058 repo=self.target_repository.repo_name,
1057 repo=self.target_repository.repo_name,
1059 user=self.author,
1058 user=self.author,
1060 pull_request=self.pull_request)
1059 pull_request=self.pull_request)
1061 assert comment.pull_request_version_id is None
1060 assert comment.pull_request_version_id is None
1062
1061
1063 if linked_to:
1062 if linked_to:
1064 PullRequestModel()._link_comments_to_version(linked_to)
1063 PullRequestModel()._link_comments_to_version(linked_to)
1065
1064
1066 return comment
1065 return comment
1067
1066
1068 def create_inline_comment(
1067 def create_inline_comment(
1069 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1068 self, linked_to=None, line_no=u'n1', file_path='file_1'):
1070 comment = CommentsModel().create(
1069 comment = CommentsModel().create(
1071 text=u"Test comment",
1070 text=u"Test comment",
1072 repo=self.target_repository.repo_name,
1071 repo=self.target_repository.repo_name,
1073 user=self.author,
1072 user=self.author,
1074 line_no=line_no,
1073 line_no=line_no,
1075 f_path=file_path,
1074 f_path=file_path,
1076 pull_request=self.pull_request)
1075 pull_request=self.pull_request)
1077 assert comment.pull_request_version_id is None
1076 assert comment.pull_request_version_id is None
1078
1077
1079 if linked_to:
1078 if linked_to:
1080 PullRequestModel()._link_comments_to_version(linked_to)
1079 PullRequestModel()._link_comments_to_version(linked_to)
1081
1080
1082 return comment
1081 return comment
1083
1082
1084 def create_version_of_pull_request(self):
1083 def create_version_of_pull_request(self):
1085 pull_request = self.create_pull_request()
1084 pull_request = self.create_pull_request()
1086 version = PullRequestModel()._create_version_from_snapshot(
1085 version = PullRequestModel()._create_version_from_snapshot(
1087 pull_request)
1086 pull_request)
1088 return version
1087 return version
1089
1088
1090 def create_status_votes(self, status, *reviewers):
1089 def create_status_votes(self, status, *reviewers):
1091 for reviewer in reviewers:
1090 for reviewer in reviewers:
1092 ChangesetStatusModel().set_status(
1091 ChangesetStatusModel().set_status(
1093 repo=self.pull_request.target_repo,
1092 repo=self.pull_request.target_repo,
1094 status=status,
1093 status=status,
1095 user=reviewer.user_id,
1094 user=reviewer.user_id,
1096 pull_request=self.pull_request)
1095 pull_request=self.pull_request)
1097
1096
1098 def set_mergeable(self, value):
1097 def set_mergeable(self, value):
1099 if not self.mergeable_patcher:
1098 if not self.mergeable_patcher:
1100 self.mergeable_patcher = mock.patch.object(
1099 self.mergeable_patcher = mock.patch.object(
1101 VcsSettingsModel, 'get_general_settings')
1100 VcsSettingsModel, 'get_general_settings')
1102 self.mergeable_mock = self.mergeable_patcher.start()
1101 self.mergeable_mock = self.mergeable_patcher.start()
1103 self.mergeable_mock.return_value = {
1102 self.mergeable_mock.return_value = {
1104 'rhodecode_pr_merge_enabled': value}
1103 'rhodecode_pr_merge_enabled': value}
1105
1104
1106 def cleanup(self):
1105 def cleanup(self):
1107 # In case the source repository is already cleaned up, the pull
1106 # In case the source repository is already cleaned up, the pull
1108 # request will already be deleted.
1107 # request will already be deleted.
1109 pull_request = PullRequest().get(self.pull_request_id)
1108 pull_request = PullRequest().get(self.pull_request_id)
1110 if pull_request:
1109 if pull_request:
1111 PullRequestModel().delete(pull_request, pull_request.author)
1110 PullRequestModel().delete(pull_request, pull_request.author)
1112 Session().commit()
1111 Session().commit()
1113
1112
1114 if self.notification_patcher:
1113 if self.notification_patcher:
1115 self.notification_patcher.stop()
1114 self.notification_patcher.stop()
1116
1115
1117 if self.mergeable_patcher:
1116 if self.mergeable_patcher:
1118 self.mergeable_patcher.stop()
1117 self.mergeable_patcher.stop()
1119
1118
1120
1119
1121 @pytest.fixture
1120 @pytest.fixture
1122 def user_admin(baseapp):
1121 def user_admin(baseapp):
1123 """
1122 """
1124 Provides the default admin test user as an instance of `db.User`.
1123 Provides the default admin test user as an instance of `db.User`.
1125 """
1124 """
1126 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1125 user = UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1127 return user
1126 return user
1128
1127
1129
1128
1130 @pytest.fixture
1129 @pytest.fixture
1131 def user_regular(baseapp):
1130 def user_regular(baseapp):
1132 """
1131 """
1133 Provides the default regular test user as an instance of `db.User`.
1132 Provides the default regular test user as an instance of `db.User`.
1134 """
1133 """
1135 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1134 user = UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1136 return user
1135 return user
1137
1136
1138
1137
1139 @pytest.fixture
1138 @pytest.fixture
1140 def user_util(request, db_connection):
1139 def user_util(request, db_connection):
1141 """
1140 """
1142 Provides a wired instance of `UserUtility` with integrated cleanup.
1141 Provides a wired instance of `UserUtility` with integrated cleanup.
1143 """
1142 """
1144 utility = UserUtility(test_name=request.node.name)
1143 utility = UserUtility(test_name=request.node.name)
1145 request.addfinalizer(utility.cleanup)
1144 request.addfinalizer(utility.cleanup)
1146 return utility
1145 return utility
1147
1146
1148
1147
1149 # TODO: johbo: Split this up into utilities per domain or something similar
1148 # TODO: johbo: Split this up into utilities per domain or something similar
1150 class UserUtility(object):
1149 class UserUtility(object):
1151
1150
1152 def __init__(self, test_name="test"):
1151 def __init__(self, test_name="test"):
1153 self._test_name = self._sanitize_name(test_name)
1152 self._test_name = self._sanitize_name(test_name)
1154 self.fixture = Fixture()
1153 self.fixture = Fixture()
1155 self.repo_group_ids = []
1154 self.repo_group_ids = []
1156 self.repos_ids = []
1155 self.repos_ids = []
1157 self.user_ids = []
1156 self.user_ids = []
1158 self.user_group_ids = []
1157 self.user_group_ids = []
1159 self.user_repo_permission_ids = []
1158 self.user_repo_permission_ids = []
1160 self.user_group_repo_permission_ids = []
1159 self.user_group_repo_permission_ids = []
1161 self.user_repo_group_permission_ids = []
1160 self.user_repo_group_permission_ids = []
1162 self.user_group_repo_group_permission_ids = []
1161 self.user_group_repo_group_permission_ids = []
1163 self.user_user_group_permission_ids = []
1162 self.user_user_group_permission_ids = []
1164 self.user_group_user_group_permission_ids = []
1163 self.user_group_user_group_permission_ids = []
1165 self.user_permissions = []
1164 self.user_permissions = []
1166
1165
1167 def _sanitize_name(self, name):
1166 def _sanitize_name(self, name):
1168 for char in ['[', ']']:
1167 for char in ['[', ']']:
1169 name = name.replace(char, '_')
1168 name = name.replace(char, '_')
1170 return name
1169 return name
1171
1170
1172 def create_repo_group(
1171 def create_repo_group(
1173 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1172 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1174 group_name = "{prefix}_repogroup_{count}".format(
1173 group_name = "{prefix}_repogroup_{count}".format(
1175 prefix=self._test_name,
1174 prefix=self._test_name,
1176 count=len(self.repo_group_ids))
1175 count=len(self.repo_group_ids))
1177 repo_group = self.fixture.create_repo_group(
1176 repo_group = self.fixture.create_repo_group(
1178 group_name, cur_user=owner)
1177 group_name, cur_user=owner)
1179 if auto_cleanup:
1178 if auto_cleanup:
1180 self.repo_group_ids.append(repo_group.group_id)
1179 self.repo_group_ids.append(repo_group.group_id)
1181 return repo_group
1180 return repo_group
1182
1181
1183 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1182 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1184 auto_cleanup=True, repo_type='hg', bare=False):
1183 auto_cleanup=True, repo_type='hg', bare=False):
1185 repo_name = "{prefix}_repository_{count}".format(
1184 repo_name = "{prefix}_repository_{count}".format(
1186 prefix=self._test_name,
1185 prefix=self._test_name,
1187 count=len(self.repos_ids))
1186 count=len(self.repos_ids))
1188
1187
1189 repository = self.fixture.create_repo(
1188 repository = self.fixture.create_repo(
1190 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1189 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1191 if auto_cleanup:
1190 if auto_cleanup:
1192 self.repos_ids.append(repository.repo_id)
1191 self.repos_ids.append(repository.repo_id)
1193 return repository
1192 return repository
1194
1193
1195 def create_user(self, auto_cleanup=True, **kwargs):
1194 def create_user(self, auto_cleanup=True, **kwargs):
1196 user_name = "{prefix}_user_{count}".format(
1195 user_name = "{prefix}_user_{count}".format(
1197 prefix=self._test_name,
1196 prefix=self._test_name,
1198 count=len(self.user_ids))
1197 count=len(self.user_ids))
1199 user = self.fixture.create_user(user_name, **kwargs)
1198 user = self.fixture.create_user(user_name, **kwargs)
1200 if auto_cleanup:
1199 if auto_cleanup:
1201 self.user_ids.append(user.user_id)
1200 self.user_ids.append(user.user_id)
1202 return user
1201 return user
1203
1202
1204 def create_additional_user_email(self, user, email):
1203 def create_additional_user_email(self, user, email):
1205 uem = self.fixture.create_additional_user_email(user=user, email=email)
1204 uem = self.fixture.create_additional_user_email(user=user, email=email)
1206 return uem
1205 return uem
1207
1206
1208 def create_user_with_group(self):
1207 def create_user_with_group(self):
1209 user = self.create_user()
1208 user = self.create_user()
1210 user_group = self.create_user_group(members=[user])
1209 user_group = self.create_user_group(members=[user])
1211 return user, user_group
1210 return user, user_group
1212
1211
1213 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1212 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1214 auto_cleanup=True, **kwargs):
1213 auto_cleanup=True, **kwargs):
1215 group_name = "{prefix}_usergroup_{count}".format(
1214 group_name = "{prefix}_usergroup_{count}".format(
1216 prefix=self._test_name,
1215 prefix=self._test_name,
1217 count=len(self.user_group_ids))
1216 count=len(self.user_group_ids))
1218 user_group = self.fixture.create_user_group(
1217 user_group = self.fixture.create_user_group(
1219 group_name, cur_user=owner, **kwargs)
1218 group_name, cur_user=owner, **kwargs)
1220
1219
1221 if auto_cleanup:
1220 if auto_cleanup:
1222 self.user_group_ids.append(user_group.users_group_id)
1221 self.user_group_ids.append(user_group.users_group_id)
1223 if members:
1222 if members:
1224 for user in members:
1223 for user in members:
1225 UserGroupModel().add_user_to_group(user_group, user)
1224 UserGroupModel().add_user_to_group(user_group, user)
1226 return user_group
1225 return user_group
1227
1226
1228 def grant_user_permission(self, user_name, permission_name):
1227 def grant_user_permission(self, user_name, permission_name):
1229 self.inherit_default_user_permissions(user_name, False)
1228 self.inherit_default_user_permissions(user_name, False)
1230 self.user_permissions.append((user_name, permission_name))
1229 self.user_permissions.append((user_name, permission_name))
1231
1230
1232 def grant_user_permission_to_repo_group(
1231 def grant_user_permission_to_repo_group(
1233 self, repo_group, user, permission_name):
1232 self, repo_group, user, permission_name):
1234 permission = RepoGroupModel().grant_user_permission(
1233 permission = RepoGroupModel().grant_user_permission(
1235 repo_group, user, permission_name)
1234 repo_group, user, permission_name)
1236 self.user_repo_group_permission_ids.append(
1235 self.user_repo_group_permission_ids.append(
1237 (repo_group.group_id, user.user_id))
1236 (repo_group.group_id, user.user_id))
1238 return permission
1237 return permission
1239
1238
1240 def grant_user_group_permission_to_repo_group(
1239 def grant_user_group_permission_to_repo_group(
1241 self, repo_group, user_group, permission_name):
1240 self, repo_group, user_group, permission_name):
1242 permission = RepoGroupModel().grant_user_group_permission(
1241 permission = RepoGroupModel().grant_user_group_permission(
1243 repo_group, user_group, permission_name)
1242 repo_group, user_group, permission_name)
1244 self.user_group_repo_group_permission_ids.append(
1243 self.user_group_repo_group_permission_ids.append(
1245 (repo_group.group_id, user_group.users_group_id))
1244 (repo_group.group_id, user_group.users_group_id))
1246 return permission
1245 return permission
1247
1246
1248 def grant_user_permission_to_repo(
1247 def grant_user_permission_to_repo(
1249 self, repo, user, permission_name):
1248 self, repo, user, permission_name):
1250 permission = RepoModel().grant_user_permission(
1249 permission = RepoModel().grant_user_permission(
1251 repo, user, permission_name)
1250 repo, user, permission_name)
1252 self.user_repo_permission_ids.append(
1251 self.user_repo_permission_ids.append(
1253 (repo.repo_id, user.user_id))
1252 (repo.repo_id, user.user_id))
1254 return permission
1253 return permission
1255
1254
1256 def grant_user_group_permission_to_repo(
1255 def grant_user_group_permission_to_repo(
1257 self, repo, user_group, permission_name):
1256 self, repo, user_group, permission_name):
1258 permission = RepoModel().grant_user_group_permission(
1257 permission = RepoModel().grant_user_group_permission(
1259 repo, user_group, permission_name)
1258 repo, user_group, permission_name)
1260 self.user_group_repo_permission_ids.append(
1259 self.user_group_repo_permission_ids.append(
1261 (repo.repo_id, user_group.users_group_id))
1260 (repo.repo_id, user_group.users_group_id))
1262 return permission
1261 return permission
1263
1262
1264 def grant_user_permission_to_user_group(
1263 def grant_user_permission_to_user_group(
1265 self, target_user_group, user, permission_name):
1264 self, target_user_group, user, permission_name):
1266 permission = UserGroupModel().grant_user_permission(
1265 permission = UserGroupModel().grant_user_permission(
1267 target_user_group, user, permission_name)
1266 target_user_group, user, permission_name)
1268 self.user_user_group_permission_ids.append(
1267 self.user_user_group_permission_ids.append(
1269 (target_user_group.users_group_id, user.user_id))
1268 (target_user_group.users_group_id, user.user_id))
1270 return permission
1269 return permission
1271
1270
1272 def grant_user_group_permission_to_user_group(
1271 def grant_user_group_permission_to_user_group(
1273 self, target_user_group, user_group, permission_name):
1272 self, target_user_group, user_group, permission_name):
1274 permission = UserGroupModel().grant_user_group_permission(
1273 permission = UserGroupModel().grant_user_group_permission(
1275 target_user_group, user_group, permission_name)
1274 target_user_group, user_group, permission_name)
1276 self.user_group_user_group_permission_ids.append(
1275 self.user_group_user_group_permission_ids.append(
1277 (target_user_group.users_group_id, user_group.users_group_id))
1276 (target_user_group.users_group_id, user_group.users_group_id))
1278 return permission
1277 return permission
1279
1278
1280 def revoke_user_permission(self, user_name, permission_name):
1279 def revoke_user_permission(self, user_name, permission_name):
1281 self.inherit_default_user_permissions(user_name, True)
1280 self.inherit_default_user_permissions(user_name, True)
1282 UserModel().revoke_perm(user_name, permission_name)
1281 UserModel().revoke_perm(user_name, permission_name)
1283
1282
1284 def inherit_default_user_permissions(self, user_name, value):
1283 def inherit_default_user_permissions(self, user_name, value):
1285 user = UserModel().get_by_username(user_name)
1284 user = UserModel().get_by_username(user_name)
1286 user.inherit_default_permissions = value
1285 user.inherit_default_permissions = value
1287 Session().add(user)
1286 Session().add(user)
1288 Session().commit()
1287 Session().commit()
1289
1288
1290 def cleanup(self):
1289 def cleanup(self):
1291 self._cleanup_permissions()
1290 self._cleanup_permissions()
1292 self._cleanup_repos()
1291 self._cleanup_repos()
1293 self._cleanup_repo_groups()
1292 self._cleanup_repo_groups()
1294 self._cleanup_user_groups()
1293 self._cleanup_user_groups()
1295 self._cleanup_users()
1294 self._cleanup_users()
1296
1295
1297 def _cleanup_permissions(self):
1296 def _cleanup_permissions(self):
1298 if self.user_permissions:
1297 if self.user_permissions:
1299 for user_name, permission_name in self.user_permissions:
1298 for user_name, permission_name in self.user_permissions:
1300 self.revoke_user_permission(user_name, permission_name)
1299 self.revoke_user_permission(user_name, permission_name)
1301
1300
1302 for permission in self.user_repo_permission_ids:
1301 for permission in self.user_repo_permission_ids:
1303 RepoModel().revoke_user_permission(*permission)
1302 RepoModel().revoke_user_permission(*permission)
1304
1303
1305 for permission in self.user_group_repo_permission_ids:
1304 for permission in self.user_group_repo_permission_ids:
1306 RepoModel().revoke_user_group_permission(*permission)
1305 RepoModel().revoke_user_group_permission(*permission)
1307
1306
1308 for permission in self.user_repo_group_permission_ids:
1307 for permission in self.user_repo_group_permission_ids:
1309 RepoGroupModel().revoke_user_permission(*permission)
1308 RepoGroupModel().revoke_user_permission(*permission)
1310
1309
1311 for permission in self.user_group_repo_group_permission_ids:
1310 for permission in self.user_group_repo_group_permission_ids:
1312 RepoGroupModel().revoke_user_group_permission(*permission)
1311 RepoGroupModel().revoke_user_group_permission(*permission)
1313
1312
1314 for permission in self.user_user_group_permission_ids:
1313 for permission in self.user_user_group_permission_ids:
1315 UserGroupModel().revoke_user_permission(*permission)
1314 UserGroupModel().revoke_user_permission(*permission)
1316
1315
1317 for permission in self.user_group_user_group_permission_ids:
1316 for permission in self.user_group_user_group_permission_ids:
1318 UserGroupModel().revoke_user_group_permission(*permission)
1317 UserGroupModel().revoke_user_group_permission(*permission)
1319
1318
1320 def _cleanup_repo_groups(self):
1319 def _cleanup_repo_groups(self):
1321 def _repo_group_compare(first_group_id, second_group_id):
1320 def _repo_group_compare(first_group_id, second_group_id):
1322 """
1321 """
1323 Gives higher priority to the groups with the most complex paths
1322 Gives higher priority to the groups with the most complex paths
1324 """
1323 """
1325 first_group = RepoGroup.get(first_group_id)
1324 first_group = RepoGroup.get(first_group_id)
1326 second_group = RepoGroup.get(second_group_id)
1325 second_group = RepoGroup.get(second_group_id)
1327 first_group_parts = (
1326 first_group_parts = (
1328 len(first_group.group_name.split('/')) if first_group else 0)
1327 len(first_group.group_name.split('/')) if first_group else 0)
1329 second_group_parts = (
1328 second_group_parts = (
1330 len(second_group.group_name.split('/')) if second_group else 0)
1329 len(second_group.group_name.split('/')) if second_group else 0)
1331 return cmp(second_group_parts, first_group_parts)
1330 return cmp(second_group_parts, first_group_parts)
1332
1331
1333 sorted_repo_group_ids = sorted(
1332 sorted_repo_group_ids = sorted(
1334 self.repo_group_ids, cmp=_repo_group_compare)
1333 self.repo_group_ids, cmp=_repo_group_compare)
1335 for repo_group_id in sorted_repo_group_ids:
1334 for repo_group_id in sorted_repo_group_ids:
1336 self.fixture.destroy_repo_group(repo_group_id)
1335 self.fixture.destroy_repo_group(repo_group_id)
1337
1336
1338 def _cleanup_repos(self):
1337 def _cleanup_repos(self):
1339 sorted_repos_ids = sorted(self.repos_ids)
1338 sorted_repos_ids = sorted(self.repos_ids)
1340 for repo_id in sorted_repos_ids:
1339 for repo_id in sorted_repos_ids:
1341 self.fixture.destroy_repo(repo_id)
1340 self.fixture.destroy_repo(repo_id)
1342
1341
1343 def _cleanup_user_groups(self):
1342 def _cleanup_user_groups(self):
1344 def _user_group_compare(first_group_id, second_group_id):
1343 def _user_group_compare(first_group_id, second_group_id):
1345 """
1344 """
1346 Gives higher priority to the groups with the most complex paths
1345 Gives higher priority to the groups with the most complex paths
1347 """
1346 """
1348 first_group = UserGroup.get(first_group_id)
1347 first_group = UserGroup.get(first_group_id)
1349 second_group = UserGroup.get(second_group_id)
1348 second_group = UserGroup.get(second_group_id)
1350 first_group_parts = (
1349 first_group_parts = (
1351 len(first_group.users_group_name.split('/'))
1350 len(first_group.users_group_name.split('/'))
1352 if first_group else 0)
1351 if first_group else 0)
1353 second_group_parts = (
1352 second_group_parts = (
1354 len(second_group.users_group_name.split('/'))
1353 len(second_group.users_group_name.split('/'))
1355 if second_group else 0)
1354 if second_group else 0)
1356 return cmp(second_group_parts, first_group_parts)
1355 return cmp(second_group_parts, first_group_parts)
1357
1356
1358 sorted_user_group_ids = sorted(
1357 sorted_user_group_ids = sorted(
1359 self.user_group_ids, cmp=_user_group_compare)
1358 self.user_group_ids, cmp=_user_group_compare)
1360 for user_group_id in sorted_user_group_ids:
1359 for user_group_id in sorted_user_group_ids:
1361 self.fixture.destroy_user_group(user_group_id)
1360 self.fixture.destroy_user_group(user_group_id)
1362
1361
1363 def _cleanup_users(self):
1362 def _cleanup_users(self):
1364 for user_id in self.user_ids:
1363 for user_id in self.user_ids:
1365 self.fixture.destroy_user(user_id)
1364 self.fixture.destroy_user(user_id)
1366
1365
1367
1366
# TODO: Think about moving this into a pytest-pyro package and make it a
# pytest plugin
@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """
    Add the remote traceback to the report if the exception carries one.

    VCSServer attaches this information as the attribute
    `_vcs_server_traceback` to the exception instance.
    """
    outcome = yield
    report = outcome.get_result()
    if call.excinfo:
        _add_vcsserver_remote_traceback(report, call.excinfo.value)
1384 def _add_vcsserver_remote_traceback(report, exc):
1383 def _add_vcsserver_remote_traceback(report, exc):
1385 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1384 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1386
1385
1387 if vcsserver_traceback:
1386 if vcsserver_traceback:
1388 section = 'VCSServer remote traceback ' + report.when
1387 section = 'VCSServer remote traceback ' + report.when
1389 report.sections.append((section, vcsserver_traceback))
1388 report.sections.append((section, vcsserver_traceback))
1390
1389
1391
1390
@pytest.fixture(scope='session')
def testrun():
    """Session-wide metadata identifying this test run."""
    return {
        'uuid': uuid.uuid4(),
        'start': datetime.datetime.utcnow().isoformat(),
        'timestamp': int(time.time()),
    }
@pytest.fixture(autouse=True)
def collect_appenlight_stats(request, testrun):
    """
    Report memory consumption of single tests to Appenlight.

    Data is gathered via `psutil` for both the test process and the
    VCSServer process. Enable with ``--appenlight`` and provide the API
    key for your application in ``--appenlight-api-key``.
    """
    try:
        # psutil has no support on some platforms (e.g. cygwin).
        import psutil
    except ImportError:
        return

    if not request.config.getoption('--appenlight'):
        return

    # Only request the baseapp fixture when appenlight tracking is
    # enabled; this keeps a plain unit-test run 2-3 seconds faster.
    baseapp = request.getfuncargvalue("baseapp")
    url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
    client = AppenlightClient(
        url=url,
        api_key=request.config.getoption('--appenlight-api-key'),
        namespace=request.node.nodeid,
        request=str(testrun['uuid']),
        testrun=testrun)

    client.collect({'message': "Starting"})

    settings = baseapp.config.get_settings()
    server = create_vcsserver_proxy(
        settings['vcs.server'], settings['vcs.server.protocol'])
    with server:
        vcs_pid = server.get_pid()
        server.run_gc()
        vcs_process = psutil.Process(vcs_pid)
        mem = vcs_process.memory_info()
        client.tag_before('vcsserver.rss', mem.rss)
        client.tag_before('vcsserver.vms', mem.vms)

        test_process = psutil.Process()
        mem = test_process.memory_info()
        client.tag_before('test.rss', mem.rss)
        client.tag_before('test.vms', mem.vms)

        client.tag_before('time', time.time())

    @request.addfinalizer
    def send_stats():
        client.tag_after('time', time.time())
        with server:
            for tag, value in server.run_gc().items():
                client.tag_after(tag, value)
            mem = vcs_process.memory_info()
            client.tag_after('vcsserver.rss', mem.rss)
            client.tag_after('vcsserver.vms', mem.vms)

        mem = test_process.memory_info()
        client.tag_after('test.rss', mem.rss)
        client.tag_after('test.vms', mem.vms)

        client.collect({'message': "Finished"})
        client.send_stats()

    return client
class AppenlightClient(object):
    """Collects tags and log entries for one test and ships them to Appenlight."""

    url_template = '{url}?protocol_version=0.5'

    def __init__(
            self, url, api_key, add_server=True, add_timestamp=True,
            namespace=None, request=None, testrun=None):
        self.url = self.url_template.format(url=url)
        self.api_key = api_key
        self.add_server = add_server
        self.add_timestamp = add_timestamp
        self.namespace = namespace
        self.request = request
        # Fully qualified name of the host running the tests.
        self.server = socket.getfqdn(socket.gethostname())
        self.tags_before = {}
        self.tags_after = {}
        self.stats = []
        self.testrun = testrun or {}

    def tag_before(self, tag, value):
        """Record a measurement taken before the test ran."""
        self.tags_before[tag] = value

    def tag_after(self, tag, value):
        """Record a measurement taken after the test ran."""
        self.tags_after[tag] = value

    def collect(self, data):
        """Queue one log entry, filling in server/date/namespace/request defaults."""
        if self.add_server:
            data.setdefault('server', self.server)
        if self.add_timestamp:
            data.setdefault('date', datetime.datetime.utcnow().isoformat())
        if self.namespace:
            data.setdefault('namespace', self.namespace)
        if self.request:
            data.setdefault('request', self.request)
        self.stats.append(data)

    def send_stats(self):
        """POST all queued entries to Appenlight; raise on a non-200 response."""
        tags = [
            ('testrun', self.request),
            ('testrun.start', self.testrun['start']),
            ('testrun.timestamp', self.testrun['timestamp']),
            ('test', self.namespace),
        ]
        for key, value in self.tags_before.items():
            tags.append((key + '.before', value))
            try:
                tags.append((key + '.delta', self.tags_after[key] - value))
            except Exception:
                # No matching "after" tag or value is not numeric: skip delta.
                pass
        for key, value in self.tags_after.items():
            tags.append((key + '.after', value))
        self.collect({
            'message': "Collected tags",
            'tags': tags,
        })

        response = requests.post(
            self.url,
            headers={
                'X-appenlight-api-key': self.api_key},
            json=self.stats,
        )

        if response.status_code != 200:
            pprint.pprint(self.stats)
            print(response.headers)
            print(response.text)
            raise Exception('Sending to appenlight failed')
@pytest.fixture
def gist_util(request, db_connection):
    """
    Provides a wired instance of `GistUtility` with integrated cleanup.
    """
    helper = GistUtility()
    request.addfinalizer(helper.cleanup)
    return helper
class GistUtility(object):
    """Creates gists via ``Fixture`` and destroys them again on cleanup."""

    def __init__(self):
        self.fixture = Fixture()
        self.gist_ids = []

    def create_gist(self, **kwargs):
        """Create a gist and remember its id for later cleanup."""
        new_gist = self.fixture.create_gist(**kwargs)
        self.gist_ids.append(new_gist.gist_id)
        return new_gist

    def cleanup(self):
        """Destroy every gist created through this utility."""
        for gist_id in self.gist_ids:
            self.fixture.destroy_gists(str(gist_id))
1571
1495
@pytest.fixture
def enabled_backends(request):
    """A copy of the backend names enabled for this test run."""
    return list(request.config.option.backends)
1577
1501
@pytest.fixture
def settings_util(request, db_connection):
    """
    Provides a wired instance of `SettingsUtility` with integrated cleanup.
    """
    helper = SettingsUtility()
    request.addfinalizer(helper.cleanup)
    return helper
1587
1511
class SettingsUtility(object):
    """Creates RhodeCode settings/ui rows and deletes them again on cleanup."""

    def __init__(self):
        self.rhodecode_ui_ids = []
        self.rhodecode_setting_ids = []
        self.repo_rhodecode_ui_ids = []
        self.repo_rhodecode_setting_ids = []

    def create_repo_rhodecode_ui(
            self, repo, section, value, key=None, active=True, cleanup=True):
        """Create a per-repo ui row; tracked for cleanup unless disabled."""
        # Derive a deterministic key from the content when none is given.
        key = key or hashlib.sha1(
            '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()

        ui_setting = RepoRhodeCodeUi()
        ui_setting.repository_id = repo.repo_id
        ui_setting.ui_section = section
        ui_setting.ui_value = value
        ui_setting.ui_key = key
        ui_setting.ui_active = active
        session = Session()
        session.add(ui_setting)
        session.commit()

        if cleanup:
            self.repo_rhodecode_ui_ids.append(ui_setting.ui_id)
        return ui_setting

    def create_rhodecode_ui(
            self, section, value, key=None, active=True, cleanup=True):
        """Create a global ui row; tracked for cleanup unless disabled."""
        key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()

        ui_setting = RhodeCodeUi()
        ui_setting.ui_section = section
        ui_setting.ui_value = value
        ui_setting.ui_key = key
        ui_setting.ui_active = active
        session = Session()
        session.add(ui_setting)
        session.commit()

        if cleanup:
            self.rhodecode_ui_ids.append(ui_setting.ui_id)
        return ui_setting

    def create_repo_rhodecode_setting(
            self, repo, name, value, type_, cleanup=True):
        """Create a per-repo setting row; tracked for cleanup unless disabled."""
        new_setting = RepoRhodeCodeSetting(
            repo.repo_id, key=name, val=value, type=type_)
        session = Session()
        session.add(new_setting)
        session.commit()

        if cleanup:
            self.repo_rhodecode_setting_ids.append(new_setting.app_settings_id)
        return new_setting

    def create_rhodecode_setting(self, name, value, type_, cleanup=True):
        """Create a global setting row; tracked for cleanup unless disabled."""
        new_setting = RhodeCodeSetting(key=name, val=value, type=type_)
        session = Session()
        session.add(new_setting)
        session.commit()

        if cleanup:
            self.rhodecode_setting_ids.append(new_setting.app_settings_id)

        return new_setting

    def cleanup(self):
        """Delete every tracked row and commit once at the end."""
        tracked = (
            (RhodeCodeUi, self.rhodecode_ui_ids),
            (RhodeCodeSetting, self.rhodecode_setting_ids),
            (RepoRhodeCodeUi, self.repo_rhodecode_ui_ids),
            (RepoRhodeCodeSetting, self.repo_rhodecode_setting_ids),
        )
        for model, ids in tracked:
            for id_ in ids:
                Session().delete(model.get(id_))

        Session().commit()
1669
1593
@pytest.fixture
def no_notifications(request):
    """Patch out NotificationModel.create for the duration of a test."""
    patcher = mock.patch(
        'rhodecode.model.notification.NotificationModel.create')
    patcher.start()
    request.addfinalizer(patcher.stop)
1677
1601
@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that
    the tests are not too slow in our default test suite.
    """
    repetitions = request.config.getoption('--repeat')
    return repetitions
1688
1612
@pytest.fixture
def rhodecode_fixtures():
    """A fresh ``Fixture`` helper instance."""
    return Fixture()
1693
1617
@pytest.fixture
def context_stub():
    """
    Stub context object.
    """
    return pyramid.testing.DummyResource()
1702
1626
@pytest.fixture
def request_stub():
    """
    Stub request object.
    """
    from rhodecode.lib.base import bootstrap_request
    return bootstrap_request(scheme='https')
1712
1636
@pytest.fixture
def config_stub(request, request_stub):
    """
    Set up pyramid.testing and return the Configurator.
    """
    from rhodecode.lib.base import bootstrap_config
    config = bootstrap_config(request=request_stub)
    # Tear the testing registry down again once the test is over.
    request.addfinalizer(pyramid.testing.tearDown)
    return config
1727
1651
@pytest.fixture
def StubIntegrationType():
    """Register and return a minimal integration type used by the tests."""
    class _StubIntegrationType(IntegrationTypeBase):
        """ Test integration type class """

        key = 'test'
        display_name = 'Test integration type'
        description = 'A test integration type for testing'

        @classmethod
        def icon(cls):
            return 'test_icon_html_image'

        def __init__(self, settings):
            super(_StubIntegrationType, self).__init__(settings)
            self.sent_events = []  # recorded so tests can assert on them

        def send_event(self, event):
            self.sent_events.append(event)

        def settings_schema(self):
            class SettingsSchema(colander.Schema):
                test_string_field = colander.SchemaNode(
                    colander.String(),
                    missing=colander.required,
                    title='test string field',
                )
                test_int_field = colander.SchemaNode(
                    colander.Int(),
                    title='some integer setting',
                )

            return SettingsSchema()

    integration_type_registry.register_integration_type(_StubIntegrationType)
    return _StubIntegrationType
@pytest.fixture
def stub_integration_settings():
    """Settings payload matching the stub integration type's schema."""
    return {
        'test_string_field': 'some data',
        'test_int_field': 100,
    }
1772
1696
@pytest.fixture
def repo_integration_stub(request, repo_stub, StubIntegrationType,
                          stub_integration_settings):
    """Repo-scoped stub integration, deleted again on test teardown."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repo integration',
        repo=repo_stub, repo_group=None, child_repos_only=None)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1787
1711
@pytest.fixture
def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
                               stub_integration_settings):
    """Repo-group-scoped stub integration (children only), removed on teardown."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=True)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1802
1726
@pytest.fixture
def repogroup_recursive_integration_stub(request, test_repo_group,
                                         StubIntegrationType,
                                         stub_integration_settings):
    """Recursive repo-group-scoped stub integration, removed on teardown."""
    integration = IntegrationModel().create(
        StubIntegrationType, settings=stub_integration_settings, enabled=True,
        name='test recursive repogroup integration',
        repo=None, repo_group=test_repo_group, child_repos_only=False)

    request.addfinalizer(lambda: IntegrationModel().delete(integration))

    return integration
1817
1741
1818 @pytest.fixture
1742 @pytest.fixture
1819 def global_integration_stub(request, StubIntegrationType,
1743 def global_integration_stub(request, StubIntegrationType,
1820 stub_integration_settings):
1744 stub_integration_settings):
1821 integration = IntegrationModel().create(
1745 integration = IntegrationModel().create(
1822 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1746 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1823 name='test global integration',
1747 name='test global integration',
1824 repo=None, repo_group=None, child_repos_only=None)
1748 repo=None, repo_group=None, child_repos_only=None)
1825
1749
1826 @request.addfinalizer
1750 @request.addfinalizer
1827 def cleanup():
1751 def cleanup():
1828 IntegrationModel().delete(integration)
1752 IntegrationModel().delete(integration)
1829
1753
1830 return integration
1754 return integration
1831
1755
1832
1756
1833 @pytest.fixture
1757 @pytest.fixture
1834 def root_repos_integration_stub(request, StubIntegrationType,
1758 def root_repos_integration_stub(request, StubIntegrationType,
1835 stub_integration_settings):
1759 stub_integration_settings):
1836 integration = IntegrationModel().create(
1760 integration = IntegrationModel().create(
1837 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1761 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1838 name='test global integration',
1762 name='test global integration',
1839 repo=None, repo_group=None, child_repos_only=True)
1763 repo=None, repo_group=None, child_repos_only=True)
1840
1764
1841 @request.addfinalizer
1765 @request.addfinalizer
1842 def cleanup():
1766 def cleanup():
1843 IntegrationModel().delete(integration)
1767 IntegrationModel().delete(integration)
1844
1768
1845 return integration
1769 return integration
1846
1770
1847
1771
1848 @pytest.fixture
1772 @pytest.fixture
1849 def local_dt_to_utc():
1773 def local_dt_to_utc():
1850 def _factory(dt):
1774 def _factory(dt):
1851 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1775 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1852 dateutil.tz.tzutc()).replace(tzinfo=None)
1776 dateutil.tz.tzutc()).replace(tzinfo=None)
1853 return _factory
1777 return _factory
1854
1778
1855
1779
1856 @pytest.fixture
1780 @pytest.fixture
1857 def disable_anonymous_user(request, baseapp):
1781 def disable_anonymous_user(request, baseapp):
1858 set_anonymous_access(False)
1782 set_anonymous_access(False)
1859
1783
1860 @request.addfinalizer
1784 @request.addfinalizer
1861 def cleanup():
1785 def cleanup():
1862 set_anonymous_access(True)
1786 set_anonymous_access(True)
1863
1787
1864
1788
1865 @pytest.fixture(scope='module')
1789 @pytest.fixture(scope='module')
1866 def rc_fixture(request):
1790 def rc_fixture(request):
1867 return Fixture()
1791 return Fixture()
1868
1792
1869
1793
1870 @pytest.fixture
1794 @pytest.fixture
1871 def repo_groups(request):
1795 def repo_groups(request):
1872 fixture = Fixture()
1796 fixture = Fixture()
1873
1797
1874 session = Session()
1798 session = Session()
1875 zombie_group = fixture.create_repo_group('zombie')
1799 zombie_group = fixture.create_repo_group('zombie')
1876 parent_group = fixture.create_repo_group('parent')
1800 parent_group = fixture.create_repo_group('parent')
1877 child_group = fixture.create_repo_group('parent/child')
1801 child_group = fixture.create_repo_group('parent/child')
1878 groups_in_db = session.query(RepoGroup).all()
1802 groups_in_db = session.query(RepoGroup).all()
1879 assert len(groups_in_db) == 3
1803 assert len(groups_in_db) == 3
1880 assert child_group.group_parent_id == parent_group.group_id
1804 assert child_group.group_parent_id == parent_group.group_id
1881
1805
1882 @request.addfinalizer
1806 @request.addfinalizer
1883 def cleanup():
1807 def cleanup():
1884 fixture.destroy_repo_group(zombie_group)
1808 fixture.destroy_repo_group(zombie_group)
1885 fixture.destroy_repo_group(child_group)
1809 fixture.destroy_repo_group(child_group)
1886 fixture.destroy_repo_group(parent_group)
1810 fixture.destroy_repo_group(parent_group)
1887
1811
1888 return zombie_group, parent_group, child_group
1812 return zombie_group, parent_group, child_group
1889
1813
1890
1814
1891 @pytest.fixture(scope="session")
1815 @pytest.fixture(scope="session")
1892 def tmp_path_factory(request):
1816 def tmp_path_factory(request):
1893 """Return a :class:`_pytest.tmpdir.TempPathFactory` instance for the test session.
1817 """Return a :class:`_pytest.tmpdir.TempPathFactory` instance for the test session.
1894 """
1818 """
1895
1819
1896 class TempPathFactory:
1820 class TempPathFactory:
1897
1821
1898 def mktemp(self, basename):
1822 def mktemp(self, basename):
1899 import tempfile
1823 import tempfile
1900 return tempfile.mktemp(basename)
1824 return tempfile.mktemp(basename)
1901
1825
1902 return TempPathFactory()
1826 return TempPathFactory()
@@ -1,133 +1,133 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2010-2019 RhodeCode GmbH
3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 #
4 #
5 # This program is free software: you can redistribute it and/or modify
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU Affero General Public License, version 3
6 # it under the terms of the GNU Affero General Public License, version 3
7 # (only), as published by the Free Software Foundation.
7 # (only), as published by the Free Software Foundation.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU Affero General Public License
14 # You should have received a copy of the GNU Affero General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 #
16 #
17 # This program is dual-licensed. If you wish to learn more about the
17 # This program is dual-licensed. If you wish to learn more about the
18 # RhodeCode Enterprise Edition, including its added features, Support services,
18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20
20
21 import logging
21 import logging
22
22
23 import mock
23 import mock
24 import msgpack
24 import msgpack
25 import pytest
25 import pytest
26
26
27 from rhodecode.lib import vcs
27 from rhodecode.lib import vcs
28 from rhodecode.lib.vcs import client_http, exceptions
28 from rhodecode.lib.vcs import client_http, exceptions
29
29
30
30
31 def is_new_connection(logger, level, message):
31 def is_new_connection(logger, level, message):
32 return (
32 return (
33 logger == 'requests.packages.urllib3.connectionpool' and
33 logger == 'requests.packages.urllib3.connectionpool' and
34 message.startswith('Starting new HTTP'))
34 message.startswith('Starting new HTTP'))
35
35
36
36
37 @pytest.fixture
37 @pytest.fixture
38 def stub_session():
38 def stub_session():
39 """
39 """
40 Stub of `requests.Session()`.
40 Stub of `requests.Session()`.
41 """
41 """
42 session = mock.Mock()
42 session = mock.Mock()
43 post = session.post()
43 post = session.post()
44 post.content = msgpack.packb({})
44 post.content = msgpack.packb({})
45 post.status_code = 200
45 post.status_code = 200
46
46
47 session.reset_mock()
47 session.reset_mock()
48 return session
48 return session
49
49
50
50
51 @pytest.fixture
51 @pytest.fixture
52 def stub_fail_session():
52 def stub_fail_session():
53 """
53 """
54 Stub of `requests.Session()`.
54 Stub of `requests.Session()`.
55 """
55 """
56 session = mock.Mock()
56 session = mock.Mock()
57 post = session.post()
57 post = session.post()
58 post.content = msgpack.packb({'error': '500'})
58 post.content = msgpack.packb({'error': '500'})
59 post.status_code = 500
59 post.status_code = 500
60
60
61 session.reset_mock()
61 session.reset_mock()
62 return session
62 return session
63
63
64
64
65 @pytest.fixture
65 @pytest.fixture
66 def stub_session_factory(stub_session):
66 def stub_session_factory(stub_session):
67 """
67 """
68 Stub of `rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory`.
68 Stub of `rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory`.
69 """
69 """
70 session_factory = mock.Mock()
70 session_factory = mock.Mock()
71 session_factory.return_value = stub_session
71 session_factory.return_value = stub_session
72 return session_factory
72 return session_factory
73
73
74
74
75 @pytest.fixture
75 @pytest.fixture
76 def stub_session_failing_factory(stub_fail_session):
76 def stub_session_failing_factory(stub_fail_session):
77 """
77 """
78 Stub of `rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory`.
78 Stub of `rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory`.
79 """
79 """
80 session_factory = mock.Mock()
80 session_factory = mock.Mock()
81 session_factory.return_value = stub_fail_session
81 session_factory.return_value = stub_fail_session
82 return session_factory
82 return session_factory
83
83
84
84
85 def test_uses_persistent_http_connections(caplog, vcsbackend_hg):
85 def test_uses_persistent_http_connections(caplog, vcsbackend_hg):
86 repo = vcsbackend_hg.repo
86 repo = vcsbackend_hg.repo
87 remote_call = repo._remote.branches
87 remote_call = repo._remote.branches
88
88
89 with caplog.at_level(logging.INFO):
89 with caplog.at_level(logging.INFO):
90 for x in range(5):
90 for x in range(5):
91 remote_call(normal=True, closed=False)
91 remote_call(normal=True, closed=False)
92
92
93 new_connections = [
93 new_connections = [
94 r for r in caplog.record_tuples if is_new_connection(*r)]
94 r for r in caplog.record_tuples if is_new_connection(*r)]
95 assert len(new_connections) <= 1
95 assert len(new_connections) <= 1
96
96
97
97
98 def test_repo_maker_uses_session_for_classmethods(stub_session_factory):
98 def test_repo_maker_uses_session_for_classmethods(stub_session_factory):
99 repo_maker = client_http.RepoMaker(
99 repo_maker = client_http.RemoteVCSMaker(
100 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
100 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
101 repo_maker.example_call()
101 repo_maker.example_call()
102 stub_session_factory().post.assert_called_with(
102 stub_session_factory().post.assert_called_with(
103 'http://server_and_port/endpoint', data=mock.ANY)
103 'http://server_and_port/endpoint', data=mock.ANY)
104
104
105
105
106 def test_repo_maker_uses_session_for_instance_methods(
106 def test_repo_maker_uses_session_for_instance_methods(
107 stub_session_factory, config):
107 stub_session_factory, config):
108 repo_maker = client_http.RepoMaker(
108 repo_maker = client_http.RemoteVCSMaker(
109 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
109 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
110 repo = repo_maker('stub_path', 'stub_repo_id', config)
110 repo = repo_maker('stub_path', 'stub_repo_id', config)
111 repo.example_call()
111 repo.example_call()
112 stub_session_factory().post.assert_called_with(
112 stub_session_factory().post.assert_called_with(
113 'http://server_and_port/endpoint', data=mock.ANY)
113 'http://server_and_port/endpoint', data=mock.ANY)
114
114
115
115
116 @mock.patch('rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory')
116 @mock.patch('rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory')
117 @mock.patch('rhodecode.lib.vcs.connection')
117 @mock.patch('rhodecode.lib.vcs.connection')
118 def test_connect_passes_in_the_same_session(
118 def test_connect_passes_in_the_same_session(
119 connection, session_factory_class, stub_session):
119 connection, session_factory_class, stub_session):
120 session_factory = session_factory_class.return_value
120 session_factory = session_factory_class.return_value
121 session_factory.return_value = stub_session
121 session_factory.return_value = stub_session
122
122
123 vcs.connect_http('server_and_port')
123 vcs.connect_http('server_and_port')
124
124
125
125
126 def test_repo_maker_uses_session_that_throws_error(
126 def test_repo_maker_uses_session_that_throws_error(
127 stub_session_failing_factory, config):
127 stub_session_failing_factory, config):
128 repo_maker = client_http.RepoMaker(
128 repo_maker = client_http.RemoteVCSMaker(
129 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_failing_factory)
129 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_failing_factory)
130 repo = repo_maker('stub_path', 'stub_repo_id', config)
130 repo = repo_maker('stub_path', 'stub_repo_id', config)
131
131
132 with pytest.raises(exceptions.HttpVCSCommunicationError):
132 with pytest.raises(exceptions.HttpVCSCommunicationError):
133 repo.example_call()
133 repo.example_call()
1 NO CONTENT: file was removed
NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now