##// END OF EJS Templates
file-nodes: added streaming remote attributes for vcsserver....
dan -
r3895:2b1d7e0d default
parent child Browse files
Show More
@@ -1,1547 +1,1545 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2011-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import itertools
22 22 import logging
23 23 import os
24 24 import shutil
25 25 import tempfile
26 26 import collections
27 27 import urllib
28 28 import pathlib2
29 29
30 30 from pyramid.httpexceptions import HTTPNotFound, HTTPBadRequest, HTTPFound
31 31 from pyramid.view import view_config
32 32 from pyramid.renderers import render
33 33 from pyramid.response import Response
34 34
35 35 import rhodecode
36 36 from rhodecode.apps._base import RepoAppView
37 37
38 38
39 39 from rhodecode.lib import diffs, helpers as h, rc_cache
40 40 from rhodecode.lib import audit_logger
41 41 from rhodecode.lib.view_utils import parse_path_ref
42 42 from rhodecode.lib.exceptions import NonRelativePathError
43 43 from rhodecode.lib.codeblocks import (
44 44 filenode_as_lines_tokens, filenode_as_annotated_lines_tokens)
45 45 from rhodecode.lib.utils2 import (
46 46 convert_line_endings, detect_mode, safe_str, str2bool, safe_int, sha1, safe_unicode)
47 47 from rhodecode.lib.auth import (
48 48 LoginRequired, HasRepoPermissionAnyDecorator, CSRFRequired)
49 49 from rhodecode.lib.vcs import path as vcspath
50 50 from rhodecode.lib.vcs.backends.base import EmptyCommit
51 51 from rhodecode.lib.vcs.conf import settings
52 52 from rhodecode.lib.vcs.nodes import FileNode
53 53 from rhodecode.lib.vcs.exceptions import (
54 54 RepositoryError, CommitDoesNotExistError, EmptyRepositoryError,
55 55 ImproperArchiveTypeError, VCSError, NodeAlreadyExistsError,
56 56 NodeDoesNotExistError, CommitError, NodeError)
57 57
58 58 from rhodecode.model.scm import ScmModel
59 59 from rhodecode.model.db import Repository
60 60
61 61 log = logging.getLogger(__name__)
62 62
63 63
64 64 class RepoFilesView(RepoAppView):
65 65
    @staticmethod
    def adjust_file_path_for_svn(f_path, repo):
        """
        Computes the relative path of `f_path`.

        This is mainly based on prefix matching of the recognized tags and
        branches in the underlying repository.

        :param f_path: path that may be prefixed with a branch or tag name
        :param repo: vcs repository exposing `branches` and `tags` mappings
        :return: `f_path` with the longest matching branch/tag prefix stripped
        """
        tags_and_branches = itertools.chain(
            repo.branches.iterkeys(),
            repo.tags.iterkeys())
        # longest names first so e.g. 'stable/x' is matched before 'stable'
        tags_and_branches = sorted(tags_and_branches, key=len, reverse=True)

        for name in tags_and_branches:
            if f_path.startswith('{}/'.format(name)):
                f_path = vcspath.relpath(f_path, name)
                break
        return f_path
84 84
85 85 def load_default_context(self):
86 86 c = self._get_local_tmpl_context(include_app_defaults=True)
87 87 c.rhodecode_repo = self.rhodecode_vcs_repo
88 88 c.enable_downloads = self.db_repo.enable_downloads
89 89 return c
90 90
    def _ensure_not_locked(self, commit_id='tip'):
        """
        Abort with a redirect to the repository file browser when the
        repository is locked; no-op otherwise.

        :param commit_id: commit id used in the redirect target URL
        :raises HTTPFound: when the repository is locked by a user
        """
        _ = self.request.translate

        repo = self.db_repo
        # locked[0] holds the locking user id, locked[1] the lock timestamp
        if repo.enable_locking and repo.locked[0]:
            h.flash(_('This repository has been locked by %s on %s')
                    % (h.person_by_id(repo.locked[0]),
                       h.format_date(h.time_to_datetime(repo.locked[1]))),
                    'warning')
            files_url = h.route_path(
                'repo_files:default_path',
                repo_name=self.db_repo_name, commit_id=commit_id)
            raise HTTPFound(files_url)
104 104
    def forbid_non_head(self, is_head, f_path, commit_id='tip', json_mode=False):
        """
        Guard for file-modification views: only branch heads may be modified.

        :param is_head: result of a prior head check for `commit_id`
        :param f_path: file path used in the redirect target URL
        :param commit_id: the commit the user attempted to modify
        :param json_mode: when True, return the warning message instead of
            redirecting (for JSON-style responses)
        :return: the warning message when forbidden and in json_mode,
            otherwise None when `is_head` is true
        :raises HTTPFound: when not a head and not in json_mode
        """
        _ = self.request.translate

        if not is_head:
            message = _('Cannot modify file. '
                        'Given commit `{}` is not head of a branch.').format(commit_id)
            h.flash(message, category='warning')

            if json_mode:
                return message

            files_url = h.route_path(
                'repo_files', repo_name=self.db_repo_name, commit_id=commit_id,
                f_path=f_path)
            raise HTTPFound(files_url)
120 120
    def check_branch_permission(self, branch_name, commit_id='tip', json_mode=False):
        """
        Enforce per-branch push permission rules for the current user.

        :param branch_name: branch being modified
        :param commit_id: commit id used in the redirect target URL
        :param json_mode: when True, return the warning message instead of
            redirecting
        :return: the warning message when forbidden and in json_mode,
            otherwise None when the branch permission allows pushing
        :raises HTTPFound: when the branch rule forbids changes and not in
            json_mode
        """
        _ = self.request.translate

        rule, branch_perm = self._rhodecode_user.get_rule_and_branch_permission(
            self.db_repo_name, branch_name)
        # only 'branch.push' / 'branch.push_force' permit modifications
        if branch_perm and branch_perm not in ['branch.push', 'branch.push_force']:
            message = _('Branch `{}` changes forbidden by rule {}.').format(
                branch_name, rule)
            h.flash(message, 'warning')

            if json_mode:
                return message

            files_url = h.route_path(
                'repo_files:default_path', repo_name=self.db_repo_name, commit_id=commit_id)

            raise HTTPFound(files_url)
138 138
139 139 def _get_commit_and_path(self):
140 140 default_commit_id = self.db_repo.landing_rev[1]
141 141 default_f_path = '/'
142 142
143 143 commit_id = self.request.matchdict.get(
144 144 'commit_id', default_commit_id)
145 145 f_path = self._get_f_path(self.request.matchdict, default_f_path)
146 146 return commit_id, f_path
147 147
148 148 def _get_default_encoding(self, c):
149 149 enc_list = getattr(c, 'default_encodings', [])
150 150 return enc_list[0] if enc_list else 'UTF-8'
151 151
    def _get_commit_or_redirect(self, commit_id, redirect_after=True):
        """
        This is a safe way to get commit. If an error occurs it redirects to
        tip with proper message

        :param commit_id: id of commit to fetch
        :param redirect_after: toggle redirection
        :return: commit object, or None for an empty repo with redirection off
        :raises HTTPFound: empty repository (redirects to summary page)
        :raises HTTPNotFound: unknown commit or other repository error
        """
        _ = self.request.translate

        try:
            return self.rhodecode_vcs_repo.get_commit(commit_id)
        except EmptyRepositoryError:
            if not redirect_after:
                return None

            # offer an "add first file" link to users with write access
            _url = h.route_path(
                'repo_files_add_file',
                repo_name=self.db_repo_name, commit_id=0, f_path='')

            if h.HasRepoPermissionAny(
                    'repository.write', 'repository.admin')(self.db_repo_name):
                add_new = h.link_to(
                    _('Click here to add a new file.'), _url, class_="alert-link")
            else:
                add_new = ""

            h.flash(h.literal(
                _('There are no files yet. %s') % add_new), category='warning')
            raise HTTPFound(
                h.route_path('repo_summary', repo_name=self.db_repo_name))

        except (CommitDoesNotExistError, LookupError):
            msg = _('No such commit exists for this repository')
            h.flash(msg, category='error')
            raise HTTPNotFound()
        except RepositoryError as e:
            h.flash(safe_str(h.escape(e)), category='error')
            raise HTTPNotFound()
191 191
    def _get_filenode_or_redirect(self, commit_obj, path):
        """
        Returns file_node, if error occurs or given path is directory,
        it'll redirect to top level path

        :param commit_obj: commit to resolve `path` against
        :param path: file path inside the repository
        :raises HTTPNotFound: when the commit or path cannot be resolved,
            or the path points at a directory
        """
        _ = self.request.translate

        try:
            file_node = commit_obj.get_node(path)
            if file_node.is_dir():
                # directories are handled by the tree views, not file views
                raise RepositoryError('The given path is a directory')
        except CommitDoesNotExistError:
            log.exception('No such commit exists for this repository')
            h.flash(_('No such commit exists for this repository'), category='error')
            raise HTTPNotFound()
        except RepositoryError as e:
            log.warning('Repository error while fetching filenode `%s`. Err:%s', path, e)
            h.flash(safe_str(h.escape(e)), category='error')
            raise HTTPNotFound()

        return file_node
213 213
    def _is_valid_head(self, commit_id, repo):
        """
        Determine whether `commit_id` refers to a branch head of `repo`.

        :param commit_id: branch name or full commit sha
        :param repo: vcs repository to check against
        :return: tuple (branch_name, sha_commit_id, is_head); branch_name and
            sha_commit_id may be empty strings if unresolved
        """
        branch_name = sha_commit_id = ''
        is_head = False
        log.debug('Checking if commit_id `%s` is a head for %s.', commit_id, repo)

        for _branch_name, branch_commit_id in repo.branches.items():
            # simple case we pass in branch name, it's a HEAD
            if commit_id == _branch_name:
                is_head = True
                branch_name = _branch_name
                sha_commit_id = branch_commit_id
                break
            # case when we pass in full sha commit_id, which is a head
            elif commit_id == branch_commit_id:
                is_head = True
                branch_name = _branch_name
                sha_commit_id = branch_commit_id
                break

        if h.is_svn(repo) and not repo.is_empty():
            # Note: Subversion only has one head.
            if commit_id == repo.get_commit(commit_idx=-1).raw_id:
                is_head = True
            return branch_name, sha_commit_id, is_head

        # checked branches, means we only need to try to get the branch/commit_sha
        if not repo.is_empty():
            commit = repo.get_commit(commit_id=commit_id)
            if commit:
                branch_name = commit.branch
                sha_commit_id = commit.raw_id

        return branch_name, sha_commit_id, is_head
247 247
248 248 def _get_tree_at_commit(self, c, commit_id, f_path, full_load=False):
249 249
250 250 repo_id = self.db_repo.repo_id
251 251 force_recache = self.get_recache_flag()
252 252
253 253 cache_seconds = safe_int(
254 254 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
255 255 cache_on = not force_recache and cache_seconds > 0
256 256 log.debug(
257 257 'Computing FILE TREE for repo_id %s commit_id `%s` and path `%s`'
258 258 'with caching: %s[TTL: %ss]' % (
259 259 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
260 260
261 261 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
262 262 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
263 263
264 264 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
265 265 condition=cache_on)
266 266 def compute_file_tree(ver, repo_id, commit_id, f_path, full_load):
267 267 log.debug('Generating cached file tree at ver:%s for repo_id: %s, %s, %s',
268 268 ver, repo_id, commit_id, f_path)
269 269
270 270 c.full_load = full_load
271 271 return render(
272 272 'rhodecode:templates/files/files_browser_tree.mako',
273 273 self._get_template_context(c), self.request)
274 274
275 275 return compute_file_tree('v1', self.db_repo.repo_id, commit_id, f_path, full_load)
276 276
277 277 def _get_archive_spec(self, fname):
278 278 log.debug('Detecting archive spec for: `%s`', fname)
279 279
280 280 fileformat = None
281 281 ext = None
282 282 content_type = None
283 283 for a_type, content_type, extension in settings.ARCHIVE_SPECS:
284 284
285 285 if fname.endswith(extension):
286 286 fileformat = a_type
287 287 log.debug('archive is of type: %s', fileformat)
288 288 ext = extension
289 289 break
290 290
291 291 if not fileformat:
292 292 raise ValueError()
293 293
294 294 # left over part of whole fname is the commit
295 295 commit_id = fname[:-len(ext)]
296 296
297 297 return commit_id, ext, fileformat, content_type
298 298
299 299 def create_pure_path(self, *parts):
300 300 # Split paths and sanitize them, removing any ../ etc
301 301 sanitized_path = [
302 302 x for x in pathlib2.PurePath(*parts).parts
303 303 if x not in ['.', '..']]
304 304
305 305 pure_path = pathlib2.PurePath(*sanitized_path)
306 306 return pure_path
307 307
308 308 def _is_lf_enabled(self, target_repo):
309 309 lf_enabled = False
310 310
311 311 lf_key_for_vcs_map = {
312 312 'hg': 'extensions_largefiles',
313 313 'git': 'vcs_git_lfs_enabled'
314 314 }
315 315
316 316 lf_key_for_vcs = lf_key_for_vcs_map.get(target_repo.repo_type)
317 317
318 318 if lf_key_for_vcs:
319 319 lf_enabled = self._get_repo_setting(target_repo, lf_key_for_vcs)
320 320
321 321 return lf_enabled
322 322
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_archivefile', request_method='GET',
        renderer=None)
    def repo_archivefile(self):
        """
        Stream a (tar/zip/...) archive of the repository at a given commit.

        The archive file name encodes commit and format (see
        _get_archive_spec). Supports an optional sub-path (`at_path`),
        subrepo inclusion, and an on-disk archive cache controlled by the
        `archive_cache_dir` config key (bypassed with `?no_cache`).
        """
        # archive cache config
        from rhodecode import CONFIG
        _ = self.request.translate
        self.load_default_context()
        default_at_path = '/'
        fname = self.request.matchdict['fname']
        subrepos = self.request.GET.get('subrepos') == 'true'
        at_path = self.request.GET.get('at_path') or default_at_path

        if not self.db_repo.enable_downloads:
            return Response(_('Downloads disabled'))

        try:
            commit_id, ext, fileformat, content_type = \
                self._get_archive_spec(fname)
        except ValueError:
            return Response(_('Unknown archive type for: `{}`').format(
                h.escape(fname)))

        try:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id)
        except CommitDoesNotExistError:
            return Response(_('Unknown commit_id {}').format(
                h.escape(commit_id)))
        except EmptyRepositoryError:
            return Response(_('Empty repository'))

        try:
            # normalize at_path through the commit's node resolution
            at_path = commit.get_node(at_path).path or default_at_path
        except Exception:
            return Response(_('No node at path {} for this repository').format(at_path))

        # short hash of the sub-path, used to key cached sub-path archives
        path_sha = sha1(at_path)[:8]

        # original backward compat name of archive
        clean_name = safe_str(self.db_repo_name.replace('/', '_'))
        short_sha = safe_str(commit.short_id)

        if at_path == default_at_path:
            archive_name = '{}-{}{}{}'.format(
                clean_name,
                '-sub' if subrepos else '',
                short_sha,
                ext)
        # custom path and new name
        else:
            archive_name = '{}-{}{}-{}{}'.format(
                clean_name,
                '-sub' if subrepos else '',
                short_sha,
                path_sha,
                ext)

        use_cached_archive = False
        archive_cache_enabled = CONFIG.get(
            'archive_cache_dir') and not self.request.GET.get('no_cache')
        cached_archive_path = None

        if archive_cache_enabled:
            # check if we it's ok to write
            if not os.path.isdir(CONFIG['archive_cache_dir']):
                os.makedirs(CONFIG['archive_cache_dir'])
            cached_archive_path = os.path.join(
                CONFIG['archive_cache_dir'], archive_name)
            if os.path.isfile(cached_archive_path):
                log.debug('Found cached archive in %s', cached_archive_path)
                # fd=None signals "no temp file to close" to the streamer
                fd, archive = None, cached_archive_path
                use_cached_archive = True
            else:
                log.debug('Archive %s is not yet cached', archive_name)

        if not use_cached_archive:
            # generate new archive
            fd, archive = tempfile.mkstemp()
            log.debug('Creating new temp archive in %s', archive)
            try:
                commit.archive_repo(archive, kind=fileformat, subrepos=subrepos,
                                    archive_at_path=at_path)
            except ImproperArchiveTypeError:
                return _('Unknown archive type')
            if archive_cache_enabled:
                # if we generated the archive and we have cache enabled
                # let's use this for future
                log.debug('Storing new archive in %s', cached_archive_path)
                shutil.move(archive, cached_archive_path)
                archive = cached_archive_path

        # store download action
        audit_logger.store_web(
            'repo.archive.download', action_data={
                'user_agent': self.request.user_agent,
                'archive_name': archive_name,
                'archive_spec': fname,
                'archive_cached': use_cached_archive},
            user=self._rhodecode_user,
            repo=self.db_repo,
            commit=True
        )

        def get_chunked_archive(archive_path):
            # stream the archive in 16kB chunks; clean up the temp file
            # (and non-cached archive) once fully sent
            with open(archive_path, 'rb') as stream:
                while True:
                    data = stream.read(16 * 1024)
                    if not data:
                        if fd:  # fd means we used temporary file
                            os.close(fd)
                        if not archive_cache_enabled:
                            log.debug('Destroying temp archive %s', archive_path)
                            os.remove(archive_path)
                        break
                    yield data

        response = Response(app_iter=get_chunked_archive(archive))
        response.content_disposition = str(
            'attachment; filename=%s' % archive_name)
        response.content_type = str(content_type)

        return response
448 448
    def _get_file_node(self, commit_id, f_path):
        """
        Fetch the file node at `f_path` for `commit_id`.

        Returns an empty FileNode bound to an EmptyCommit when the commit id
        is empty/None/zero or the path is missing from the commit, so diff
        code can treat "no such side" uniformly.

        :raises NodeError: when the path resolves to a directory
        """
        if commit_id not in ['', None, 'None', '0' * 12, '0' * 40]:
            commit = self.rhodecode_vcs_repo.get_commit(commit_id=commit_id)
            try:
                node = commit.get_node(f_path)
                if node.is_dir():
                    raise NodeError('%s path is a %s not a file'
                                    % (node, type(node)))
            except NodeDoesNotExistError:
                # path not in this commit: empty file node that still carries
                # the original commit's metadata
                commit = EmptyCommit(
                    commit_id=commit_id,
                    idx=commit.idx,
                    repo=commit.repository,
                    alias=commit.repository.alias,
                    message=commit.message,
                    author=commit.author,
                    date=commit.date)
                node = FileNode(f_path, '', commit=commit)
        else:
            # "null" commit id requested: empty node on a bare empty commit
            commit = EmptyCommit(
                repo=self.rhodecode_vcs_repo,
                alias=self.rhodecode_vcs_repo.alias)
            node = FileNode(f_path, '', commit=commit)
        return node
473 473
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_files_diff', request_method='GET',
        renderer=None)
    def repo_files_diff(self):
        """
        Serve a diff of a single file between two commits (`diff1`, `diff2`).

        Only the `download` and `raw` actions are handled here; any other
        action redirects to the full compare view.
        """
        c = self.load_default_context()
        f_path = self._get_f_path(self.request.matchdict)
        diff1 = self.request.GET.get('diff1', '')
        diff2 = self.request.GET.get('diff2', '')

        # diff1 may carry its own path spec (old name of a renamed file)
        path1, diff1 = parse_path_ref(diff1, default_path=f_path)

        ignore_whitespace = str2bool(self.request.GET.get('ignorews'))
        line_context = self.request.GET.get('context', 3)

        if not any((diff1, diff2)):
            h.flash(
                'Need query parameter "diff1" or "diff2" to generate a diff.',
                category='error')
            raise HTTPBadRequest()

        c.action = self.request.GET.get('diff')
        if c.action not in ['download', 'raw']:
            compare_url = h.route_path(
                'repo_compare',
                repo_name=self.db_repo_name,
                source_ref_type='rev',
                source_ref=diff1,
                target_repo=self.db_repo_name,
                target_ref_type='rev',
                target_ref=diff2,
                _query=dict(f_path=f_path))
            # redirect to new view if we render diff
            raise HTTPFound(compare_url)

        try:
            node1 = self._get_file_node(diff1, path1)
            node2 = self._get_file_node(diff2, f_path)
        except (RepositoryError, NodeError):
            log.exception("Exception while trying to get node from repository")
            raise HTTPFound(
                h.route_path('repo_files', repo_name=self.db_repo_name,
                             commit_id='tip', f_path=f_path))

        # neither side exists -> nothing to diff
        if all(isinstance(node.commit, EmptyCommit)
               for node in (node1, node2)):
            raise HTTPNotFound()

        c.commit_1 = node1.commit
        c.commit_2 = node2.commit

        if c.action == 'download':
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            diff = diffs.DiffProcessor(_diff, format='gitdiff')

            response = Response(self.path_filter.get_raw_patch(diff))
            response.content_type = 'text/plain'
            response.content_disposition = (
                'attachment; filename=%s_%s_vs_%s.diff' % (f_path, diff1, diff2)
            )
            charset = self._get_default_encoding(c)
            if charset:
                response.charset = charset
            return response

        elif c.action == 'raw':
            _diff = diffs.get_gitdiff(node1, node2,
                                      ignore_whitespace=ignore_whitespace,
                                      context=line_context)
            diff = diffs.DiffProcessor(_diff, format='gitdiff')

            response = Response(self.path_filter.get_raw_patch(diff))
            response.content_type = 'text/plain'
            charset = self._get_default_encoding(c)
            if charset:
                response.charset = charset
            return response

        # in case we ever end up here
        raise HTTPNotFound()
558 558
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_files_diff_2way_redirect', request_method='GET',
        renderer=None)
    def repo_files_diff_2way_redirect(self):
        """
        Kept only to make OLD links work

        Redirects legacy 2-way diff URLs to the side-by-side compare view.

        :raises HTTPBadRequest: when neither diff1 nor diff2 is supplied
        :raises HTTPFound: always, on success (redirect)
        """
        f_path = self._get_f_path_unchecked(self.request.matchdict)
        diff1 = self.request.GET.get('diff1', '')
        diff2 = self.request.GET.get('diff2', '')

        if not any((diff1, diff2)):
            h.flash(
                'Need query parameter "diff1" or "diff2" to generate a diff.',
                category='error')
            raise HTTPBadRequest()

        compare_url = h.route_path(
            'repo_compare',
            repo_name=self.db_repo_name,
            source_ref_type='rev',
            source_ref=diff1,
            target_ref_type='rev',
            target_ref=diff2,
            _query=dict(f_path=f_path, diffmode='sideside',
                        target_repo=self.db_repo_name,))
        raise HTTPFound(compare_url)
589 589
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_files', request_method='GET',
        renderer=None)
    @view_config(
        route_name='repo_files:default_path', request_method='GET',
        renderer=None)
    @view_config(
        route_name='repo_files:default_commit', request_method='GET',
        renderer=None)
    @view_config(
        route_name='repo_files:rendered', request_method='GET',
        renderer=None)
    @view_config(
        route_name='repo_files:annotated', request_method='GET',
        renderer=None)
    def repo_files(self):
        """
        Main file browser view: renders either a single file (source,
        annotated, or rendered .rst/.md) or a directory tree at the
        requested commit and path. Also computes prev/next commit
        navigation links for the template.
        """
        c = self.load_default_context()

        view_name = getattr(self.request.matched_route, 'name', None)

        c.annotate = view_name == 'repo_files:annotated'
        # default is false, but .rst/.md files later are auto rendered, we can
        # overwrite auto rendering by setting this GET flag
        c.renderer = view_name == 'repo_files:rendered' or \
                     not self.request.GET.get('no-render', False)

        # redirect to given commit_id from form if given
        get_commit_id = self.request.GET.get('at_rev', None)
        if get_commit_id:
            self._get_commit_or_redirect(get_commit_id)

        commit_id, f_path = self._get_commit_and_path()
        c.commit = self._get_commit_or_redirect(commit_id)
        c.branch = self.request.GET.get('branch', None)
        c.f_path = f_path

        # prev link
        try:
            prev_commit = c.commit.prev(c.branch)
            c.prev_commit = prev_commit
            c.url_prev = h.route_path(
                'repo_files', repo_name=self.db_repo_name,
                commit_id=prev_commit.raw_id, f_path=f_path)
            if c.branch:
                c.url_prev += '?branch=%s' % c.branch
        except (CommitDoesNotExistError, VCSError):
            # no previous commit on this branch; disable the link
            c.url_prev = '#'
            c.prev_commit = EmptyCommit()

        # next link
        try:
            next_commit = c.commit.next(c.branch)
            c.next_commit = next_commit
            c.url_next = h.route_path(
                'repo_files', repo_name=self.db_repo_name,
                commit_id=next_commit.raw_id, f_path=f_path)
            if c.branch:
                c.url_next += '?branch=%s' % c.branch
        except (CommitDoesNotExistError, VCSError):
            # no next commit on this branch; disable the link
            c.url_next = '#'
            c.next_commit = EmptyCommit()

        # files or dirs
        try:
            c.file = c.commit.get_node(f_path)
            c.file_author = True
            c.file_tree = ''

            # load file content
            if c.file.is_file():
                c.lf_node = {}

                has_lf_enabled = self._is_lf_enabled(self.db_repo)
                if has_lf_enabled:
                    c.lf_node = c.file.get_largefile_node()

                c.file_source_page = 'true'
                c.file_last_commit = c.file.last_commit
                # only tokenize/annotate files below the configured size cap
                if c.file.size < c.visual.cut_off_limit_diff:
                    if c.annotate:  # annotation has precedence over renderer
                        c.annotated_lines = filenode_as_annotated_lines_tokens(
                            c.file
                        )
                    else:
                        c.renderer = (
                            c.renderer and h.renderer_from_filename(c.file.path)
                        )
                        if not c.renderer:
                            c.lines = filenode_as_lines_tokens(c.file)

                _branch_name, _sha_commit_id, is_head = self._is_valid_head(
                    commit_id, self.rhodecode_vcs_repo)
                c.on_branch_head = is_head

                # branch names containing '/' are ambiguous in URLs; fall
                # back to the raw commit id in that case
                branch = c.commit.branch if (
                    c.commit.branch and '/' not in c.commit.branch) else None
                c.branch_or_raw_id = branch or c.commit.raw_id
                c.branch_name = c.commit.branch or h.short_id(c.commit.raw_id)

                author = c.file_last_commit.author
                c.authors = [[
                    h.email(author),
                    h.person(author, 'username_or_name_or_email'),
                    1
                ]]

            else:  # load tree content at path
                c.file_source_page = 'false'
                c.authors = []
                # this loads a simple tree without metadata to speed things up
                # later via ajax we call repo_nodetree_full and fetch whole
                c.file_tree = self._get_tree_at_commit(c, c.commit.raw_id, f_path)

        except RepositoryError as e:
            h.flash(safe_str(h.escape(e)), category='error')
            raise HTTPNotFound()

        if self.request.environ.get('HTTP_X_PJAX'):
            html = render('rhodecode:templates/files/files_pjax.mako',
                          self._get_template_context(c), self.request)
        else:
            html = render('rhodecode:templates/files/files.mako',
                          self._get_template_context(c), self.request)
        return Response(html)
717 717
718 718 @HasRepoPermissionAnyDecorator(
719 719 'repository.read', 'repository.write', 'repository.admin')
720 720 @view_config(
721 721 route_name='repo_files:annotated_previous', request_method='GET',
722 722 renderer=None)
723 723 def repo_files_annotated_previous(self):
724 724 self.load_default_context()
725 725
726 726 commit_id, f_path = self._get_commit_and_path()
727 727 commit = self._get_commit_or_redirect(commit_id)
728 728 prev_commit_id = commit.raw_id
729 729 line_anchor = self.request.GET.get('line_anchor')
730 730 is_file = False
731 731 try:
732 732 _file = commit.get_node(f_path)
733 733 is_file = _file.is_file()
734 734 except (NodeDoesNotExistError, CommitDoesNotExistError, VCSError):
735 735 pass
736 736
737 737 if is_file:
738 738 history = commit.get_path_history(f_path)
739 739 prev_commit_id = history[1].raw_id \
740 740 if len(history) > 1 else prev_commit_id
741 741 prev_url = h.route_path(
742 742 'repo_files:annotated', repo_name=self.db_repo_name,
743 743 commit_id=prev_commit_id, f_path=f_path,
744 744 _anchor='L{}'.format(line_anchor))
745 745
746 746 raise HTTPFound(prev_url)
747 747
    @LoginRequired()
    @HasRepoPermissionAnyDecorator(
        'repository.read', 'repository.write', 'repository.admin')
    @view_config(
        route_name='repo_nodetree_full', request_method='GET',
        renderer=None, xhr=True)
    @view_config(
        route_name='repo_nodetree_full:default_path', request_method='GET',
        renderer=None, xhr=True)
    def repo_nodetree_full(self):
        """
        Returns rendered html of file tree that contains commit date,
        author, commit_id for the specified combination of
        repo, commit_id and file path

        Called via ajax (xhr=True) after the fast simple tree is shown.
        """
        c = self.load_default_context()

        commit_id, f_path = self._get_commit_and_path()
        commit = self._get_commit_or_redirect(commit_id)
        try:
            dir_node = commit.get_node(f_path)
        except RepositoryError as e:
            return Response('error: {}'.format(h.escape(safe_str(e))))

        # a file has no sub-tree; return an empty body
        if dir_node.is_file():
            return Response('')

        c.file = dir_node
        c.commit = commit

        html = self._get_tree_at_commit(
            c, commit.raw_id, dir_node.path, full_load=True)

        return Response(html)
782 782
783 783 def _get_attachement_headers(self, f_path):
784 784 f_name = safe_str(f_path.split(Repository.NAME_SEP)[-1])
785 785 safe_path = f_name.replace('"', '\\"')
786 786 encoded_path = urllib.quote(f_name)
787 787
788 788 return "attachment; " \
789 789 "filename=\"{}\"; " \
790 790 "filename*=UTF-8\'\'{}".format(safe_path, encoded_path)
791 791
792 792 @LoginRequired()
793 793 @HasRepoPermissionAnyDecorator(
794 794 'repository.read', 'repository.write', 'repository.admin')
795 795 @view_config(
796 796 route_name='repo_file_raw', request_method='GET',
797 797 renderer=None)
798 798 def repo_file_raw(self):
799 799 """
800 800 Action for show as raw, some mimetypes are "rendered",
801 801 those include images, icons.
802 802 """
803 803 c = self.load_default_context()
804 804
805 805 commit_id, f_path = self._get_commit_and_path()
806 806 commit = self._get_commit_or_redirect(commit_id)
807 807 file_node = self._get_filenode_or_redirect(commit, f_path)
808 808
809 809 raw_mimetype_mapping = {
810 810 # map original mimetype to a mimetype used for "show as raw"
811 811 # you can also provide a content-disposition to override the
812 812 # default "attachment" disposition.
813 813 # orig_type: (new_type, new_dispo)
814 814
815 815 # show images inline:
816 816 # Do not re-add SVG: it is unsafe and permits XSS attacks. One can
817 817 # for example render an SVG with javascript inside or even render
818 818 # HTML.
819 819 'image/x-icon': ('image/x-icon', 'inline'),
820 820 'image/png': ('image/png', 'inline'),
821 821 'image/gif': ('image/gif', 'inline'),
822 822 'image/jpeg': ('image/jpeg', 'inline'),
823 823 'application/pdf': ('application/pdf', 'inline'),
824 824 }
825 825
826 826 mimetype = file_node.mimetype
827 827 try:
828 828 mimetype, disposition = raw_mimetype_mapping[mimetype]
829 829 except KeyError:
830 830 # we don't know anything special about this, handle it safely
831 831 if file_node.is_binary:
832 832 # do same as download raw for binary files
833 833 mimetype, disposition = 'application/octet-stream', 'attachment'
834 834 else:
835 835 # do not just use the original mimetype, but force text/plain,
836 836 # otherwise it would serve text/html and that might be unsafe.
837 837 # Note: underlying vcs library fakes text/plain mimetype if the
838 838 # mimetype can not be determined and it thinks it is not
839 839 # binary.This might lead to erroneous text display in some
840 840 # cases, but helps in other cases, like with text files
841 841 # without extension.
842 842 mimetype, disposition = 'text/plain', 'inline'
843 843
844 844 if disposition == 'attachment':
845 845 disposition = self._get_attachement_headers(f_path)
846 846
847 def stream_node():
848 yield file_node.raw_bytes
847 stream_content = file_node.stream_bytes()
849 848
850 response = Response(app_iter=stream_node())
849 response = Response(app_iter=stream_content)
851 850 response.content_disposition = disposition
852 851 response.content_type = mimetype
853 852
854 853 charset = self._get_default_encoding(c)
855 854 if charset:
856 855 response.charset = charset
857 856
858 857 return response
859 858
860 859 @LoginRequired()
861 860 @HasRepoPermissionAnyDecorator(
862 861 'repository.read', 'repository.write', 'repository.admin')
863 862 @view_config(
864 863 route_name='repo_file_download', request_method='GET',
865 864 renderer=None)
866 865 @view_config(
867 866 route_name='repo_file_download:legacy', request_method='GET',
868 867 renderer=None)
869 868 def repo_file_download(self):
870 869 c = self.load_default_context()
871 870
872 871 commit_id, f_path = self._get_commit_and_path()
873 872 commit = self._get_commit_or_redirect(commit_id)
874 873 file_node = self._get_filenode_or_redirect(commit, f_path)
875 874
876 875 if self.request.GET.get('lf'):
877 876 # only if lf get flag is passed, we download this file
878 877 # as LFS/Largefile
879 878 lf_node = file_node.get_largefile_node()
880 879 if lf_node:
881 880 # overwrite our pointer with the REAL large-file
882 881 file_node = lf_node
883 882
884 883 disposition = self._get_attachement_headers(f_path)
885 884
886 def stream_node():
887 yield file_node.raw_bytes
885 stream_content = file_node.stream_bytes()
888 886
889 response = Response(app_iter=stream_node())
887 response = Response(app_iter=stream_content)
890 888 response.content_disposition = disposition
891 889 response.content_type = file_node.mimetype
892 890
893 891 charset = self._get_default_encoding(c)
894 892 if charset:
895 893 response.charset = charset
896 894
897 895 return response
898 896
899 897 def _get_nodelist_at_commit(self, repo_name, repo_id, commit_id, f_path):
900 898
901 899 cache_seconds = safe_int(
902 900 rhodecode.CONFIG.get('rc_cache.cache_repo.expiration_time'))
903 901 cache_on = cache_seconds > 0
904 902 log.debug(
905 903 'Computing FILE SEARCH for repo_id %s commit_id `%s` and path `%s`'
906 904 'with caching: %s[TTL: %ss]' % (
907 905 repo_id, commit_id, f_path, cache_on, cache_seconds or 0))
908 906
909 907 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
910 908 region = rc_cache.get_or_create_region('cache_repo', cache_namespace_uid)
911 909
912 910 @region.conditional_cache_on_arguments(namespace=cache_namespace_uid,
913 911 condition=cache_on)
914 912 def compute_file_search(repo_id, commit_id, f_path):
915 913 log.debug('Generating cached nodelist for repo_id:%s, %s, %s',
916 914 repo_id, commit_id, f_path)
917 915 try:
918 916 _d, _f = ScmModel().get_nodes(
919 917 repo_name, commit_id, f_path, flat=False)
920 918 except (RepositoryError, CommitDoesNotExistError, Exception) as e:
921 919 log.exception(safe_str(e))
922 920 h.flash(safe_str(h.escape(e)), category='error')
923 921 raise HTTPFound(h.route_path(
924 922 'repo_files', repo_name=self.db_repo_name,
925 923 commit_id='tip', f_path='/'))
926 924
927 925 return _d + _f
928 926
929 927 result = compute_file_search(self.db_repo.repo_id, commit_id, f_path)
930 928 return filter(lambda n: self.path_filter.path_access_allowed(n['name']), result)
931 929
932 930 @LoginRequired()
933 931 @HasRepoPermissionAnyDecorator(
934 932 'repository.read', 'repository.write', 'repository.admin')
935 933 @view_config(
936 934 route_name='repo_files_nodelist', request_method='GET',
937 935 renderer='json_ext', xhr=True)
938 936 def repo_nodelist(self):
939 937 self.load_default_context()
940 938
941 939 commit_id, f_path = self._get_commit_and_path()
942 940 commit = self._get_commit_or_redirect(commit_id)
943 941
944 942 metadata = self._get_nodelist_at_commit(
945 943 self.db_repo_name, self.db_repo.repo_id, commit.raw_id, f_path)
946 944 return {'nodes': metadata}
947 945
948 946 def _create_references(self, branches_or_tags, symbolic_reference, f_path, ref_type):
949 947 items = []
950 948 for name, commit_id in branches_or_tags.items():
951 949 sym_ref = symbolic_reference(commit_id, name, f_path, ref_type)
952 950 items.append((sym_ref, name, ref_type))
953 951 return items
954 952
955 953 def _symbolic_reference(self, commit_id, name, f_path, ref_type):
956 954 return commit_id
957 955
958 956 def _symbolic_reference_svn(self, commit_id, name, f_path, ref_type):
959 957 new_f_path = vcspath.join(name, f_path)
960 958 return u'%s@%s' % (new_f_path, commit_id)
961 959
962 960 def _get_node_history(self, commit_obj, f_path, commits=None):
963 961 """
964 962 get commit history for given node
965 963
966 964 :param commit_obj: commit to calculate history
967 965 :param f_path: path for node to calculate history for
968 966 :param commits: if passed don't calculate history and take
969 967 commits defined in this list
970 968 """
971 969 _ = self.request.translate
972 970
973 971 # calculate history based on tip
974 972 tip = self.rhodecode_vcs_repo.get_commit()
975 973 if commits is None:
976 974 pre_load = ["author", "branch"]
977 975 try:
978 976 commits = tip.get_path_history(f_path, pre_load=pre_load)
979 977 except (NodeDoesNotExistError, CommitError):
980 978 # this node is not present at tip!
981 979 commits = commit_obj.get_path_history(f_path, pre_load=pre_load)
982 980
983 981 history = []
984 982 commits_group = ([], _("Changesets"))
985 983 for commit in commits:
986 984 branch = ' (%s)' % commit.branch if commit.branch else ''
987 985 n_desc = 'r%s:%s%s' % (commit.idx, commit.short_id, branch)
988 986 commits_group[0].append((commit.raw_id, n_desc, 'sha'))
989 987 history.append(commits_group)
990 988
991 989 symbolic_reference = self._symbolic_reference
992 990
993 991 if self.rhodecode_vcs_repo.alias == 'svn':
994 992 adjusted_f_path = RepoFilesView.adjust_file_path_for_svn(
995 993 f_path, self.rhodecode_vcs_repo)
996 994 if adjusted_f_path != f_path:
997 995 log.debug(
998 996 'Recognized svn tag or branch in file "%s", using svn '
999 997 'specific symbolic references', f_path)
1000 998 f_path = adjusted_f_path
1001 999 symbolic_reference = self._symbolic_reference_svn
1002 1000
1003 1001 branches = self._create_references(
1004 1002 self.rhodecode_vcs_repo.branches, symbolic_reference, f_path, 'branch')
1005 1003 branches_group = (branches, _("Branches"))
1006 1004
1007 1005 tags = self._create_references(
1008 1006 self.rhodecode_vcs_repo.tags, symbolic_reference, f_path, 'tag')
1009 1007 tags_group = (tags, _("Tags"))
1010 1008
1011 1009 history.append(branches_group)
1012 1010 history.append(tags_group)
1013 1011
1014 1012 return history, commits
1015 1013
1016 1014 @LoginRequired()
1017 1015 @HasRepoPermissionAnyDecorator(
1018 1016 'repository.read', 'repository.write', 'repository.admin')
1019 1017 @view_config(
1020 1018 route_name='repo_file_history', request_method='GET',
1021 1019 renderer='json_ext')
1022 1020 def repo_file_history(self):
1023 1021 self.load_default_context()
1024 1022
1025 1023 commit_id, f_path = self._get_commit_and_path()
1026 1024 commit = self._get_commit_or_redirect(commit_id)
1027 1025 file_node = self._get_filenode_or_redirect(commit, f_path)
1028 1026
1029 1027 if file_node.is_file():
1030 1028 file_history, _hist = self._get_node_history(commit, f_path)
1031 1029
1032 1030 res = []
1033 1031 for obj in file_history:
1034 1032 res.append({
1035 1033 'text': obj[1],
1036 1034 'children': [{'id': o[0], 'text': o[1], 'type': o[2]} for o in obj[0]]
1037 1035 })
1038 1036
1039 1037 data = {
1040 1038 'more': False,
1041 1039 'results': res
1042 1040 }
1043 1041 return data
1044 1042
1045 1043 log.warning('Cannot fetch history for directory')
1046 1044 raise HTTPBadRequest()
1047 1045
1048 1046 @LoginRequired()
1049 1047 @HasRepoPermissionAnyDecorator(
1050 1048 'repository.read', 'repository.write', 'repository.admin')
1051 1049 @view_config(
1052 1050 route_name='repo_file_authors', request_method='GET',
1053 1051 renderer='rhodecode:templates/files/file_authors_box.mako')
1054 1052 def repo_file_authors(self):
1055 1053 c = self.load_default_context()
1056 1054
1057 1055 commit_id, f_path = self._get_commit_and_path()
1058 1056 commit = self._get_commit_or_redirect(commit_id)
1059 1057 file_node = self._get_filenode_or_redirect(commit, f_path)
1060 1058
1061 1059 if not file_node.is_file():
1062 1060 raise HTTPBadRequest()
1063 1061
1064 1062 c.file_last_commit = file_node.last_commit
1065 1063 if self.request.GET.get('annotate') == '1':
1066 1064 # use _hist from annotation if annotation mode is on
1067 1065 commit_ids = set(x[1] for x in file_node.annotate)
1068 1066 _hist = (
1069 1067 self.rhodecode_vcs_repo.get_commit(commit_id)
1070 1068 for commit_id in commit_ids)
1071 1069 else:
1072 1070 _f_history, _hist = self._get_node_history(commit, f_path)
1073 1071 c.file_author = False
1074 1072
1075 1073 unique = collections.OrderedDict()
1076 1074 for commit in _hist:
1077 1075 author = commit.author
1078 1076 if author not in unique:
1079 1077 unique[commit.author] = [
1080 1078 h.email(author),
1081 1079 h.person(author, 'username_or_name_or_email'),
1082 1080 1 # counter
1083 1081 ]
1084 1082
1085 1083 else:
1086 1084 # increase counter
1087 1085 unique[commit.author][2] += 1
1088 1086
1089 1087 c.authors = [val for val in unique.values()]
1090 1088
1091 1089 return self._get_template_context(c)
1092 1090
1093 1091 @LoginRequired()
1094 1092 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1095 1093 @view_config(
1096 1094 route_name='repo_files_remove_file', request_method='GET',
1097 1095 renderer='rhodecode:templates/files/files_delete.mako')
1098 1096 def repo_files_remove_file(self):
1099 1097 _ = self.request.translate
1100 1098 c = self.load_default_context()
1101 1099 commit_id, f_path = self._get_commit_and_path()
1102 1100
1103 1101 self._ensure_not_locked()
1104 1102 _branch_name, _sha_commit_id, is_head = \
1105 1103 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1106 1104
1107 1105 self.forbid_non_head(is_head, f_path)
1108 1106 self.check_branch_permission(_branch_name)
1109 1107
1110 1108 c.commit = self._get_commit_or_redirect(commit_id)
1111 1109 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1112 1110
1113 1111 c.default_message = _(
1114 1112 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1115 1113 c.f_path = f_path
1116 1114
1117 1115 return self._get_template_context(c)
1118 1116
1119 1117 @LoginRequired()
1120 1118 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1121 1119 @CSRFRequired()
1122 1120 @view_config(
1123 1121 route_name='repo_files_delete_file', request_method='POST',
1124 1122 renderer=None)
1125 1123 def repo_files_delete_file(self):
1126 1124 _ = self.request.translate
1127 1125
1128 1126 c = self.load_default_context()
1129 1127 commit_id, f_path = self._get_commit_and_path()
1130 1128
1131 1129 self._ensure_not_locked()
1132 1130 _branch_name, _sha_commit_id, is_head = \
1133 1131 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1134 1132
1135 1133 self.forbid_non_head(is_head, f_path)
1136 1134 self.check_branch_permission(_branch_name)
1137 1135
1138 1136 c.commit = self._get_commit_or_redirect(commit_id)
1139 1137 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1140 1138
1141 1139 c.default_message = _(
1142 1140 'Deleted file {} via RhodeCode Enterprise').format(f_path)
1143 1141 c.f_path = f_path
1144 1142 node_path = f_path
1145 1143 author = self._rhodecode_db_user.full_contact
1146 1144 message = self.request.POST.get('message') or c.default_message
1147 1145 try:
1148 1146 nodes = {
1149 1147 node_path: {
1150 1148 'content': ''
1151 1149 }
1152 1150 }
1153 1151 ScmModel().delete_nodes(
1154 1152 user=self._rhodecode_db_user.user_id, repo=self.db_repo,
1155 1153 message=message,
1156 1154 nodes=nodes,
1157 1155 parent_commit=c.commit,
1158 1156 author=author,
1159 1157 )
1160 1158
1161 1159 h.flash(
1162 1160 _('Successfully deleted file `{}`').format(
1163 1161 h.escape(f_path)), category='success')
1164 1162 except Exception:
1165 1163 log.exception('Error during commit operation')
1166 1164 h.flash(_('Error occurred during commit'), category='error')
1167 1165 raise HTTPFound(
1168 1166 h.route_path('repo_commit', repo_name=self.db_repo_name,
1169 1167 commit_id='tip'))
1170 1168
1171 1169 @LoginRequired()
1172 1170 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1173 1171 @view_config(
1174 1172 route_name='repo_files_edit_file', request_method='GET',
1175 1173 renderer='rhodecode:templates/files/files_edit.mako')
1176 1174 def repo_files_edit_file(self):
1177 1175 _ = self.request.translate
1178 1176 c = self.load_default_context()
1179 1177 commit_id, f_path = self._get_commit_and_path()
1180 1178
1181 1179 self._ensure_not_locked()
1182 1180 _branch_name, _sha_commit_id, is_head = \
1183 1181 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1184 1182
1185 1183 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1186 1184 self.check_branch_permission(_branch_name, commit_id=commit_id)
1187 1185
1188 1186 c.commit = self._get_commit_or_redirect(commit_id)
1189 1187 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1190 1188
1191 1189 if c.file.is_binary:
1192 1190 files_url = h.route_path(
1193 1191 'repo_files',
1194 1192 repo_name=self.db_repo_name,
1195 1193 commit_id=c.commit.raw_id, f_path=f_path)
1196 1194 raise HTTPFound(files_url)
1197 1195
1198 1196 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1199 1197 c.f_path = f_path
1200 1198
1201 1199 return self._get_template_context(c)
1202 1200
1203 1201 @LoginRequired()
1204 1202 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1205 1203 @CSRFRequired()
1206 1204 @view_config(
1207 1205 route_name='repo_files_update_file', request_method='POST',
1208 1206 renderer=None)
1209 1207 def repo_files_update_file(self):
1210 1208 _ = self.request.translate
1211 1209 c = self.load_default_context()
1212 1210 commit_id, f_path = self._get_commit_and_path()
1213 1211
1214 1212 self._ensure_not_locked()
1215 1213
1216 1214 c.commit = self._get_commit_or_redirect(commit_id)
1217 1215 c.file = self._get_filenode_or_redirect(c.commit, f_path)
1218 1216
1219 1217 if c.file.is_binary:
1220 1218 raise HTTPFound(h.route_path('repo_files', repo_name=self.db_repo_name,
1221 1219 commit_id=c.commit.raw_id, f_path=f_path))
1222 1220
1223 1221 _branch_name, _sha_commit_id, is_head = \
1224 1222 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1225 1223
1226 1224 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1227 1225 self.check_branch_permission(_branch_name, commit_id=commit_id)
1228 1226
1229 1227 c.default_message = _('Edited file {} via RhodeCode Enterprise').format(f_path)
1230 1228 c.f_path = f_path
1231 1229
1232 1230 old_content = c.file.content
1233 1231 sl = old_content.splitlines(1)
1234 1232 first_line = sl[0] if sl else ''
1235 1233
1236 1234 r_post = self.request.POST
1237 1235 # line endings: 0 - Unix, 1 - Mac, 2 - DOS
1238 1236 line_ending_mode = detect_mode(first_line, 0)
1239 1237 content = convert_line_endings(r_post.get('content', ''), line_ending_mode)
1240 1238
1241 1239 message = r_post.get('message') or c.default_message
1242 1240 org_node_path = c.file.unicode_path
1243 1241 filename = r_post['filename']
1244 1242
1245 1243 root_path = c.file.dir_path
1246 1244 pure_path = self.create_pure_path(root_path, filename)
1247 1245 node_path = safe_unicode(bytes(pure_path))
1248 1246
1249 1247 default_redirect_url = h.route_path('repo_commit', repo_name=self.db_repo_name,
1250 1248 commit_id=commit_id)
1251 1249 if content == old_content and node_path == org_node_path:
1252 1250 h.flash(_('No changes detected on {}').format(org_node_path),
1253 1251 category='warning')
1254 1252 raise HTTPFound(default_redirect_url)
1255 1253
1256 1254 try:
1257 1255 mapping = {
1258 1256 org_node_path: {
1259 1257 'org_filename': org_node_path,
1260 1258 'filename': node_path,
1261 1259 'content': content,
1262 1260 'lexer': '',
1263 1261 'op': 'mod',
1264 1262 'mode': c.file.mode
1265 1263 }
1266 1264 }
1267 1265
1268 1266 commit = ScmModel().update_nodes(
1269 1267 user=self._rhodecode_db_user.user_id,
1270 1268 repo=self.db_repo,
1271 1269 message=message,
1272 1270 nodes=mapping,
1273 1271 parent_commit=c.commit,
1274 1272 )
1275 1273
1276 1274 h.flash(_('Successfully committed changes to file `{}`').format(
1277 1275 h.escape(f_path)), category='success')
1278 1276 default_redirect_url = h.route_path(
1279 1277 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1280 1278
1281 1279 except Exception:
1282 1280 log.exception('Error occurred during commit')
1283 1281 h.flash(_('Error occurred during commit'), category='error')
1284 1282
1285 1283 raise HTTPFound(default_redirect_url)
1286 1284
1287 1285 @LoginRequired()
1288 1286 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1289 1287 @view_config(
1290 1288 route_name='repo_files_add_file', request_method='GET',
1291 1289 renderer='rhodecode:templates/files/files_add.mako')
1292 1290 @view_config(
1293 1291 route_name='repo_files_upload_file', request_method='GET',
1294 1292 renderer='rhodecode:templates/files/files_upload.mako')
1295 1293 def repo_files_add_file(self):
1296 1294 _ = self.request.translate
1297 1295 c = self.load_default_context()
1298 1296 commit_id, f_path = self._get_commit_and_path()
1299 1297
1300 1298 self._ensure_not_locked()
1301 1299
1302 1300 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1303 1301 if c.commit is None:
1304 1302 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1305 1303
1306 1304 if self.rhodecode_vcs_repo.is_empty():
1307 1305 # for empty repository we cannot check for current branch, we rely on
1308 1306 # c.commit.branch instead
1309 1307 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1310 1308 else:
1311 1309 _branch_name, _sha_commit_id, is_head = \
1312 1310 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1313 1311
1314 1312 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1315 1313 self.check_branch_permission(_branch_name, commit_id=commit_id)
1316 1314
1317 1315 c.default_message = (_('Added file via RhodeCode Enterprise'))
1318 1316 c.f_path = f_path.lstrip('/') # ensure not relative path
1319 1317
1320 1318 return self._get_template_context(c)
1321 1319
1322 1320 @LoginRequired()
1323 1321 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1324 1322 @CSRFRequired()
1325 1323 @view_config(
1326 1324 route_name='repo_files_create_file', request_method='POST',
1327 1325 renderer=None)
1328 1326 def repo_files_create_file(self):
1329 1327 _ = self.request.translate
1330 1328 c = self.load_default_context()
1331 1329 commit_id, f_path = self._get_commit_and_path()
1332 1330
1333 1331 self._ensure_not_locked()
1334 1332
1335 1333 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1336 1334 if c.commit is None:
1337 1335 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1338 1336
1339 1337 # calculate redirect URL
1340 1338 if self.rhodecode_vcs_repo.is_empty():
1341 1339 default_redirect_url = h.route_path(
1342 1340 'repo_summary', repo_name=self.db_repo_name)
1343 1341 else:
1344 1342 default_redirect_url = h.route_path(
1345 1343 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1346 1344
1347 1345 if self.rhodecode_vcs_repo.is_empty():
1348 1346 # for empty repository we cannot check for current branch, we rely on
1349 1347 # c.commit.branch instead
1350 1348 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1351 1349 else:
1352 1350 _branch_name, _sha_commit_id, is_head = \
1353 1351 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1354 1352
1355 1353 self.forbid_non_head(is_head, f_path, commit_id=commit_id)
1356 1354 self.check_branch_permission(_branch_name, commit_id=commit_id)
1357 1355
1358 1356 c.default_message = (_('Added file via RhodeCode Enterprise'))
1359 1357 c.f_path = f_path
1360 1358
1361 1359 r_post = self.request.POST
1362 1360 message = r_post.get('message') or c.default_message
1363 1361 filename = r_post.get('filename')
1364 1362 unix_mode = 0
1365 1363 content = convert_line_endings(r_post.get('content', ''), unix_mode)
1366 1364
1367 1365 if not filename:
1368 1366 # If there's no commit, redirect to repo summary
1369 1367 if type(c.commit) is EmptyCommit:
1370 1368 redirect_url = h.route_path(
1371 1369 'repo_summary', repo_name=self.db_repo_name)
1372 1370 else:
1373 1371 redirect_url = default_redirect_url
1374 1372 h.flash(_('No filename specified'), category='warning')
1375 1373 raise HTTPFound(redirect_url)
1376 1374
1377 1375 root_path = f_path
1378 1376 pure_path = self.create_pure_path(root_path, filename)
1379 1377 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1380 1378
1381 1379 author = self._rhodecode_db_user.full_contact
1382 1380 nodes = {
1383 1381 node_path: {
1384 1382 'content': content
1385 1383 }
1386 1384 }
1387 1385
1388 1386 try:
1389 1387
1390 1388 commit = ScmModel().create_nodes(
1391 1389 user=self._rhodecode_db_user.user_id,
1392 1390 repo=self.db_repo,
1393 1391 message=message,
1394 1392 nodes=nodes,
1395 1393 parent_commit=c.commit,
1396 1394 author=author,
1397 1395 )
1398 1396
1399 1397 h.flash(_('Successfully committed new file `{}`').format(
1400 1398 h.escape(node_path)), category='success')
1401 1399
1402 1400 default_redirect_url = h.route_path(
1403 1401 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1404 1402
1405 1403 except NonRelativePathError:
1406 1404 log.exception('Non Relative path found')
1407 1405 h.flash(_('The location specified must be a relative path and must not '
1408 1406 'contain .. in the path'), category='warning')
1409 1407 raise HTTPFound(default_redirect_url)
1410 1408 except (NodeError, NodeAlreadyExistsError) as e:
1411 1409 h.flash(_(h.escape(e)), category='error')
1412 1410 except Exception:
1413 1411 log.exception('Error occurred during commit')
1414 1412 h.flash(_('Error occurred during commit'), category='error')
1415 1413
1416 1414 raise HTTPFound(default_redirect_url)
1417 1415
1418 1416 @LoginRequired()
1419 1417 @HasRepoPermissionAnyDecorator('repository.write', 'repository.admin')
1420 1418 @CSRFRequired()
1421 1419 @view_config(
1422 1420 route_name='repo_files_upload_file', request_method='POST',
1423 1421 renderer='json_ext')
1424 1422 def repo_files_upload_file(self):
1425 1423 _ = self.request.translate
1426 1424 c = self.load_default_context()
1427 1425 commit_id, f_path = self._get_commit_and_path()
1428 1426
1429 1427 self._ensure_not_locked()
1430 1428
1431 1429 c.commit = self._get_commit_or_redirect(commit_id, redirect_after=False)
1432 1430 if c.commit is None:
1433 1431 c.commit = EmptyCommit(alias=self.rhodecode_vcs_repo.alias)
1434 1432
1435 1433 # calculate redirect URL
1436 1434 if self.rhodecode_vcs_repo.is_empty():
1437 1435 default_redirect_url = h.route_path(
1438 1436 'repo_summary', repo_name=self.db_repo_name)
1439 1437 else:
1440 1438 default_redirect_url = h.route_path(
1441 1439 'repo_commit', repo_name=self.db_repo_name, commit_id='tip')
1442 1440
1443 1441 if self.rhodecode_vcs_repo.is_empty():
1444 1442 # for empty repository we cannot check for current branch, we rely on
1445 1443 # c.commit.branch instead
1446 1444 _branch_name, _sha_commit_id, is_head = c.commit.branch, '', True
1447 1445 else:
1448 1446 _branch_name, _sha_commit_id, is_head = \
1449 1447 self._is_valid_head(commit_id, self.rhodecode_vcs_repo)
1450 1448
1451 1449 error = self.forbid_non_head(is_head, f_path, json_mode=True)
1452 1450 if error:
1453 1451 return {
1454 1452 'error': error,
1455 1453 'redirect_url': default_redirect_url
1456 1454 }
1457 1455 error = self.check_branch_permission(_branch_name, json_mode=True)
1458 1456 if error:
1459 1457 return {
1460 1458 'error': error,
1461 1459 'redirect_url': default_redirect_url
1462 1460 }
1463 1461
1464 1462 c.default_message = (_('Uploaded file via RhodeCode Enterprise'))
1465 1463 c.f_path = f_path
1466 1464
1467 1465 r_post = self.request.POST
1468 1466
1469 1467 message = c.default_message
1470 1468 user_message = r_post.getall('message')
1471 1469 if isinstance(user_message, list) and user_message:
1472 1470 # we take the first from duplicated results if it's not empty
1473 1471 message = user_message[0] if user_message[0] else message
1474 1472
1475 1473 nodes = {}
1476 1474
1477 1475 for file_obj in r_post.getall('files_upload') or []:
1478 1476 content = file_obj.file
1479 1477 filename = file_obj.filename
1480 1478
1481 1479 root_path = f_path
1482 1480 pure_path = self.create_pure_path(root_path, filename)
1483 1481 node_path = safe_unicode(bytes(pure_path).lstrip('/'))
1484 1482
1485 1483 nodes[node_path] = {
1486 1484 'content': content
1487 1485 }
1488 1486
1489 1487 if not nodes:
1490 1488 error = 'missing files'
1491 1489 return {
1492 1490 'error': error,
1493 1491 'redirect_url': default_redirect_url
1494 1492 }
1495 1493
1496 1494 author = self._rhodecode_db_user.full_contact
1497 1495
1498 1496 try:
1499 1497 commit = ScmModel().create_nodes(
1500 1498 user=self._rhodecode_db_user.user_id,
1501 1499 repo=self.db_repo,
1502 1500 message=message,
1503 1501 nodes=nodes,
1504 1502 parent_commit=c.commit,
1505 1503 author=author,
1506 1504 )
1507 1505 if len(nodes) == 1:
1508 1506 flash_message = _('Successfully committed {} new files').format(len(nodes))
1509 1507 else:
1510 1508 flash_message = _('Successfully committed 1 new file')
1511 1509
1512 1510 h.flash(flash_message, category='success')
1513 1511
1514 1512 default_redirect_url = h.route_path(
1515 1513 'repo_commit', repo_name=self.db_repo_name, commit_id=commit.raw_id)
1516 1514
1517 1515 except NonRelativePathError:
1518 1516 log.exception('Non Relative path found')
1519 1517 error = _('The location specified must be a relative path and must not '
1520 1518 'contain .. in the path')
1521 1519 h.flash(error, category='warning')
1522 1520
1523 1521 return {
1524 1522 'error': error,
1525 1523 'redirect_url': default_redirect_url
1526 1524 }
1527 1525 except (NodeError, NodeAlreadyExistsError) as e:
1528 1526 error = h.escape(e)
1529 1527 h.flash(error, category='error')
1530 1528
1531 1529 return {
1532 1530 'error': error,
1533 1531 'redirect_url': default_redirect_url
1534 1532 }
1535 1533 except Exception:
1536 1534 log.exception('Error occurred during commit')
1537 1535 error = _('Error occurred during commit')
1538 1536 h.flash(error, category='error')
1539 1537 return {
1540 1538 'error': error,
1541 1539 'redirect_url': default_redirect_url
1542 1540 }
1543 1541
1544 1542 return {
1545 1543 'error': None,
1546 1544 'redirect_url': default_redirect_url
1547 1545 }
@@ -1,188 +1,184 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Various version Control System version lib (vcs) management abstraction layer
23 23 for Python. Build with server client architecture.
24 24 """
25 25 import atexit
26 26 import logging
27 27 import urlparse
28 28 from cStringIO import StringIO
29 29
30 30 import rhodecode
31 31 from rhodecode.lib.vcs.conf import settings
32 32 from rhodecode.lib.vcs.backends import get_vcs_instance, get_backend
33 33 from rhodecode.lib.vcs.exceptions import (
34 34 VCSError, RepositoryError, CommitError, VCSCommunicationError)
35 35
36 36 VERSION = (0, 5, 0, 'dev')
37 37
38 38 __version__ = '.'.join((str(each) for each in VERSION[:4]))
39 39
40 40 __all__ = [
41 41 'get_version', 'get_vcs_instance', 'get_backend',
42 42 'VCSError', 'RepositoryError', 'CommitError', 'VCSCommunicationError'
43 43 ]
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47 # The pycurl library directly accesses C API functions and is not patched by
48 48 # gevent. This will potentially lead to deadlocks due to incompatibility to
49 49 # gevent. Therefore we check if gevent is active and import a gevent compatible
50 50 # wrapper in that case.
51 51 try:
52 52 from gevent import monkey
53 53 if monkey.is_module_patched('__builtin__'):
54 54 import geventcurl as pycurl
55 55 log.debug('Using gevent comapatible pycurl: %s', pycurl)
56 56 else:
57 57 import pycurl
58 58 except ImportError:
59 59 import pycurl
60 60
61 61
62 62 def get_version():
63 63 """
64 64 Returns shorter version (digit parts only) as string.
65 65 """
66 66 return '.'.join((str(each) for each in VERSION[:3]))
67 67
68 68
69 69 def connect_http(server_and_port):
70 70 from rhodecode.lib.vcs import connection, client_http
71 71 from rhodecode.lib.middleware.utils import scm_app
72 72
73 73 session_factory = client_http.ThreadlocalSessionFactory()
74 74
75 connection.Git = client_http.RepoMaker(
75 connection.Git = client_http.RemoteVCSMaker(
76 76 server_and_port, '/git', 'git', session_factory)
77 connection.Hg = client_http.RepoMaker(
77 connection.Hg = client_http.RemoteVCSMaker(
78 78 server_and_port, '/hg', 'hg', session_factory)
79 connection.Svn = client_http.RepoMaker(
79 connection.Svn = client_http.RemoteVCSMaker(
80 80 server_and_port, '/svn', 'svn', session_factory)
81 81 connection.Service = client_http.ServiceConnection(
82 82 server_and_port, '/_service', session_factory)
83 83
84 84 scm_app.HG_REMOTE_WSGI = client_http.VcsHttpProxy(
85 85 server_and_port, '/proxy/hg')
86 86 scm_app.GIT_REMOTE_WSGI = client_http.VcsHttpProxy(
87 87 server_and_port, '/proxy/git')
88 88
89 89 @atexit.register
90 90 def free_connection_resources():
91 91 connection.Git = None
92 92 connection.Hg = None
93 93 connection.Svn = None
94 94 connection.Service = None
95 95
96 96
97 97 def connect_vcs(server_and_port, protocol):
98 98 """
99 99 Initializes the connection to the vcs server.
100 100
101 101 :param server_and_port: str, e.g. "localhost:9900"
102 102 :param protocol: str or "http"
103 103 """
104 104 if protocol == 'http':
105 105 connect_http(server_and_port)
106 106 else:
107 107 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
108 108
109 109
110 def create_vcsserver_proxy(server_and_port, protocol):
111 if protocol == 'http':
112 return _create_vcsserver_proxy_http(server_and_port)
113 else:
114 raise Exception('Invalid vcs server protocol "{}"'.format(protocol))
115
116
117 def _create_vcsserver_proxy_http(server_and_port):
118 from rhodecode.lib.vcs import client_http
119
120 session = _create_http_rpc_session()
121 url = urlparse.urljoin('http://%s' % server_and_port, '/server')
122 return client_http.RemoteObject(url, session)
123
124
125 110 class CurlSession(object):
126 111 """
127 112 Modeled so that it provides a subset of the requests interface.
128 113
129 114 This has been created so that it does only provide a minimal API for our
130 115 needs. The parts which it provides are based on the API of the library
131 116 `requests` which allows us to easily benchmark against it.
132 117
133 118 Please have a look at the class :class:`requests.Session` when you extend
134 119 it.
135 120 """
136 121
137 122 def __init__(self):
138 123 curl = pycurl.Curl()
139 124 # TODO: johbo: I did test with 7.19 of libcurl. This version has
140 125 # trouble with 100 - continue being set in the expect header. This
141 126 # can lead to massive performance drops, switching it off here.
142 127 curl.setopt(curl.HTTPHEADER, ["Expect:"])
143 128 curl.setopt(curl.TCP_NODELAY, True)
144 129 curl.setopt(curl.PROTOCOLS, curl.PROTO_HTTP)
145 130 curl.setopt(curl.USERAGENT, 'RhodeCode HTTP {}'.format(rhodecode.__version__))
146 131 self._curl = curl
147 132
148 133 def post(self, url, data, allow_redirects=False):
149 134 response_buffer = StringIO()
150 135
151 136 curl = self._curl
152 137 curl.setopt(curl.URL, url)
153 138 curl.setopt(curl.POST, True)
154 139 curl.setopt(curl.POSTFIELDS, data)
155 140 curl.setopt(curl.FOLLOWLOCATION, allow_redirects)
156 141 curl.setopt(curl.WRITEDATA, response_buffer)
157 142 curl.perform()
158 143
159 144 status_code = curl.getinfo(pycurl.HTTP_CODE)
160 145
161 146 return CurlResponse(response_buffer, status_code)
162 147
163 148
164 149 class CurlResponse(object):
165 150 """
166 151 The response of a request, modeled after the requests API.
167 152
168 153 This class provides a subset of the response interface known from the
169 154 library `requests`. It is intentionally kept similar, so that we can use
170 155 `requests` as a drop in replacement for benchmarking purposes.
171 156 """
172 157
173 158 def __init__(self, response_buffer, status_code):
174 159 self._response_buffer = response_buffer
175 160 self._status_code = status_code
176 161
177 162 @property
178 163 def content(self):
179 return self._response_buffer.getvalue()
164 try:
165 return self._response_buffer.getvalue()
166 finally:
167 self._response_buffer.close()
180 168
181 169 @property
182 170 def status_code(self):
183 171 return self._status_code
184 172
173 def iter_content(self, chunk_size):
174 self._response_buffer.seek(0)
175 while 1:
176 chunk = self._response_buffer.read(chunk_size)
177 if not chunk:
178 break
179 yield chunk
180
185 181
186 182 def _create_http_rpc_session():
187 183 session = CurlSession()
188 184 return session
@@ -1,1881 +1,1890 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Base module for all VCS systems
23 23 """
24 24 import os
25 25 import re
26 26 import time
27 27 import shutil
28 28 import datetime
29 29 import fnmatch
30 30 import itertools
31 31 import logging
32 32 import collections
33 33 import warnings
34 34
35 35 from zope.cachedescriptors.property import Lazy as LazyProperty
36 36
37 37 from pyramid import compat
38 38
39 39 import rhodecode
40 40 from rhodecode.translation import lazy_ugettext
41 41 from rhodecode.lib.utils2 import safe_str, safe_unicode, CachedProperty
42 42 from rhodecode.lib.vcs import connection
43 43 from rhodecode.lib.vcs.utils import author_name, author_email
44 44 from rhodecode.lib.vcs.conf import settings
45 45 from rhodecode.lib.vcs.exceptions import (
46 46 CommitError, EmptyRepositoryError, NodeAlreadyAddedError,
47 47 NodeAlreadyChangedError, NodeAlreadyExistsError, NodeAlreadyRemovedError,
48 48 NodeDoesNotExistError, NodeNotChangedError, VCSError,
49 49 ImproperArchiveTypeError, BranchDoesNotExistError, CommitDoesNotExistError,
50 50 RepositoryError)
51 51
52 52
53 53 log = logging.getLogger(__name__)
54 54
55 55
56 56 FILEMODE_DEFAULT = 0o100644
57 57 FILEMODE_EXECUTABLE = 0o100755
58 58 EMPTY_COMMIT_ID = '0' * 40
59 59
60 60 Reference = collections.namedtuple('Reference', ('type', 'name', 'commit_id'))
61 61
62 62
63 63 class MergeFailureReason(object):
64 64 """
65 65 Enumeration with all the reasons why the server side merge could fail.
66 66
67 67 DO NOT change the number of the reasons, as they may be stored in the
68 68 database.
69 69
70 70 Changing the name of a reason is acceptable and encouraged to deprecate old
71 71 reasons.
72 72 """
73 73
74 74 # Everything went well.
75 75 NONE = 0
76 76
77 77 # An unexpected exception was raised. Check the logs for more details.
78 78 UNKNOWN = 1
79 79
80 80 # The merge was not successful, there are conflicts.
81 81 MERGE_FAILED = 2
82 82
83 83 # The merge succeeded but we could not push it to the target repository.
84 84 PUSH_FAILED = 3
85 85
86 86 # The specified target is not a head in the target repository.
87 87 TARGET_IS_NOT_HEAD = 4
88 88
89 89 # The source repository contains more branches than the target. Pushing
90 90 # the merge will create additional branches in the target.
91 91 HG_SOURCE_HAS_MORE_BRANCHES = 5
92 92
93 93 # The target reference has multiple heads. That does not allow to correctly
94 94 # identify the target location. This could only happen for mercurial
95 95 # branches.
96 96 HG_TARGET_HAS_MULTIPLE_HEADS = 6
97 97
98 98 # The target repository is locked
99 99 TARGET_IS_LOCKED = 7
100 100
101 101 # Deprecated, use MISSING_TARGET_REF or MISSING_SOURCE_REF instead.
102 102 # A involved commit could not be found.
103 103 _DEPRECATED_MISSING_COMMIT = 8
104 104
105 105 # The target repo reference is missing.
106 106 MISSING_TARGET_REF = 9
107 107
108 108 # The source repo reference is missing.
109 109 MISSING_SOURCE_REF = 10
110 110
111 111 # The merge was not successful, there are conflicts related to sub
112 112 # repositories.
113 113 SUBREPO_MERGE_FAILED = 11
114 114
115 115
116 116 class UpdateFailureReason(object):
117 117 """
118 118 Enumeration with all the reasons why the pull request update could fail.
119 119
120 120 DO NOT change the number of the reasons, as they may be stored in the
121 121 database.
122 122
123 123 Changing the name of a reason is acceptable and encouraged to deprecate old
124 124 reasons.
125 125 """
126 126
127 127 # Everything went well.
128 128 NONE = 0
129 129
130 130 # An unexpected exception was raised. Check the logs for more details.
131 131 UNKNOWN = 1
132 132
133 133 # The pull request is up to date.
134 134 NO_CHANGE = 2
135 135
136 136 # The pull request has a reference type that is not supported for update.
137 137 WRONG_REF_TYPE = 3
138 138
139 139 # Update failed because the target reference is missing.
140 140 MISSING_TARGET_REF = 4
141 141
142 142 # Update failed because the source reference is missing.
143 143 MISSING_SOURCE_REF = 5
144 144
145 145
146 146 class MergeResponse(object):
147 147
148 148 # uses .format(**metadata) for variables
149 149 MERGE_STATUS_MESSAGES = {
150 150 MergeFailureReason.NONE: lazy_ugettext(
151 151 u'This pull request can be automatically merged.'),
152 152 MergeFailureReason.UNKNOWN: lazy_ugettext(
153 153 u'This pull request cannot be merged because of an unhandled exception. '
154 154 u'{exception}'),
155 155 MergeFailureReason.MERGE_FAILED: lazy_ugettext(
156 156 u'This pull request cannot be merged because of merge conflicts.'),
157 157 MergeFailureReason.PUSH_FAILED: lazy_ugettext(
158 158 u'This pull request could not be merged because push to '
159 159 u'target:`{target}@{merge_commit}` failed.'),
160 160 MergeFailureReason.TARGET_IS_NOT_HEAD: lazy_ugettext(
161 161 u'This pull request cannot be merged because the target '
162 162 u'`{target_ref.name}` is not a head.'),
163 163 MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES: lazy_ugettext(
164 164 u'This pull request cannot be merged because the source contains '
165 165 u'more branches than the target.'),
166 166 MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
167 167 u'This pull request cannot be merged because the target `{target_ref.name}` '
168 168 u'has multiple heads: `{heads}`.'),
169 169 MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
170 170 u'This pull request cannot be merged because the target repository is '
171 171 u'locked by {locked_by}.'),
172 172
173 173 MergeFailureReason.MISSING_TARGET_REF: lazy_ugettext(
174 174 u'This pull request cannot be merged because the target '
175 175 u'reference `{target_ref.name}` is missing.'),
176 176 MergeFailureReason.MISSING_SOURCE_REF: lazy_ugettext(
177 177 u'This pull request cannot be merged because the source '
178 178 u'reference `{source_ref.name}` is missing.'),
179 179 MergeFailureReason.SUBREPO_MERGE_FAILED: lazy_ugettext(
180 180 u'This pull request cannot be merged because of conflicts related '
181 181 u'to sub repositories.'),
182 182
183 183 # Deprecations
184 184 MergeFailureReason._DEPRECATED_MISSING_COMMIT: lazy_ugettext(
185 185 u'This pull request cannot be merged because the target or the '
186 186 u'source reference is missing.'),
187 187
188 188 }
189 189
190 190 def __init__(self, possible, executed, merge_ref, failure_reason, metadata=None):
191 191 self.possible = possible
192 192 self.executed = executed
193 193 self.merge_ref = merge_ref
194 194 self.failure_reason = failure_reason
195 195 self.metadata = metadata or {}
196 196
197 197 def __repr__(self):
198 198 return '<MergeResponse:{} {}>'.format(self.label, self.failure_reason)
199 199
200 200 def __eq__(self, other):
201 201 same_instance = isinstance(other, self.__class__)
202 202 return same_instance \
203 203 and self.possible == other.possible \
204 204 and self.executed == other.executed \
205 205 and self.failure_reason == other.failure_reason
206 206
207 207 @property
208 208 def label(self):
209 209 label_dict = dict((v, k) for k, v in MergeFailureReason.__dict__.items() if
210 210 not k.startswith('_'))
211 211 return label_dict.get(self.failure_reason)
212 212
213 213 @property
214 214 def merge_status_message(self):
215 215 """
216 216 Return a human friendly error message for the given merge status code.
217 217 """
218 218 msg = safe_unicode(self.MERGE_STATUS_MESSAGES[self.failure_reason])
219 219 try:
220 220 return msg.format(**self.metadata)
221 221 except Exception:
222 222 log.exception('Failed to format %s message', self)
223 223 return msg
224 224
225 225 def asdict(self):
226 226 data = {}
227 227 for k in ['possible', 'executed', 'merge_ref', 'failure_reason',
228 228 'merge_status_message']:
229 229 data[k] = getattr(self, k)
230 230 return data
231 231
232 232
233 233 class BaseRepository(object):
234 234 """
235 235 Base Repository for final backends
236 236
237 237 .. attribute:: DEFAULT_BRANCH_NAME
238 238
239 239 name of default branch (i.e. "trunk" for svn, "master" for git etc.
240 240
241 241 .. attribute:: commit_ids
242 242
243 243 list of all available commit ids, in ascending order
244 244
245 245 .. attribute:: path
246 246
247 247 absolute path to the repository
248 248
249 249 .. attribute:: bookmarks
250 250
251 251 Mapping from name to :term:`Commit ID` of the bookmark. Empty in case
252 252 there are no bookmarks or the backend implementation does not support
253 253 bookmarks.
254 254
255 255 .. attribute:: tags
256 256
257 257 Mapping from name to :term:`Commit ID` of the tag.
258 258
259 259 """
260 260
261 261 DEFAULT_BRANCH_NAME = None
262 262 DEFAULT_CONTACT = u"Unknown"
263 263 DEFAULT_DESCRIPTION = u"unknown"
264 264 EMPTY_COMMIT_ID = '0' * 40
265 265
266 266 path = None
267 267
268 268 _is_empty = None
269 269 _commit_ids = {}
270 270
271 271 def __init__(self, repo_path, config=None, create=False, **kwargs):
272 272 """
273 273 Initializes repository. Raises RepositoryError if repository could
274 274 not be find at the given ``repo_path`` or directory at ``repo_path``
275 275 exists and ``create`` is set to True.
276 276
277 277 :param repo_path: local path of the repository
278 278 :param config: repository configuration
279 279 :param create=False: if set to True, would try to create repository.
280 280 :param src_url=None: if set, should be proper url from which repository
281 281 would be cloned; requires ``create`` parameter to be set to True -
282 282 raises RepositoryError if src_url is set and create evaluates to
283 283 False
284 284 """
285 285 raise NotImplementedError
286 286
287 287 def __repr__(self):
288 288 return '<%s at %s>' % (self.__class__.__name__, self.path)
289 289
290 290 def __len__(self):
291 291 return self.count()
292 292
293 293 def __eq__(self, other):
294 294 same_instance = isinstance(other, self.__class__)
295 295 return same_instance and other.path == self.path
296 296
297 297 def __ne__(self, other):
298 298 return not self.__eq__(other)
299 299
300 300 def get_create_shadow_cache_pr_path(self, db_repo):
301 301 path = db_repo.cached_diffs_dir
302 302 if not os.path.exists(path):
303 303 os.makedirs(path, 0o755)
304 304 return path
305 305
306 306 @classmethod
307 307 def get_default_config(cls, default=None):
308 308 config = Config()
309 309 if default and isinstance(default, list):
310 310 for section, key, val in default:
311 311 config.set(section, key, val)
312 312 return config
313 313
314 314 @LazyProperty
315 315 def _remote(self):
316 316 raise NotImplementedError
317 317
318 318 def _heads(self, branch=None):
319 319 return []
320 320
321 321 @LazyProperty
322 322 def EMPTY_COMMIT(self):
323 323 return EmptyCommit(self.EMPTY_COMMIT_ID)
324 324
325 325 @LazyProperty
326 326 def alias(self):
327 327 for k, v in settings.BACKENDS.items():
328 328 if v.split('.')[-1] == str(self.__class__.__name__):
329 329 return k
330 330
331 331 @LazyProperty
332 332 def name(self):
333 333 return safe_unicode(os.path.basename(self.path))
334 334
335 335 @LazyProperty
336 336 def description(self):
337 337 raise NotImplementedError
338 338
339 339 def refs(self):
340 340 """
341 341 returns a `dict` with branches, bookmarks, tags, and closed_branches
342 342 for this repository
343 343 """
344 344 return dict(
345 345 branches=self.branches,
346 346 branches_closed=self.branches_closed,
347 347 tags=self.tags,
348 348 bookmarks=self.bookmarks
349 349 )
350 350
351 351 @LazyProperty
352 352 def branches(self):
353 353 """
354 354 A `dict` which maps branch names to commit ids.
355 355 """
356 356 raise NotImplementedError
357 357
358 358 @LazyProperty
359 359 def branches_closed(self):
360 360 """
361 361 A `dict` which maps tags names to commit ids.
362 362 """
363 363 raise NotImplementedError
364 364
365 365 @LazyProperty
366 366 def bookmarks(self):
367 367 """
368 368 A `dict` which maps tags names to commit ids.
369 369 """
370 370 raise NotImplementedError
371 371
372 372 @LazyProperty
373 373 def tags(self):
374 374 """
375 375 A `dict` which maps tags names to commit ids.
376 376 """
377 377 raise NotImplementedError
378 378
379 379 @LazyProperty
380 380 def size(self):
381 381 """
382 382 Returns combined size in bytes for all repository files
383 383 """
384 384 tip = self.get_commit()
385 385 return tip.size
386 386
387 387 def size_at_commit(self, commit_id):
388 388 commit = self.get_commit(commit_id)
389 389 return commit.size
390 390
391 391 def _check_for_empty(self):
392 392 no_commits = len(self._commit_ids) == 0
393 393 if no_commits:
394 394 # check on remote to be sure
395 395 return self._remote.is_empty()
396 396 else:
397 397 return False
398 398
399 399 def is_empty(self):
400 400 if rhodecode.is_test:
401 401 return self._check_for_empty()
402 402
403 403 if self._is_empty is None:
404 404 # cache empty for production, but not tests
405 405 self._is_empty = self._check_for_empty()
406 406
407 407 return self._is_empty
408 408
409 409 @staticmethod
410 410 def check_url(url, config):
411 411 """
412 412 Function will check given url and try to verify if it's a valid
413 413 link.
414 414 """
415 415 raise NotImplementedError
416 416
417 417 @staticmethod
418 418 def is_valid_repository(path):
419 419 """
420 420 Check if given `path` contains a valid repository of this backend
421 421 """
422 422 raise NotImplementedError
423 423
424 424 # ==========================================================================
425 425 # COMMITS
426 426 # ==========================================================================
427 427
428 428 @CachedProperty
429 429 def commit_ids(self):
430 430 raise NotImplementedError
431 431
432 432 def append_commit_id(self, commit_id):
433 433 if commit_id not in self.commit_ids:
434 434 self._rebuild_cache(self.commit_ids + [commit_id])
435 435
436 436 # clear cache
437 437 self._invalidate_prop_cache('commit_ids')
438 438 self._is_empty = False
439 439
440 440 def get_commit(self, commit_id=None, commit_idx=None, pre_load=None, translate_tag=None):
441 441 """
442 442 Returns instance of `BaseCommit` class. If `commit_id` and `commit_idx`
443 443 are both None, most recent commit is returned.
444 444
445 445 :param pre_load: Optional. List of commit attributes to load.
446 446
447 447 :raises ``EmptyRepositoryError``: if there are no commits
448 448 """
449 449 raise NotImplementedError
450 450
451 451 def __iter__(self):
452 452 for commit_id in self.commit_ids:
453 453 yield self.get_commit(commit_id=commit_id)
454 454
455 455 def get_commits(
456 456 self, start_id=None, end_id=None, start_date=None, end_date=None,
457 457 branch_name=None, show_hidden=False, pre_load=None, translate_tags=None):
458 458 """
459 459 Returns iterator of `BaseCommit` objects from start to end
460 460 not inclusive. This should behave just like a list, ie. end is not
461 461 inclusive.
462 462
463 463 :param start_id: None or str, must be a valid commit id
464 464 :param end_id: None or str, must be a valid commit id
465 465 :param start_date:
466 466 :param end_date:
467 467 :param branch_name:
468 468 :param show_hidden:
469 469 :param pre_load:
470 470 :param translate_tags:
471 471 """
472 472 raise NotImplementedError
473 473
474 474 def __getitem__(self, key):
475 475 """
476 476 Allows index based access to the commit objects of this repository.
477 477 """
478 478 pre_load = ["author", "branch", "date", "message", "parents"]
479 479 if isinstance(key, slice):
480 480 return self._get_range(key, pre_load)
481 481 return self.get_commit(commit_idx=key, pre_load=pre_load)
482 482
483 483 def _get_range(self, slice_obj, pre_load):
484 484 for commit_id in self.commit_ids.__getitem__(slice_obj):
485 485 yield self.get_commit(commit_id=commit_id, pre_load=pre_load)
486 486
487 487 def count(self):
488 488 return len(self.commit_ids)
489 489
490 490 def tag(self, name, user, commit_id=None, message=None, date=None, **opts):
491 491 """
492 492 Creates and returns a tag for the given ``commit_id``.
493 493
494 494 :param name: name for new tag
495 495 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
496 496 :param commit_id: commit id for which new tag would be created
497 497 :param message: message of the tag's commit
498 498 :param date: date of tag's commit
499 499
500 500 :raises TagAlreadyExistError: if tag with same name already exists
501 501 """
502 502 raise NotImplementedError
503 503
504 504 def remove_tag(self, name, user, message=None, date=None):
505 505 """
506 506 Removes tag with the given ``name``.
507 507
508 508 :param name: name of the tag to be removed
509 509 :param user: full username, i.e.: "Joe Doe <joe.doe@example.com>"
510 510 :param message: message of the tag's removal commit
511 511 :param date: date of tag's removal commit
512 512
513 513 :raises TagDoesNotExistError: if tag with given name does not exists
514 514 """
515 515 raise NotImplementedError
516 516
517 517 def get_diff(
518 518 self, commit1, commit2, path=None, ignore_whitespace=False,
519 519 context=3, path1=None):
520 520 """
521 521 Returns (git like) *diff*, as plain text. Shows changes introduced by
522 522 `commit2` since `commit1`.
523 523
524 524 :param commit1: Entry point from which diff is shown. Can be
525 525 ``self.EMPTY_COMMIT`` - in this case, patch showing all
526 526 the changes since empty state of the repository until `commit2`
527 527 :param commit2: Until which commit changes should be shown.
528 528 :param path: Can be set to a path of a file to create a diff of that
529 529 file. If `path1` is also set, this value is only associated to
530 530 `commit2`.
531 531 :param ignore_whitespace: If set to ``True``, would not show whitespace
532 532 changes. Defaults to ``False``.
533 533 :param context: How many lines before/after changed lines should be
534 534 shown. Defaults to ``3``.
535 535 :param path1: Can be set to a path to associate with `commit1`. This
536 536 parameter works only for backends which support diff generation for
537 537 different paths. Other backends will raise a `ValueError` if `path1`
538 538 is set and has a different value than `path`.
539 539 :param file_path: filter this diff by given path pattern
540 540 """
541 541 raise NotImplementedError
542 542
543 543 def strip(self, commit_id, branch=None):
544 544 """
545 545 Strip given commit_id from the repository
546 546 """
547 547 raise NotImplementedError
548 548
549 549 def get_common_ancestor(self, commit_id1, commit_id2, repo2):
550 550 """
551 551 Return a latest common ancestor commit if one exists for this repo
552 552 `commit_id1` vs `commit_id2` from `repo2`.
553 553
554 554 :param commit_id1: Commit it from this repository to use as a
555 555 target for the comparison.
556 556 :param commit_id2: Source commit id to use for comparison.
557 557 :param repo2: Source repository to use for comparison.
558 558 """
559 559 raise NotImplementedError
560 560
561 561 def compare(self, commit_id1, commit_id2, repo2, merge, pre_load=None):
562 562 """
563 563 Compare this repository's revision `commit_id1` with `commit_id2`.
564 564
565 565 Returns a tuple(commits, ancestor) that would be merged from
566 566 `commit_id2`. Doing a normal compare (``merge=False``), ``None``
567 567 will be returned as ancestor.
568 568
569 569 :param commit_id1: Commit it from this repository to use as a
570 570 target for the comparison.
571 571 :param commit_id2: Source commit id to use for comparison.
572 572 :param repo2: Source repository to use for comparison.
573 573 :param merge: If set to ``True`` will do a merge compare which also
574 574 returns the common ancestor.
575 575 :param pre_load: Optional. List of commit attributes to load.
576 576 """
577 577 raise NotImplementedError
578 578
579 579 def merge(self, repo_id, workspace_id, target_ref, source_repo, source_ref,
580 580 user_name='', user_email='', message='', dry_run=False,
581 581 use_rebase=False, close_branch=False):
582 582 """
583 583 Merge the revisions specified in `source_ref` from `source_repo`
584 584 onto the `target_ref` of this repository.
585 585
586 586 `source_ref` and `target_ref` are named tupls with the following
587 587 fields `type`, `name` and `commit_id`.
588 588
589 589 Returns a MergeResponse named tuple with the following fields
590 590 'possible', 'executed', 'source_commit', 'target_commit',
591 591 'merge_commit'.
592 592
593 593 :param repo_id: `repo_id` target repo id.
594 594 :param workspace_id: `workspace_id` unique identifier.
595 595 :param target_ref: `target_ref` points to the commit on top of which
596 596 the `source_ref` should be merged.
597 597 :param source_repo: The repository that contains the commits to be
598 598 merged.
599 599 :param source_ref: `source_ref` points to the topmost commit from
600 600 the `source_repo` which should be merged.
601 601 :param user_name: Merge commit `user_name`.
602 602 :param user_email: Merge commit `user_email`.
603 603 :param message: Merge commit `message`.
604 604 :param dry_run: If `True` the merge will not take place.
605 605 :param use_rebase: If `True` commits from the source will be rebased
606 606 on top of the target instead of being merged.
607 607 :param close_branch: If `True` branch will be close before merging it
608 608 """
609 609 if dry_run:
610 610 message = message or settings.MERGE_DRY_RUN_MESSAGE
611 611 user_email = user_email or settings.MERGE_DRY_RUN_EMAIL
612 612 user_name = user_name or settings.MERGE_DRY_RUN_USER
613 613 else:
614 614 if not user_name:
615 615 raise ValueError('user_name cannot be empty')
616 616 if not user_email:
617 617 raise ValueError('user_email cannot be empty')
618 618 if not message:
619 619 raise ValueError('message cannot be empty')
620 620
621 621 try:
622 622 return self._merge_repo(
623 623 repo_id, workspace_id, target_ref, source_repo,
624 624 source_ref, message, user_name, user_email, dry_run=dry_run,
625 625 use_rebase=use_rebase, close_branch=close_branch)
626 626 except RepositoryError as exc:
627 627 log.exception('Unexpected failure when running merge, dry-run=%s', dry_run)
628 628 return MergeResponse(
629 629 False, False, None, MergeFailureReason.UNKNOWN,
630 630 metadata={'exception': str(exc)})
631 631
632 632 def _merge_repo(self, repo_id, workspace_id, target_ref,
633 633 source_repo, source_ref, merge_message,
634 634 merger_name, merger_email, dry_run=False,
635 635 use_rebase=False, close_branch=False):
636 636 """Internal implementation of merge."""
637 637 raise NotImplementedError
638 638
639 639 def _maybe_prepare_merge_workspace(
640 640 self, repo_id, workspace_id, target_ref, source_ref):
641 641 """
642 642 Create the merge workspace.
643 643
644 644 :param workspace_id: `workspace_id` unique identifier.
645 645 """
646 646 raise NotImplementedError
647 647
648 648 def _get_legacy_shadow_repository_path(self, workspace_id):
649 649 """
650 650 Legacy version that was used before. We still need it for
651 651 backward compat
652 652 """
653 653 return os.path.join(
654 654 os.path.dirname(self.path),
655 655 '.__shadow_%s_%s' % (os.path.basename(self.path), workspace_id))
656 656
657 657 def _get_shadow_repository_path(self, repo_id, workspace_id):
658 658 # The name of the shadow repository must start with '.', so it is
659 659 # skipped by 'rhodecode.lib.utils.get_filesystem_repos'.
660 660 legacy_repository_path = self._get_legacy_shadow_repository_path(workspace_id)
661 661 if os.path.exists(legacy_repository_path):
662 662 return legacy_repository_path
663 663 else:
664 664 return os.path.join(
665 665 os.path.dirname(self.path),
666 666 '.__shadow_repo_%s_%s' % (repo_id, workspace_id))
667 667
668 668 def cleanup_merge_workspace(self, repo_id, workspace_id):
669 669 """
670 670 Remove merge workspace.
671 671
672 672 This function MUST not fail in case there is no workspace associated to
673 673 the given `workspace_id`.
674 674
675 675 :param workspace_id: `workspace_id` unique identifier.
676 676 """
677 677 shadow_repository_path = self._get_shadow_repository_path(repo_id, workspace_id)
678 678 shadow_repository_path_del = '{}.{}.delete'.format(
679 679 shadow_repository_path, time.time())
680 680
681 681 # move the shadow repo, so it never conflicts with the one used.
682 682 # we use this method because shutil.rmtree had some edge case problems
683 683 # removing symlinked repositories
684 684 if not os.path.isdir(shadow_repository_path):
685 685 return
686 686
687 687 shutil.move(shadow_repository_path, shadow_repository_path_del)
688 688 try:
689 689 shutil.rmtree(shadow_repository_path_del, ignore_errors=False)
690 690 except Exception:
691 691 log.exception('Failed to gracefully remove shadow repo under %s',
692 692 shadow_repository_path_del)
693 693 shutil.rmtree(shadow_repository_path_del, ignore_errors=True)
694 694
695 695 # ========== #
696 696 # COMMIT API #
697 697 # ========== #
698 698
699 699 @LazyProperty
700 700 def in_memory_commit(self):
701 701 """
702 702 Returns :class:`InMemoryCommit` object for this repository.
703 703 """
704 704 raise NotImplementedError
705 705
706 706 # ======================== #
707 707 # UTILITIES FOR SUBCLASSES #
708 708 # ======================== #
709 709
710 710 def _validate_diff_commits(self, commit1, commit2):
711 711 """
712 712 Validates that the given commits are related to this repository.
713 713
714 714 Intended as a utility for sub classes to have a consistent validation
715 715 of input parameters in methods like :meth:`get_diff`.
716 716 """
717 717 self._validate_commit(commit1)
718 718 self._validate_commit(commit2)
719 719 if (isinstance(commit1, EmptyCommit) and
720 720 isinstance(commit2, EmptyCommit)):
721 721 raise ValueError("Cannot compare two empty commits")
722 722
723 723 def _validate_commit(self, commit):
724 724 if not isinstance(commit, BaseCommit):
725 725 raise TypeError(
726 726 "%s is not of type BaseCommit" % repr(commit))
727 727 if commit.repository != self and not isinstance(commit, EmptyCommit):
728 728 raise ValueError(
729 729 "Commit %s must be a valid commit from this repository %s, "
730 730 "related to this repository instead %s." %
731 731 (commit, self, commit.repository))
732 732
733 733 def _validate_commit_id(self, commit_id):
734 734 if not isinstance(commit_id, compat.string_types):
735 735 raise TypeError("commit_id must be a string value")
736 736
737 737 def _validate_commit_idx(self, commit_idx):
738 738 if not isinstance(commit_idx, (int, long)):
739 739 raise TypeError("commit_idx must be a numeric value")
740 740
741 741 def _validate_branch_name(self, branch_name):
742 742 if branch_name and branch_name not in self.branches_all:
743 743 msg = ("Branch %s not found in %s" % (branch_name, self))
744 744 raise BranchDoesNotExistError(msg)
745 745
746 746 #
747 747 # Supporting deprecated API parts
748 748 # TODO: johbo: consider to move this into a mixin
749 749 #
750 750
751 751 @property
752 752 def EMPTY_CHANGESET(self):
753 753 warnings.warn(
754 754 "Use EMPTY_COMMIT or EMPTY_COMMIT_ID instead", DeprecationWarning)
755 755 return self.EMPTY_COMMIT_ID
756 756
757 757 @property
758 758 def revisions(self):
759 759 warnings.warn("Use commits attribute instead", DeprecationWarning)
760 760 return self.commit_ids
761 761
762 762 @revisions.setter
763 763 def revisions(self, value):
764 764 warnings.warn("Use commits attribute instead", DeprecationWarning)
765 765 self.commit_ids = value
766 766
767 767 def get_changeset(self, revision=None, pre_load=None):
768 768 warnings.warn("Use get_commit instead", DeprecationWarning)
769 769 commit_id = None
770 770 commit_idx = None
771 771 if isinstance(revision, compat.string_types):
772 772 commit_id = revision
773 773 else:
774 774 commit_idx = revision
775 775 return self.get_commit(
776 776 commit_id=commit_id, commit_idx=commit_idx, pre_load=pre_load)
777 777
778 778 def get_changesets(
779 779 self, start=None, end=None, start_date=None, end_date=None,
780 780 branch_name=None, pre_load=None):
781 781 warnings.warn("Use get_commits instead", DeprecationWarning)
782 782 start_id = self._revision_to_commit(start)
783 783 end_id = self._revision_to_commit(end)
784 784 return self.get_commits(
785 785 start_id=start_id, end_id=end_id, start_date=start_date,
786 786 end_date=end_date, branch_name=branch_name, pre_load=pre_load)
787 787
788 788 def _revision_to_commit(self, revision):
789 789 """
790 790 Translates a revision to a commit_id
791 791
792 792 Helps to support the old changeset based API which allows to use
793 793 commit ids and commit indices interchangeable.
794 794 """
795 795 if revision is None:
796 796 return revision
797 797
798 798 if isinstance(revision, compat.string_types):
799 799 commit_id = revision
800 800 else:
801 801 commit_id = self.commit_ids[revision]
802 802 return commit_id
803 803
804 804 @property
805 805 def in_memory_changeset(self):
806 806 warnings.warn("Use in_memory_commit instead", DeprecationWarning)
807 807 return self.in_memory_commit
808 808
809 809 def get_path_permissions(self, username):
810 810 """
811 811 Returns a path permission checker or None if not supported
812 812
813 813 :param username: session user name
814 814 :return: an instance of BasePathPermissionChecker or None
815 815 """
816 816 return None
817 817
818 818 def install_hooks(self, force=False):
819 819 return self._remote.install_hooks(force)
820 820
821 821 def get_hooks_info(self):
822 822 return self._remote.get_hooks_info()
823 823
824 824
class BaseCommit(object):
    """
    Each backend should implement it's commit representation.

    **Attributes**

    ``repository``
        repository object within which commit exists

    ``id``
        The commit id, may be ``raw_id`` or i.e. for mercurial's tip
        just ``tip``.

    ``raw_id``
        raw commit representation (i.e. full 40 length sha for git
        backend)

    ``short_id``
        shortened (if apply) version of ``raw_id``; it would be simple
        shortcut for ``raw_id[:12]`` for git/mercurial backends or same
        as ``raw_id`` for subversion

    ``idx``
        commit index

    ``files``
        list of ``FileNode`` (``Node`` with NodeKind.FILE) objects

    ``dirs``
        list of ``DirNode`` (``Node`` with NodeKind.DIR) objects

    ``nodes``
        combined list of ``Node`` objects

    ``author``
        author of the commit, as unicode

    ``message``
        message of the commit, as unicode

    ``parents``
        list of parent commits

    """

    branch = None
    """
    Depending on the backend this should be set to the branch name of the
    commit. Backends not supporting branches on commits should leave this
    value as ``None``.
    """

    _ARCHIVE_PREFIX_TEMPLATE = b'{repo_name}-{short_id}'
    """
    This template is used to generate a default prefix for repository archives
    if no prefix has been specified.
    """

    def __str__(self):
        return '<%s at %s:%s>' % (
            self.__class__.__name__, self.idx, self.short_id)

    def __repr__(self):
        return self.__str__()

    def __unicode__(self):
        return u'%s:%s' % (self.idx, self.short_id)

    def __eq__(self, other):
        # Commits are equal only when they are the same backend type and
        # point at the same raw id.
        same_instance = isinstance(other, self.__class__)
        return same_instance and self.raw_id == other.raw_id

    def __json__(self):
        parents = []
        try:
            for parent in self.parents:
                parents.append({'raw_id': parent.raw_id})
        except NotImplementedError:
            # empty commit doesn't have parents implemented
            pass

        return {
            'short_id': self.short_id,
            'raw_id': self.raw_id,
            'revision': self.idx,
            'message': self.message,
            'date': self.date,
            'author': self.author,
            'parents': parents,
            'branch': self.branch
        }

    def __getstate__(self):
        # Strip unpicklable/heavy references before serialization.
        d = self.__dict__.copy()
        d.pop('_remote', None)
        d.pop('repository', None)
        return d

    def _get_refs(self):
        return {
            'branches': [self.branch] if self.branch else [],
            'bookmarks': getattr(self, 'bookmarks', []),
            'tags': self.tags
        }

    @LazyProperty
    def last(self):
        """
        ``True`` if this is last commit in repository, ``False``
        otherwise; trying to access this attribute while there is no
        commits would raise `EmptyRepositoryError`
        """
        if self.repository is None:
            raise CommitError("Cannot check if it's most recent commit")
        return self.raw_id == self.repository.commit_ids[-1]

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        raise NotImplementedError

    @LazyProperty
    def first_parent(self):
        """
        Returns the first parent commit, or ``EmptyCommit`` if there is none.
        """
        return self.parents[0] if self.parents else EmptyCommit()

    @property
    def merge(self):
        """
        Returns boolean if commit is a merge.
        """
        return len(self.parents) > 1

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        raise NotImplementedError

    @LazyProperty
    def id(self):
        """
        Returns string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def short_id(self):
        """
        Returns shortened version of ``raw_id`` attribute, as string,
        identifying this commit, useful for presentation to users.
        """
        raise NotImplementedError

    @LazyProperty
    def idx(self):
        """
        Returns integer identifying this commit.
        """
        raise NotImplementedError

    @LazyProperty
    def committer(self):
        """
        Returns committer for this commit
        """
        raise NotImplementedError

    @LazyProperty
    def committer_name(self):
        """
        Returns committer name for this commit
        """
        return author_name(self.committer)

    @LazyProperty
    def committer_email(self):
        """
        Returns committer email address for this commit
        """
        return author_email(self.committer)

    @LazyProperty
    def author(self):
        """
        Returns author for this commit
        """
        raise NotImplementedError

    @LazyProperty
    def author_name(self):
        """
        Returns author name for this commit
        """
        return author_name(self.author)

    @LazyProperty
    def author_email(self):
        """
        Returns author email address for this commit
        """
        return author_email(self.author)

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at `path`.
        """
        raise NotImplementedError

    def is_link(self, path):
        """
        Returns ``True`` if given `path` is a symlink
        """
        raise NotImplementedError

    def get_file_content(self, path):
        """
        Returns content of the file at the given `path`.
        """
        raise NotImplementedError

    def get_file_content_streamed(self, path):
        """
        returns a streaming response from vcsserver with file content
        """
        raise NotImplementedError

    def get_file_size(self, path):
        """
        Returns size of the file at the given `path`.
        """
        raise NotImplementedError

    def get_path_commit(self, path, pre_load=None):
        """
        Returns last commit of the file at the given `path`.

        :param pre_load: Optional. List of commit attributes to load.
        :raises RepositoryError: if no history exists for `path`.
        """
        commits = self.get_path_history(path, limit=1, pre_load=pre_load)
        if not commits:
            raise RepositoryError(
                'Failed to fetch history for path {}. '
                'Please check if such path exists in your repository'.format(
                    path))
        return commits[0]

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of :class:`BaseCommit`
        objects for which file at given `path` has been modified.

        :param limit: Optional. Allows to limit the size of the returned
           history. This is intended as a hint to the underlying backend, so
           that it can apply optimizations depending on the limit.
        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, sha, commit lazy loader and line

        :param pre_load: Optional. List of commit attributes to load.
        """
        raise NotImplementedError

    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``.

        :raises ``CommitError``: if node at the given ``path`` is not
           instance of ``DirNode``
        """
        raise NotImplementedError

    def get_node(self, path):
        """
        Returns ``Node`` object from the given ``path``.

        :raises ``NodeDoesNotExistError``: if there is no node at the given
           ``path``
        """
        raise NotImplementedError

    def get_largefile_node(self, path):
        """
        Returns the path to largefile from Mercurial/Git-lfs storage.
        or None if it's not a largefile node
        """
        return None

    def archive_repo(self, archive_dest_path, kind='tgz', subrepos=None,
                     prefix=None, write_metadata=False, mtime=None, archive_at_path='/'):
        """
        Creates an archive containing the contents of the repository.

        :param archive_dest_path: path to the file which to create the archive.
        :param kind: one of following: ``"tbz2"``, ``"tgz"``, ``"zip"``.
        :param prefix: name of root directory in archive.
            Default is repository name and commit's short_id joined with dash:
            ``"{repo_name}-{short_id}"``.
        :param write_metadata: write a metadata file into archive.
        :param mtime: custom modification time for archive creation, defaults
            to the commit date if not given.
        :param archive_at_path: pack files at this path (default '/')

        :raise VCSError: If prefix has a problem.
        """
        allowed_kinds = [x[0] for x in settings.ARCHIVE_SPECS]
        if kind not in allowed_kinds:
            raise ImproperArchiveTypeError(
                'Archive kind (%s) not supported use one of %s' %
                (kind, allowed_kinds))

        prefix = self._validate_archive_prefix(prefix)

        # BUGFIX: previously `mtime = mtime is not None or time.mktime(...)`
        # reduced any caller-supplied mtime to the boolean True. Only fall
        # back to the commit date when no explicit mtime was given.
        if mtime is None:
            mtime = time.mktime(self.date.timetuple())

        file_info = []
        cur_rev = self.repository.get_commit(commit_id=self.raw_id)
        for _r, _d, files in cur_rev.walk(archive_at_path):
            for f in files:
                f_path = os.path.join(prefix, f.path)
                file_info.append(
                    (f_path, f.mode, f.is_link(), f.raw_bytes))

        if write_metadata:
            metadata = [
                ('repo_name', self.repository.name),
                ('commit_id', self.raw_id),
                ('mtime', mtime),
                ('branch', self.branch),
                ('tags', ','.join(self.tags)),
            ]
            meta = ["%s:%s" % (f_name, value) for f_name, value in metadata]
            file_info.append(('.archival.txt', 0o644, False, '\n'.join(meta)))

        connection.Hg.archive_repo(archive_dest_path, mtime, file_info, kind)

    def _validate_archive_prefix(self, prefix):
        # Fill in the default prefix, or sanity-check a user supplied one.
        if prefix is None:
            prefix = self._ARCHIVE_PREFIX_TEMPLATE.format(
                repo_name=safe_str(self.repository.name),
                short_id=self.short_id)
        elif not isinstance(prefix, str):
            raise ValueError("prefix not a bytes object: %s" % repr(prefix))
        elif prefix.startswith('/'):
            raise VCSError("Prefix cannot start with leading slash")
        elif prefix.strip() == '':
            raise VCSError("Prefix cannot be empty")
        return prefix

    @LazyProperty
    def root(self):
        """
        Returns ``RootNode`` object for this commit.
        """
        return self.get_node('')

    def next(self, branch=None):
        """
        Returns next commit from current, if branch is gives it will return
        next commit belonging to this branch

        :param branch: show commits within the given named branch
        """
        indexes = xrange(self.idx + 1, self.repository.count())
        return self._find_next(indexes, branch)

    def prev(self, branch=None):
        """
        Returns previous commit from current, if branch is gives it will
        return previous commit belonging to this branch

        :param branch: show commit within the given named branch
        """
        indexes = xrange(self.idx - 1, -1, -1)
        return self._find_next(indexes, branch)

    def _find_next(self, indexes, branch=None):
        # Walk the given commit indices and return the first commit that
        # matches `branch` (or simply the first one when no branch given).
        if branch and self.branch != branch:
            raise VCSError('Branch option used on commit not belonging '
                           'to that branch')

        for next_idx in indexes:
            commit = self.repository.get_commit(commit_idx=next_idx)
            if branch and branch != commit.branch:
                continue
            return commit
        raise CommitDoesNotExistError

    def diff(self, ignore_whitespace=True, context=3):
        """
        Returns a `Diff` object representing the change made by this commit.
        """
        parent = self.first_parent
        diff = self.repository.get_diff(
            parent, self,
            ignore_whitespace=ignore_whitespace,
            context=context)
        return diff

    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        raise NotImplementedError

    @LazyProperty
    def size(self):
        """
        Returns total number of bytes from contents of all filenodes.
        """
        return sum(node.size for node in self.get_filenodes_generator())

    def walk(self, topurl=''):
        """
        Similar to os.walk method. Insted of filesystem it walks through
        commit starting at given ``topurl``. Returns generator of tuples
        (topnode, dirnodes, filenodes).
        """
        topnode = self.get_node(topurl)
        if not topnode.is_dir():
            return
        yield (topnode, topnode.dirs, topnode.files)
        for dirnode in topnode.dirs:
            for tup in self.walk(dirnode.path):
                yield tup

    def get_filenodes_generator(self):
        """
        Returns generator that yields *all* file nodes.
        """
        for topnode, dirs, files in self.walk():
            for node in files:
                yield node

    #
    # Utilities for sub classes to support consistent behavior
    #

    def no_node_at_path(self, path):
        return NodeDoesNotExistError(
            u"There is no file nor directory at the given path: "
            u"`%s` at commit %s" % (safe_unicode(path), self.short_id))

    def _fix_path(self, path):
        """
        Paths are stored without trailing slash so we need to get rid off it if
        needed.
        """
        return path.rstrip('/')

    #
    # Deprecated API based on changesets
    #

    @property
    def revision(self):
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value

    def get_file_changeset(self, path):
        warnings.warn("Use get_path_commit instead", DeprecationWarning)
        return self.get_path_commit(path)
1325 1331
class BaseChangesetClass(type):
    """Metaclass so ``isinstance(x, BaseChangeset)`` accepts any ``BaseCommit``."""

    def __instancecheck__(self, instance):
        is_commit = isinstance(instance, BaseCommit)
        return is_commit
1330 1336
1331 1337
class BaseChangeset(BaseCommit):
    """Deprecated alias of :class:`BaseCommit`, kept for backward compatibility."""

    __metaclass__ = BaseChangesetClass

    def __new__(cls, *args, **kwargs):
        # Warn on every instantiation of the legacy name.
        warnings.warn(
            "Use BaseCommit instead of BaseChangeset", DeprecationWarning)
        return super(BaseChangeset, cls).__new__(cls, *args, **kwargs)
1340 1346
1341 1347
class BaseInMemoryCommit(object):
    """
    Represents differences between repository's state (most recent head) and
    changes made *in place*.

    **Attributes**

    ``repository``
        repository object for this in-memory-commit

    ``added``
        list of ``FileNode`` objects marked as *added*

    ``changed``
        list of ``FileNode`` objects marked as *changed*

    ``removed``
        list of ``FileNode`` or ``RemovedFileNode`` objects marked to be
        *removed*

    ``parents``
        list of :class:`BaseCommit` instances representing parents of
        in-memory commit. Should always be 2-element sequence.

    """

    def __init__(self, repository):
        self.repository = repository
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def add(self, *filenodes):
        """
        Marks given ``FileNode`` objects as *to be committed*.

        :raises ``NodeAlreadyExistsError``: if node with same path exists at
          latest commit
        :raises ``NodeAlreadyAddedError``: if node with same path is already
          marked as *added*
        """
        # Check if not already marked as *added* first
        for node in filenodes:
            if node.path in (n.path for n in self.added):
                raise NodeAlreadyAddedError(
                    "Such FileNode %s is already marked for addition"
                    % node.path)
        for node in filenodes:
            self.added.append(node)

    def change(self, *filenodes):
        """
        Marks given ``FileNode`` objects to be *changed* in next commit.

        :raises ``EmptyRepositoryError``: if there are no commits yet
        :raises ``NodeAlreadyExistsError``: if node with same path is already
          marked to be *changed*
        :raises ``NodeAlreadyRemovedError``: if node with same path is already
          marked to be *removed*
        :raises ``NodeDoesNotExistError``: if node doesn't exist in latest
          commit
        :raises ``NodeNotChangedError``: if node hasn't really be changed
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node at %s is already marked as removed" % node.path)
        try:
            self.repository.get_commit()
        except EmptyRepositoryError:
            raise EmptyRepositoryError(
                "Nothing to change - try to *add* new nodes rather than "
                "changing them")
        for node in filenodes:
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node at '%s' is already marked as changed" % node.path)
            self.changed.append(node)

    def remove(self, *filenodes):
        """
        Marks given ``FileNode`` (or ``RemovedFileNode``) objects to be
        *removed* in next commit.

        :raises ``NodeAlreadyRemovedError``: if node has been already marked to
          be *removed*
        :raises ``NodeAlreadyChangedError``: if node has been already marked to
          be *changed*
        """
        for node in filenodes:
            if node.path in (n.path for n in self.removed):
                raise NodeAlreadyRemovedError(
                    "Node is already marked to for removal at %s" % node.path)
            if node.path in (n.path for n in self.changed):
                raise NodeAlreadyChangedError(
                    "Node is already marked to be changed at %s" % node.path)
            # We only mark node as *removed* - real removal is done by
            # commit method
            self.removed.append(node)

    def reset(self):
        """
        Resets this instance to initial state (cleans ``added``, ``changed``
        and ``removed`` lists).
        """
        self.added = []
        self.changed = []
        self.removed = []
        self.parents = []

    def get_ipaths(self):
        """
        Returns generator of paths from nodes marked as added, changed or
        removed.
        """
        for node in itertools.chain(self.added, self.changed, self.removed):
            yield node.path

    def get_paths(self):
        """
        Returns list of paths from nodes marked as added, changed or removed.
        """
        return list(self.get_ipaths())

    def check_integrity(self, parents=None):
        """
        Checks in-memory commit's integrity. Also, sets parents if not
        already set.

        :raises CommitError: if any error occurs (i.e.
          ``NodeDoesNotExistError``).
        """
        if not self.parents:
            parents = parents or []
            if len(parents) == 0:
                try:
                    parents = [self.repository.get_commit(), None]
                except EmptyRepositoryError:
                    parents = [None, None]
            elif len(parents) == 1:
                parents += [None]
            self.parents = parents

        # Local parents, only if not None
        parents = [p for p in self.parents if p]

        # Check nodes marked as added
        for p in parents:
            for node in self.added:
                try:
                    p.get_node(node.path)
                except NodeDoesNotExistError:
                    pass
                else:
                    raise NodeAlreadyExistsError(
                        "Node `%s` already exists at %s" % (node.path, p))

        # Check nodes marked as changed
        missing = set(self.changed)
        not_changed = set(self.changed)
        if self.changed and not parents:
            raise NodeDoesNotExistError(str(self.changed[0].path))
        for p in parents:
            for node in self.changed:
                try:
                    old = p.get_node(node.path)
                    missing.remove(node)
                    # if content actually changed, remove node from not_changed
                    if old.content != node.content:
                        not_changed.remove(node)
                except NodeDoesNotExistError:
                    pass
        if self.changed and missing:
            # BUGFIX: previously reported `node.path` - the last node of the
            # loop above - which may not be a missing node at all. Report an
            # actually missing node instead.
            raise NodeDoesNotExistError(
                "Node `%s` marked as modified but missing in parents: %s"
                % (missing.pop().path, parents))

        if self.changed and not_changed:
            raise NodeNotChangedError(
                "Node `%s` wasn't actually changed (parents: %s)"
                % (not_changed.pop().path, parents))

        # Check nodes marked as removed
        if self.removed and not parents:
            raise NodeDoesNotExistError(
                "Cannot remove node at %s as there "
                "were no parents specified" % self.removed[0].path)
        really_removed = set()
        for p in parents:
            for node in self.removed:
                try:
                    p.get_node(node.path)
                    really_removed.add(node)
                except CommitError:
                    pass
        not_removed = set(self.removed) - really_removed
        if not_removed:
            # TODO: johbo: This code branch does not seem to be covered
            raise NodeDoesNotExistError(
                "Cannot remove node at %s from "
                "following parents: %s" % (not_removed, parents))

    def commit(self, message, author, parents=None, branch=None, date=None, **kwargs):
        """
        Performs in-memory commit (doesn't check workdir in any way) and
        returns newly created :class:`BaseCommit`. Updates repository's
        attribute `commits`.

        .. note::

            While overriding this method each backend's should call
            ``self.check_integrity(parents)`` in the first place.

        :param message: message of the commit
        :param author: full username, i.e. "Joe Doe <joe.doe@example.com>"
        :param parents: single parent or sequence of parents from which commit
            would be derived
        :param date: ``datetime.datetime`` instance. Defaults to
            ``datetime.datetime.now()``.
        :param branch: branch name, as string. If none given, default backend's
            branch would be used.

        :raises ``CommitError``: if any error occurs while committing
        """
        raise NotImplementedError
1568 1574
1569 1575
class BaseInMemoryChangesetClass(type):
    """Metaclass so ``isinstance`` checks accept any ``BaseInMemoryCommit``."""

    def __instancecheck__(self, instance):
        matches = isinstance(instance, BaseInMemoryCommit)
        return matches
1574 1580
1575 1581
class BaseInMemoryChangeset(BaseInMemoryCommit):
    """Deprecated alias of :class:`BaseInMemoryCommit`."""

    __metaclass__ = BaseInMemoryChangesetClass

    def __new__(cls, *args, **kwargs):
        # Warn on every instantiation of the legacy name.
        warnings.warn(
            "Use BaseCommit instead of BaseInMemoryCommit", DeprecationWarning)
        return super(BaseInMemoryChangeset, cls).__new__(cls, *args, **kwargs)
1584 1590
1585 1591
class EmptyCommit(BaseCommit):
    """
    An dummy empty commit. It's possible to pass hash when creating
    an EmptyCommit
    """

    def __init__(
            self, commit_id=EMPTY_COMMIT_ID, repo=None, alias=None, idx=-1,
            message='', author='', date=None):
        self._empty_commit_id = commit_id
        # TODO: johbo: Solve idx parameter, default value does not make
        # too much sense
        self.idx = idx
        self.message = message
        self.author = author
        self.date = date or datetime.datetime.fromtimestamp(0)
        self.repository = repo
        self.alias = alias

    @LazyProperty
    def raw_id(self):
        """
        Returns raw string identifying this commit, useful for web
        representation.
        """
        return self._empty_commit_id

    @LazyProperty
    def branch(self):
        # Resolve the backend's default branch name when an alias is known.
        if self.alias:
            from rhodecode.lib.vcs.backends import get_backend
            return get_backend(self.alias).DEFAULT_BRANCH_NAME

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def id(self):
        return self.raw_id

    def get_path_commit(self, path):
        return self

    def get_file_content(self, path):
        return u''

    def get_file_content_streamed(self, path):
        # BUGFIX: previously called self.get_file_content() without the
        # required `path` argument, raising TypeError on every use.
        yield self.get_file_content(path)

    def get_file_size(self, path):
        return 0
1636 1645
1637 1646
class EmptyChangesetClass(type):
    """Metaclass so ``isinstance`` checks accept any ``EmptyCommit``."""

    def __instancecheck__(self, instance):
        matches = isinstance(instance, EmptyCommit)
        return matches
1642 1651
1643 1652
class EmptyChangeset(EmptyCommit):
    """Deprecated alias of :class:`EmptyCommit`."""

    __metaclass__ = EmptyChangesetClass

    def __new__(cls, *args, **kwargs):
        warnings.warn(
            "Use EmptyCommit instead of EmptyChangeset", DeprecationWarning)
        # BUGFIX: was anchored on EmptyCommit (copy-paste), which skipped
        # EmptyCommit itself in the MRO; anchor super() on this class.
        return super(EmptyChangeset, cls).__new__(cls, *args, **kwargs)

    def __init__(self, cs=EMPTY_COMMIT_ID, repo=None, requested_revision=None,
                 alias=None, revision=-1, message='', author='', date=None):
        if requested_revision is not None:
            warnings.warn(
                "Parameter requested_revision not supported anymore",
                DeprecationWarning)
        super(EmptyChangeset, self).__init__(
            commit_id=cs, repo=repo, alias=alias, idx=revision,
            message=message, author=author, date=date)

    @property
    def revision(self):
        warnings.warn("Use idx instead", DeprecationWarning)
        return self.idx

    @revision.setter
    def revision(self, value):
        warnings.warn("Use idx instead", DeprecationWarning)
        self.idx = value
1672 1681
1673 1682
class EmptyRepository(BaseRepository):
    """Null-object repository used when no real backend is available."""

    def __init__(self, repo_path=None, config=None, create=False, **kwargs):
        # Intentionally a no-op: there is nothing to set up.
        pass

    def get_diff(self, *args, **kwargs):
        """Return an empty git-style diff regardless of arguments."""
        from rhodecode.lib.vcs.backends.git.diff import GitDiff
        empty = GitDiff('')
        return empty
1681 1690
1682 1691
class CollectionGenerator(object):
    """
    Lazy, sliceable collection of commits identified by ``commit_ids``.
    Commits are materialized one at a time via the repository backend.
    """

    def __init__(self, repo, commit_ids, collection_size=None, pre_load=None, translate_tag=None):
        self.repo = repo
        self.commit_ids = commit_ids
        # BUGFIX: the `collection_size` parameter was accepted but silently
        # discarded (always reset to None, see the old TODO). Honour it so
        # callers can report a logical size different from len(commit_ids);
        # behavior is unchanged for callers that never passed it.
        self.collection_size = collection_size
        self.pre_load = pre_load
        self.translate_tag = translate_tag

    def __len__(self):
        # Prefer an explicitly provided logical size over the id count.
        if self.collection_size is not None:
            return self.collection_size
        return self.commit_ids.__len__()

    def __iter__(self):
        for commit_id in self.commit_ids:
            # TODO: johbo: Mercurial passes in commit indices or commit ids
            yield self._commit_factory(commit_id)

    def _commit_factory(self, commit_id):
        """
        Allows backends to override the way commits are generated.
        """
        return self.repo.get_commit(
            commit_id=commit_id, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __getslice__(self, i, j):
        """
        Returns an iterator of sliced repository
        """
        commit_ids = self.commit_ids[i:j]
        return self.__class__(
            self.repo, commit_ids, pre_load=self.pre_load,
            translate_tag=self.translate_tag)

    def __repr__(self):
        return '<CollectionGenerator[len:%s]>' % (self.__len__())
1722 1731
1723 1732
class Config(object):
    """
    Represents the configuration for a repository.

    The API is inspired by :class:`ConfigParser.ConfigParser` from the
    standard library. It implements only the needed subset.
    """

    def __init__(self):
        # mapping: section name -> {option: value}
        self._values = {}

    def copy(self):
        """Return an independent copy (each section dict is duplicated)."""
        duplicate = Config()
        for section_name, section_values in self._values.items():
            duplicate._values[section_name] = section_values.copy()
        return duplicate

    def __repr__(self):
        return '<Config(%s sections) at %s>' % (
            len(self._values), hex(id(self)))

    def items(self, section):
        """Iterate ``(option, value)`` pairs of ``section`` (empty if absent)."""
        section_values = self._values.get(section, {})
        return section_values.iteritems()

    def get(self, section, option):
        """Return the value of ``option`` within ``section``, or ``None``."""
        section_values = self._values.get(section, {})
        return section_values.get(option)

    def set(self, section, option, value):
        """Store ``value`` under ``section``/``option``, creating the section."""
        self._values.setdefault(section, {})[option] = value

    def clear_section(self, section):
        """Drop every option stored under ``section``."""
        self._values[section] = {}

    def serialize(self):
        """
        Creates a list of three tuples (section, key, value) representing
        this config object.
        """
        serialized = []
        for section_name in self._values:
            for option, value in self._values[section_name].items():
                serialized.append(
                    (safe_str(section_name), safe_str(option), safe_str(value)))
        return serialized
1769 1778
1770 1779
class Diff(object):
    """
    Represents a diff result from a repository backend.

    Subclasses have to provide a backend specific value for
    :attr:`_header_re` and :attr:`_meta_re`.
    """
    _meta_re = None
    _header_re = None

    def __init__(self, raw_diff):
        self.raw = raw_diff

    def chunks(self):
        """
        split the diff in chunks of separate --git a/file b/file chunks
        to make diffs consistent we must prepend with \n, and make sure
        we can detect last chunk as this was also has special rule
        """
        parts = ('\n' + self.raw).split('\ndiff --git')
        header, raw_chunks = parts[0], parts[1:]

        if self._meta_re:
            match = self._meta_re.match(header)

        last_index = len(raw_chunks)
        return (
            DiffChunk(raw_chunk, self, index == last_index)
            for index, raw_chunk in enumerate(raw_chunks, start=1))
1803 1812
1804 1813
class DiffChunk(object):

    def __init__(self, chunk, diff, last_chunk):
        """Split one ``diff --git`` chunk into header dict and diff body."""
        self._diff = diff

        # since we split by \ndiff --git that part is lost from original diff
        # we need to re-apply it at the end, EXCEPT ! if it's last chunk
        if not last_chunk:
            chunk = chunk + '\n'

        header_match = self._diff._header_re.match(chunk)
        self.header = header_match.groupdict()
        self.diff = chunk[header_match.end():]
        self.raw = chunk
1819 1828
1820 1829
class BasePathPermissionChecker(object):
    """Decides whether individual repository paths are accessible."""

    @staticmethod
    def create_from_patterns(includes, excludes):
        """
        Build the cheapest checker for the given include/exclude patterns:
        full access, no access, or pattern matching.
        """
        if includes and '*' in includes and not excludes:
            return AllPathPermissionChecker()
        elif excludes and '*' in excludes:
            return NonePathPermissionChecker()
        else:
            return PatternPathPermissionChecker(includes, excludes)

    @property
    def has_full_access(self):
        # BUGFIX: `raise NotImplemented()` tried to call the non-callable
        # NotImplemented singleton, producing a confusing TypeError; raise
        # the proper exception type instead.
        raise NotImplementedError()

    def has_access(self, path):
        # BUGFIX: same NotImplemented -> NotImplementedError fix as above.
        raise NotImplementedError()
1838 1847
1839 1848
class AllPathPermissionChecker(BasePathPermissionChecker):
    """Checker that grants access to every path."""

    @property
    def has_full_access(self):
        # Everything is accessible by definition.
        return True

    def has_access(self, path):
        # Path is irrelevant; access is always granted.
        return True
1848 1857
1849 1858
class NonePathPermissionChecker(BasePathPermissionChecker):
    """Checker that denies access to every path."""

    @property
    def has_full_access(self):
        # Nothing is accessible by definition.
        return False

    def has_access(self, path):
        # Path is irrelevant; access is always denied.
        return False
1858 1867
1859 1868
class PatternPathPermissionChecker(BasePathPermissionChecker):
    """Checker driven by fnmatch-style include/exclude pattern lists."""

    def __init__(self, includes, excludes):
        self.includes = includes
        self.excludes = excludes
        # Pre-compile the glob patterns; empty/None pattern lists yield
        # empty regex lists, matching nothing.
        self.includes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (includes or [])]
        self.excludes_re = [
            re.compile(fnmatch.translate(pattern))
            for pattern in (excludes or [])]

    @property
    def has_full_access(self):
        return '*' in self.includes and not self.excludes

    def has_access(self, path):
        # Excludes win over includes; anything not included is denied.
        if any(regex.match(path) for regex in self.excludes_re):
            return False
        return any(regex.match(path) for regex in self.includes_re)
@@ -1,474 +1,479 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 GIT commit module
23 23 """
24 24
25 25 import re
26 26 import stat
27 27 from itertools import chain
28 28 from StringIO import StringIO
29 29
30 30 from zope.cachedescriptors.property import Lazy as LazyProperty
31 31
32 32 from rhodecode.lib.datelib import utcdate_fromtimestamp
33 33 from rhodecode.lib.utils import safe_unicode, safe_str
34 34 from rhodecode.lib.utils2 import safe_int
35 35 from rhodecode.lib.vcs.conf import settings
36 36 from rhodecode.lib.vcs.backends import base
37 37 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
38 38 from rhodecode.lib.vcs.nodes import (
39 39 FileNode, DirNode, NodeKind, RootNode, SubModuleNode,
40 40 ChangedFileNodesGenerator, AddedFileNodesGenerator,
41 41 RemovedFileNodesGenerator, LargeFileNode)
42 42 from rhodecode.lib.vcs.compat import configparser
43 43
44 44
class GitCommit(base.BaseCommit):
    """
    Represents state of the repository at single commit id.
    """

    _filter_pre_load = [
        # done through a more complex tree walk on parents
        "affected_files",
        # done through subprocess not remote call
        "children",
        # done through a more complex tree walk on parents
        "status",
        # mercurial specific property not supported here
        "_file_paths",
        # mercurial specific property not supported here
        'obsolete',
        # mercurial specific property not supported here
        'phase',
        # mercurial specific property not supported here
        'hidden'
    ]

    def __init__(self, repository, raw_id, idx, pre_load=None):
        self.repository = repository
        self._remote = repository._remote
        # TODO: johbo: Tweak of raw_id should not be necessary
        self.raw_id = safe_str(raw_id)
        self.idx = idx

        self._set_bulk_properties(pre_load)

        # caches
        self._stat_modes = {}  # stat info for paths
        self._paths = {}  # path processed with parse_tree
        self.nodes = {}
        self._submodules = None

    def _set_bulk_properties(self, pre_load):
        """
        Fetch the requested attributes in a single remote round-trip and
        pre-populate them on the instance, skipping attributes that cannot
        be bulk-loaded for git (see ``_filter_pre_load``).
        """
        if not pre_load:
            return
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        result = self._remote.bulk_request(self.raw_id, pre_load)
        for attr, value in result.items():
            if attr in ["author", "message"]:
                if value:
                    value = safe_unicode(value)
            elif attr == "date":
                value = utcdate_fromtimestamp(*value)
            elif attr == "parents":
                value = self._make_commits(value)
            elif attr == "branch":
                # a git commit can sit on multiple branches; keep the first
                value = value[0] if value else None
            self.__dict__[attr] = value

    @LazyProperty
    def _commit(self):
        return self._remote[self.raw_id]

    @LazyProperty
    def _tree_id(self):
        return self._remote[self._commit['tree']]['id']

    @LazyProperty
    def id(self):
        return self.raw_id

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    @LazyProperty
    def message(self):
        return safe_unicode(self._remote.message(self.id))

    @LazyProperty
    def committer(self):
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def author(self):
        return safe_unicode(self._remote.author(self.id))

    @LazyProperty
    def date(self):
        unix_ts, tz = self._remote.date(self.raw_id)
        return utcdate_fromtimestamp(unix_ts, tz)

    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed, deleted files for current commit
        """
        return self.changed, self.added, self.removed

    @LazyProperty
    def tags(self):
        tags = [safe_unicode(name) for name,
                commit_id in self.repository.tags.iteritems()
                if commit_id == self.raw_id]
        return tags

    @LazyProperty
    def commit_branches(self):
        branches = []
        for name, commit_id in self.repository.branches.iteritems():
            if commit_id == self.raw_id:
                branches.append(name)
        return branches

    @LazyProperty
    def branch(self):
        branches = self._remote.branch(self.raw_id)

        if branches:
            # actually commit can have multiple branches in git
            return safe_unicode(branches[0])

    def _get_tree_id_for_path(self, path):
        """
        Resolve `path` to a ``[tree_id, tree_type]`` pair, caching the
        result (and the path's stat mode) on the instance.

        :raises NodeDoesNotExistError: when no object lives at `path`.
        """
        path = safe_str(path)
        if path in self._paths:
            return self._paths[path]

        tree_id = self._tree_id

        path = path.strip('/')
        if path == '':
            data = [tree_id, "tree"]
            self._paths[''] = data
            return data

        tree_id, tree_type, tree_mode = \
            self._remote.tree_and_type_for_path(self.raw_id, path)
        if tree_id is None:
            raise self.no_node_at_path(path)

        self._paths[path] = [tree_id, tree_type]
        self._stat_modes[path] = tree_mode

        if path not in self._paths:
            raise self.no_node_at_path(path)

        return self._paths[path]

    def _get_kind(self, path):
        """Map the git object type at `path` onto a ``NodeKind``."""
        tree_id, type_ = self._get_tree_id_for_path(path)
        if type_ == 'blob':
            return NodeKind.FILE
        elif type_ == 'tree':
            return NodeKind.DIR
        elif type_ == 'link':
            return NodeKind.SUBMODULE
        return None

    def _get_filectx(self, path):
        """Return the normalized `path`, ensuring it points at a file."""
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise CommitError(
                "File does not exist for commit %s at '%s'" % (self.raw_id, path))
        return path

    def _get_file_nodes(self):
        # flatten the (dirpath, dirs, files) walk into a stream of file nodes
        return chain(*(t[2] for t in self.walk()))

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parent_ids = self._remote.parents(self.id)
        return self._make_commits(parent_ids)

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """

        children = self._remote.children(self.raw_id)
        return self._make_commits(children)

    def _make_commits(self, commit_ids):
        """Instantiate ``GitCommit`` objects for each id in `commit_ids`."""
        def commit_maker(_commit_id):
            # BUG FIX: previously this closed over the comprehension
            # variable ``commit_id`` instead of using its own parameter,
            # which only worked via Python 2 comprehension scope leakage.
            return self.repository.get_commit(commit_id=_commit_id)

        return [commit_maker(commit_id) for commit_id in commit_ids]

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at the given `path`.
        """
        path = safe_str(path)
        # ensure path is traversed
        self._get_tree_id_for_path(path)
        return self._stat_modes[path]

    def is_link(self, path):
        """Return True when the file at `path` is a symbolic link."""
        return stat.S_ISLNK(self.get_file_mode(path))

    def get_file_content(self, path):
        """
        Returns content of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_as_pretty_string(tree_id)

    def get_file_content_streamed(self, path):
        """
        Returns a stream (chunked remote call) with the content of the
        file at given `path`, avoiding loading the whole blob in memory.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        stream_method = getattr(self._remote, 'stream:blob_as_pretty_string')
        return stream_method(tree_id)

    def get_file_size(self, path):
        """
        Returns size of the file at given `path`.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        return self._remote.blob_raw_length(tree_id)

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of `GitCommit` objects for
        which file at given `path` has been modified.
        """

        path = self._get_filectx(path)
        hist = self._remote.node_history(self.raw_id, path, limit)
        return [
            self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in hist]

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, commit_id, commit lazy loader and line
        """

        result = self._remote.node_annotate(self.raw_id, path)

        for ln_no, commit_id, content in result:
            yield (
                ln_no, commit_id,
                lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
                content)

    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects representing
        the state of the tree at the given `path`.

        :raises CommitError: when `path` is not a directory.
        """

        if self._get_kind(path) != NodeKind.DIR:
            raise CommitError(
                "Directory does not exist for commit %s at '%s'" % (self.raw_id, path))
        path = self._fix_path(path)

        tree_id, _ = self._get_tree_id_for_path(path)

        dirnodes = []
        filenodes = []

        # extracted tree ID gives us our files...
        for name, stat_, id_, type_ in self._remote.tree_items(tree_id):
            if type_ == 'link':
                url = self._get_submodule_url('/'.join((path, name)))
                dirnodes.append(SubModuleNode(
                    name, url=url, commit=id_, alias=self.repository.alias))
                continue

            if path != '':
                obj_path = '/'.join((path, name))
            else:
                obj_path = name
            if obj_path not in self._stat_modes:
                self._stat_modes[obj_path] = stat_

            if type_ == 'tree':
                dirnodes.append(DirNode(obj_path, commit=self))
            elif type_ == 'blob':
                filenodes.append(FileNode(obj_path, commit=self, mode=stat_))
            else:
                # BUG FIX: the format argument was previously passed as a
                # second exception argument instead of being interpolated,
                # so the message never contained the offending type.
                raise CommitError(
                    "Requested object should be Tree or Blob, is %s" % type_)

        nodes = dirnodes + filenodes
        for node in nodes:
            if node.path not in self.nodes:
                self.nodes[node.path] = node
        nodes.sort()
        return nodes

    def get_node(self, path, pre_load=None):
        """
        Returns the ``Node`` object for `path`, creating and caching it on
        first access.

        :raises NodeDoesNotExistError: when nothing lives at `path`.
        """
        if isinstance(path, unicode):
            path = path.encode('utf-8')
        path = self._fix_path(path)
        if path not in self.nodes:
            try:
                tree_id, type_ = self._get_tree_id_for_path(path)
            except CommitError:
                raise NodeDoesNotExistError(
                    "Cannot find one of parents' directories for a given "
                    "path: %s" % path)

            if type_ == 'link':
                url = self._get_submodule_url(path)
                node = SubModuleNode(path, url=url, commit=tree_id,
                                     alias=self.repository.alias)
            elif type_ == 'tree':
                if path == '':
                    node = RootNode(commit=self)
                else:
                    node = DirNode(path, commit=self)
            elif type_ == 'blob':
                node = FileNode(path, commit=self, pre_load=pre_load)
                self._stat_modes[path] = node.mode
            else:
                raise self.no_node_at_path(path)

            # cache node
            self.nodes[path] = node

        return self.nodes[path]

    def get_largefile_node(self, path):
        """
        Return a ``LargeFileNode`` when `path` is an LFS pointer whose
        object is present in the largefiles store; otherwise ``None``.
        """
        tree_id, _ = self._get_tree_id_for_path(path)
        pointer_spec = self._remote.is_large_file(tree_id)

        if pointer_spec:
            # content of that file regular FileNode is the hash of largefile
            file_id = pointer_spec.get('oid_hash')
            if self._remote.in_largefiles_store(file_id):
                lf_path = self._remote.store_path(file_id)
                return LargeFileNode(lf_path, commit=self, org_path=path)

    @LazyProperty
    def affected_files(self):
        """
        Gets a fast accessible file changes for given commit
        """
        added, modified, deleted = self._changes_cache
        return list(added.union(modified).union(deleted))

    @LazyProperty
    def _changes_cache(self):
        """
        Compute ``(added, modified, deleted)`` path sets by diffing this
        commit's tree against each parent (or an empty tree for the root
        commit).
        """
        added = set()
        modified = set()
        deleted = set()
        _r = self._remote

        parents = self.parents
        if not self.parents:
            parents = [base.EmptyCommit()]
        for parent in parents:
            if isinstance(parent, base.EmptyCommit):
                oid = None
            else:
                oid = parent.raw_id
            changes = _r.tree_changes(oid, self.raw_id)
            for (oldpath, newpath), (_, _), (_, _) in changes:
                if newpath and oldpath:
                    modified.add(newpath)
                elif newpath and not oldpath:
                    added.add(newpath)
                elif not newpath and oldpath:
                    deleted.add(oldpath)
        return added, modified, deleted

    def _get_paths_for_status(self, status):
        """
        Returns sorted list of paths for given ``status``.

        :param status: one of: *added*, *modified* or *deleted*
        """
        added, modified, deleted = self._changes_cache
        return sorted({
            'added': list(added),
            'modified': list(modified),
            'deleted': list(deleted)}[status]
        )

    @LazyProperty
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        if not self.parents:
            return list(self._get_file_nodes())
        return AddedFileNodesGenerator(
            [n for n in self._get_paths_for_status('added')], self)

    @LazyProperty
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return ChangedFileNodesGenerator(
            [n for n in self._get_paths_for_status('modified')], self)

    @LazyProperty
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        if not self.parents:
            return []
        return RemovedFileNodesGenerator(
            [n for n in self._get_paths_for_status('deleted')], self)

    def _get_submodule_url(self, submodule_path):
        """
        Resolve a submodule path to its URL by parsing ``.gitmodules``,
        caching the parsed mapping on first use.
        """
        git_modules_path = '.gitmodules'

        if self._submodules is None:
            self._submodules = {}

            try:
                submodules_node = self.get_node(git_modules_path)
            except NodeDoesNotExistError:
                return None

            content = submodules_node.content

            # ConfigParser fails if there are whitespaces
            content = '\n'.join(l.strip() for l in content.split('\n'))

            parser = configparser.ConfigParser()
            parser.readfp(StringIO(content))

            for section in parser.sections():
                path = parser.get(section, 'path')
                url = parser.get(section, 'url')
                if path and url:
                    self._submodules[path.strip('/')] = url

        return self._submodules.get(submodule_path.strip('/'))
@@ -1,380 +1,385 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 HG commit module
23 23 """
24 24
25 25 import os
26 26
27 27 from zope.cachedescriptors.property import Lazy as LazyProperty
28 28
29 29 from rhodecode.lib.datelib import utcdate_fromtimestamp
30 30 from rhodecode.lib.utils import safe_str, safe_unicode
31 31 from rhodecode.lib.vcs import path as vcspath
32 32 from rhodecode.lib.vcs.backends import base
33 33 from rhodecode.lib.vcs.backends.hg.diff import MercurialDiff
34 34 from rhodecode.lib.vcs.exceptions import CommitError
35 35 from rhodecode.lib.vcs.nodes import (
36 36 AddedFileNodesGenerator, ChangedFileNodesGenerator, DirNode, FileNode,
37 37 NodeKind, RemovedFileNodesGenerator, RootNode, SubModuleNode,
38 38 LargeFileNode, LARGEFILE_PREFIX)
39 39 from rhodecode.lib.vcs.utils.paths import get_dirs_for_path
40 40
41 41
class MercurialCommit(base.BaseCommit):
    """
    Represents state of the repository at the single commit.
    """

    _filter_pre_load = [
        # git specific property not supported here
        "_commit",
    ]

    def __init__(self, repository, raw_id, idx, pre_load=None):
        raw_id = safe_str(raw_id)

        self.repository = repository
        self._remote = repository._remote

        self.raw_id = raw_id
        self.idx = idx

        self._set_bulk_properties(pre_load)

        # caches
        self.nodes = {}

    def _set_bulk_properties(self, pre_load):
        # Fetch the requested attributes in one remote round-trip and
        # pre-populate them on the instance; attributes listed in
        # _filter_pre_load cannot be bulk-loaded for mercurial.
        if not pre_load:
            return
        pre_load = [entry for entry in pre_load
                    if entry not in self._filter_pre_load]
        if not pre_load:
            return

        result = self._remote.bulk_request(self.raw_id, pre_load)
        for attr, value in result.items():
            if attr in ["author", "branch", "message"]:
                value = safe_unicode(value)
            elif attr == "affected_files":
                value = map(safe_unicode, value)
            elif attr == "date":
                value = utcdate_fromtimestamp(*value)
            elif attr in ["children", "parents"]:
                value = self._make_commits(value)
            elif attr in ["phase"]:
                value = self._get_phase_text(value)
            # writing into __dict__ shadows the corresponding LazyProperty,
            # so later attribute access is served from this cache
            self.__dict__[attr] = value

    @LazyProperty
    def tags(self):
        tags = [name for name, commit_id in self.repository.tags.iteritems()
                if commit_id == self.raw_id]
        return tags

    @LazyProperty
    def branch(self):
        return safe_unicode(self._remote.ctx_branch(self.raw_id))

    @LazyProperty
    def bookmarks(self):
        bookmarks = [
            name for name, commit_id in self.repository.bookmarks.iteritems()
            if commit_id == self.raw_id]
        return bookmarks

    @LazyProperty
    def message(self):
        return safe_unicode(self._remote.ctx_description(self.raw_id))

    @LazyProperty
    def committer(self):
        # mercurial does not track a separate committer; reuse the author
        return safe_unicode(self.author)

    @LazyProperty
    def author(self):
        return safe_unicode(self._remote.ctx_user(self.raw_id))

    @LazyProperty
    def date(self):
        return utcdate_fromtimestamp(*self._remote.ctx_date(self.raw_id))

    @LazyProperty
    def status(self):
        """
        Returns modified, added, removed, deleted files for current commit
        """
        return self._remote.ctx_status(self.raw_id)

    @LazyProperty
    def _file_paths(self):
        # all file paths tracked at this commit, as str
        return self._remote.ctx_list(self.raw_id)

    @LazyProperty
    def _dir_paths(self):
        # directory paths derived from the file paths; '' is the root dir
        p = list(set(get_dirs_for_path(*self._file_paths)))
        p.insert(0, '')
        return p

    @LazyProperty
    def _paths(self):
        return self._dir_paths + self._file_paths

    @LazyProperty
    def id(self):
        # `self.last` is provided by base.BaseCommit
        if self.last:
            return u'tip'
        return self.short_id

    @LazyProperty
    def short_id(self):
        return self.raw_id[:12]

    def _make_commits(self, indexes, pre_load=None):
        # negative indexes mark non-existing commits (e.g. nullrev parents)
        return [self.repository.get_commit(commit_idx=idx, pre_load=pre_load)
                for idx in indexes if idx >= 0]

    @LazyProperty
    def parents(self):
        """
        Returns list of parent commits.
        """
        parents = self._remote.ctx_parents(self.raw_id)
        return self._make_commits(parents)

    def _get_phase_text(self, phase_id):
        # map mercurial's numeric phase to its display name
        return {
            0: 'public',
            1: 'draft',
            2: 'secret',
        }.get(phase_id) or ''

    @LazyProperty
    def phase(self):
        phase_id = self._remote.ctx_phase(self.raw_id)
        phase_text = self._get_phase_text(phase_id)

        return safe_unicode(phase_text)

    @LazyProperty
    def obsolete(self):
        obsolete = self._remote.ctx_obsolete(self.raw_id)
        return obsolete

    @LazyProperty
    def hidden(self):
        hidden = self._remote.ctx_hidden(self.raw_id)
        return hidden

    @LazyProperty
    def children(self):
        """
        Returns list of child commits.
        """
        children = self._remote.ctx_children(self.raw_id)
        return self._make_commits(children)

    def _fix_path(self, path):
        """
        Mercurial keeps filenodes as str so we need to encode from unicode
        to str.
        """
        return safe_str(super(MercurialCommit, self)._fix_path(path))

    def _get_kind(self, path):
        # classify `path` as FILE or DIR; anything else does not exist
        path = self._fix_path(path)
        if path in self._file_paths:
            return NodeKind.FILE
        elif path in self._dir_paths:
            return NodeKind.DIR
        else:
            raise CommitError(
                "Node does not exist at the given path '%s'" % (path, ))

    def _get_filectx(self, path):
        # normalize `path` and assert it points at a file
        path = self._fix_path(path)
        if self._get_kind(path) != NodeKind.FILE:
            raise CommitError(
                "File does not exist for idx %s at '%s'" % (self.raw_id, path))
        return path

    def get_file_mode(self, path):
        """
        Returns stat mode of the file at the given ``path``.
        """
        path = self._get_filectx(path)
        if 'x' in self._remote.fctx_flags(self.raw_id, path):
            return base.FILEMODE_EXECUTABLE
        else:
            return base.FILEMODE_DEFAULT

    def is_link(self, path):
        # 'l' flag marks a symlink in mercurial
        path = self._get_filectx(path)
        return 'l' in self._remote.fctx_flags(self.raw_id, path)

    def get_file_content(self, path):
        """
        Returns content of the file at given ``path``.
        """
        path = self._get_filectx(path)
        return self._remote.fctx_node_data(self.raw_id, path)

    def get_file_content_streamed(self, path):
        # 'stream:'-prefixed attribute selects the chunked/streaming
        # variant of the remote call, avoiding loading the whole file
        path = self._get_filectx(path)
        stream_method = getattr(self._remote, 'stream:fctx_node_data')
        return stream_method(self.raw_id, path)

    def get_file_size(self, path):
        """
        Returns size of the file at given ``path``.
        """
        path = self._get_filectx(path)
        return self._remote.fctx_size(self.raw_id, path)

    def get_path_history(self, path, limit=None, pre_load=None):
        """
        Returns history of file as reversed list of `MercurialCommit` objects
        for which file at given ``path`` has been modified.
        """
        path = self._get_filectx(path)
        hist = self._remote.node_history(self.raw_id, path, limit)
        return [
            self.repository.get_commit(commit_id=commit_id, pre_load=pre_load)
            for commit_id in hist]

    def get_file_annotate(self, path, pre_load=None):
        """
        Returns a generator of four element tuples with
        lineno, commit_id, commit lazy loader and line
        """
        result = self._remote.fctx_annotate(self.raw_id, path)

        for ln_no, commit_id, content in result:
            yield (
                ln_no, commit_id,
                lambda: self.repository.get_commit(commit_id=commit_id, pre_load=pre_load),
                content)

    def get_nodes(self, path):
        """
        Returns combined ``DirNode`` and ``FileNode`` objects list representing
        state of commit at the given ``path``. If node at the given ``path``
        is not instance of ``DirNode``, CommitError would be raised.
        """

        if self._get_kind(path) != NodeKind.DIR:
            raise CommitError(
                "Directory does not exist for idx %s at '%s'" % (self.raw_id, path))
        path = self._fix_path(path)

        filenodes = [
            FileNode(f, commit=self) for f in self._file_paths
            if os.path.dirname(f) == path]
        # TODO: johbo: Check if this can be done in a more obvious way
        dirs = path == '' and '' or [
            d for d in self._dir_paths
            if d and vcspath.dirname(d) == path]
        dirnodes = [
            DirNode(d, commit=self) for d in dirs
            if os.path.dirname(d) == path]

        alias = self.repository.alias
        for k, vals in self._submodules.iteritems():
            if vcspath.dirname(k) == path:
                loc = vals[0]
                commit = vals[1]
                dirnodes.append(SubModuleNode(k, url=loc, commit=commit, alias=alias))

        nodes = dirnodes + filenodes
        for node in nodes:
            if node.path not in self.nodes:
                self.nodes[node.path] = node
        nodes.sort()

        return nodes

    def get_node(self, path, pre_load=None):
        """
        Returns `Node` object from the given `path`. If there is no node at
        the given `path`, `NodeDoesNotExistError` would be raised.
        """
        path = self._fix_path(path)

        if path not in self.nodes:
            if path in self._file_paths:
                node = FileNode(path, commit=self, pre_load=pre_load)
            elif path in self._dir_paths:
                if path == '':
                    node = RootNode(commit=self)
                else:
                    node = DirNode(path, commit=self)
            else:
                raise self.no_node_at_path(path)

            # cache node
            self.nodes[path] = node
        return self.nodes[path]

    def get_largefile_node(self, path):
        # Resolve `path` to a LargeFileNode when it is a largefile pointer
        # present in the local store or the user cache; returns None
        # otherwise.
        pointer_spec = self._remote.is_large_file(path)
        if pointer_spec:
            # content of that file regular FileNode is the hash of largefile
            file_id = self.get_file_content(path).strip()

            if self._remote.in_largefiles_store(file_id):
                lf_path = self._remote.store_path(file_id)
                return LargeFileNode(lf_path, commit=self, org_path=path)
            elif self._remote.in_user_cache(file_id):
                lf_path = self._remote.store_path(file_id)
                # link the cached blob into the store for future lookups
                self._remote.link(file_id, path)
                return LargeFileNode(lf_path, commit=self, org_path=path)

    @LazyProperty
    def _submodules(self):
        """
        Returns a dictionary with submodule information from substate file
        of hg repository.
        """
        return self._remote.ctx_substate(self.raw_id)

    @LazyProperty
    def affected_files(self):
        """
        Gets a fast accessible file changes for given commit
        """
        return self._remote.ctx_files(self.raw_id)

    @property
    def added(self):
        """
        Returns list of added ``FileNode`` objects.
        """
        # status tuple layout: (modified, added, removed)
        return AddedFileNodesGenerator([n for n in self.status[1]], self)

    @property
    def changed(self):
        """
        Returns list of modified ``FileNode`` objects.
        """
        return ChangedFileNodesGenerator([n for n in self.status[0]], self)

    @property
    def removed(self):
        """
        Returns list of removed ``FileNode`` objects.
        """
        return RemovedFileNodesGenerator([n for n in self.status[2]], self)
@@ -1,236 +1,241 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 SVN commit module
23 23 """
24 24
25 25
26 26 import dateutil.parser
27 27 from zope.cachedescriptors.property import Lazy as LazyProperty
28 28
29 29 from rhodecode.lib.utils import safe_str, safe_unicode
30 30 from rhodecode.lib.vcs import nodes, path as vcspath
31 31 from rhodecode.lib.vcs.backends import base
32 32 from rhodecode.lib.vcs.exceptions import CommitError, NodeDoesNotExistError
33 33
34 34
# Value Subversion uses for boolean-style properties such as
# ``svn:executable`` and ``svn:special``.
_SVN_PROP_TRUE = '*'
36 36
37 37
class SubversionCommit(base.BaseCommit):
    """
    Subversion specific implementation of commits

    .. attribute:: branch

       The Subversion backend does not support to assign branches to
       specific commits. This attribute has always the value `None`.

    """

    def __init__(self, repository, commit_id):
        self.repository = repository
        self.idx = self.repository._get_commit_idx(commit_id)
        # Subversion revision numbers are 1-based, our commit indices 0-based.
        self._svn_rev = self.idx + 1
        self._remote = repository._remote
        # TODO: handling of raw_id should be a method on repository itself,
        # which knows how to translate commit index and commit id
        self.raw_id = commit_id
        self.short_id = commit_id
        self.id = 'r%s' % (commit_id, )

        # TODO: Implement the following placeholder attributes
        self.nodes = {}
        self.tags = []

    @property
    def author(self):
        return safe_unicode(self._properties.get('svn:author'))

    @property
    def date(self):
        return _date_from_svn_properties(self._properties)

    @property
    def message(self):
        return safe_unicode(self._properties.get('svn:log'))

    @LazyProperty
    def _properties(self):
        # Raw revision properties (svn:author, svn:date, svn:log, ...).
        return self._remote.revision_properties(self._svn_rev)

    @LazyProperty
    def parents(self):
        parent_idx = self.idx - 1
        if parent_idx >= 0:
            parent = self.repository.get_commit(commit_idx=parent_idx)
            return [parent]
        return []

    @LazyProperty
    def children(self):
        child_idx = self.idx + 1
        if child_idx < len(self.repository.commit_ids):
            child = self.repository.get_commit(commit_idx=child_idx)
            return [child]
        return []

    def get_file_mode(self, path):
        # Note: Subversion flags files which are executable with a special
        # property `svn:executable` which is set to the value ``"*"``.
        if self._get_file_property(path, 'svn:executable') == _SVN_PROP_TRUE:
            return base.FILEMODE_EXECUTABLE
        else:
            return base.FILEMODE_DEFAULT

    def is_link(self, path):
        # Note: Subversion has a flag for special files, the content of the
        # file contains the type of that file.
        if self._get_file_property(path, 'svn:special') == _SVN_PROP_TRUE:
            return self.get_file_content(path).startswith('link')
        return False

    def _get_file_property(self, path, name):
        """Return a single node property of ``path`` at this revision."""
        file_properties = self._remote.node_properties(
            safe_str(path), self._svn_rev)
        return file_properties.get(name)

    def get_file_content(self, path):
        path = self._fix_path(path)
        return self._remote.get_file_content(safe_str(path), self._svn_rev)

    def get_file_content_streamed(self, path):
        # Streaming variant resolved through the remote's "stream:" attribute
        # namespace; returns an iterator over content chunks.
        path = self._fix_path(path)
        stream_method = getattr(self._remote, 'stream:get_file_content')
        return stream_method(safe_str(path), self._svn_rev)

    def get_file_size(self, path):
        path = self._fix_path(path)
        return self._remote.get_file_size(safe_str(path), self._svn_rev)

    def get_path_history(self, path, limit=None, pre_load=None):
        path = safe_str(self._fix_path(path))
        history = self._remote.node_history(path, self._svn_rev, limit)
        return [
            self.repository.get_commit(commit_id=str(svn_rev))
            for svn_rev in history]

    def get_file_annotate(self, path, pre_load=None):
        result = self._remote.file_annotate(safe_str(path), self._svn_rev)

        for zero_based_line_no, svn_rev, content in result:
            commit_id = str(svn_rev)
            line_no = zero_based_line_no + 1
            yield (
                line_no,
                commit_id,
                # NOTE: bind commit_id as a default argument. A plain closure
                # would late-bind the loop variable, so every yielded lambda
                # invoked after iteration would load the commit of the *last*
                # annotated line instead of its own.
                lambda commit_id=commit_id: self.repository.get_commit(
                    commit_id=commit_id),
                content)

    def get_node(self, path, pre_load=None):
        path = self._fix_path(path)
        if path not in self.nodes:

            if path == '':
                node = nodes.RootNode(commit=self)
            else:
                node_type = self._remote.get_node_type(
                    safe_str(path), self._svn_rev)
                if node_type == 'dir':
                    node = nodes.DirNode(path, commit=self)
                elif node_type == 'file':
                    node = nodes.FileNode(path, commit=self, pre_load=pre_load)
                else:
                    raise self.no_node_at_path(path)

            # cache resolved nodes per-commit
            self.nodes[path] = node
        return self.nodes[path]

    def get_nodes(self, path):
        if self._get_kind(path) != nodes.NodeKind.DIR:
            raise CommitError(
                "Directory does not exist for commit %s at "
                " '%s'" % (self.raw_id, path))
        path = self._fix_path(path)

        path_nodes = []
        for name, kind in self._remote.get_nodes(
                safe_str(path), revision=self._svn_rev):
            node_path = vcspath.join(path, name)
            if kind == 'dir':
                node = nodes.DirNode(node_path, commit=self)
            elif kind == 'file':
                node = nodes.FileNode(node_path, commit=self)
            else:
                raise ValueError("Node kind %s not supported." % (kind, ))
            self.nodes[node_path] = node
            path_nodes.append(node)

        return path_nodes

    def _get_kind(self, path):
        """Map the remote node type of ``path`` onto ``nodes.NodeKind``."""
        path = self._fix_path(path)
        kind = self._remote.get_node_type(path, self._svn_rev)
        if kind == 'file':
            return nodes.NodeKind.FILE
        elif kind == 'dir':
            return nodes.NodeKind.DIR
        else:
            raise CommitError(
                "Node does not exist at the given path '%s'" % (path, ))

    @LazyProperty
    def _changes_cache(self):
        # Dict of change-type -> file paths for this revision.
        return self._remote.revision_changes(self._svn_rev)

    @LazyProperty
    def affected_files(self):
        changed_files = set()
        for files in self._changes_cache.itervalues():
            changed_files.update(files)
        return list(changed_files)

    @LazyProperty
    def id(self):
        # NOTE: shadowed by the instance attribute assigned in __init__;
        # kept for API parity with other commit implementations.
        return self.raw_id

    @property
    def added(self):
        return nodes.AddedFileNodesGenerator(
            self._changes_cache['added'], self)

    @property
    def changed(self):
        return nodes.ChangedFileNodesGenerator(
            self._changes_cache['changed'], self)

    @property
    def removed(self):
        return nodes.RemovedFileNodesGenerator(
            self._changes_cache['removed'], self)
224 229
225 230
def _date_from_svn_properties(properties):
    """
    Parses the date out of given svn properties.

    :return: :class:`datetime.datetime` instance. The object is naive.
    """

    parsed = dateutil.parser.parse(properties.get('svn:date'))
    # Deliberately keep the timestamp as parsed instead of converting it to
    # the local timezone; only the tzinfo is stripped so callers always get
    # a naive datetime.
    # final_date = parsed.astimezone(dateutil.tz.tzlocal())
    return parsed.replace(tzinfo=None)
@@ -1,318 +1,346 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2016-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Client for the VCSServer implemented based on HTTP.
23 23 """
24 24
25 25 import copy
26 26 import logging
27 27 import threading
28 28 import time
29 29 import urllib2
30 30 import urlparse
31 31 import uuid
32 32 import traceback
33 33
34 34 import pycurl
35 35 import msgpack
36 36 import requests
37 37 from requests.packages.urllib3.util.retry import Retry
38 38
39 39 import rhodecode
40 40 from rhodecode.lib.system_info import get_cert_path
41 41 from rhodecode.lib.vcs import exceptions, CurlSession
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
# TODO: mikhail: Keep it in sync with vcsserver's
# HTTPApplication.ALLOWED_EXCEPTIONS
# Maps exception type names sent back by the VCSServer to local exception
# classes so they can be re-raised on this side of the wire.
EXCEPTIONS_MAP = {
    'KeyError': KeyError,
    'URLError': urllib2.URLError,
}
52 52
53 53
class RepoMaker(object):
    """
    Factory producing ``RemoteRepo`` proxies for a single backend endpoint.
    Unknown attribute access is forwarded as a backend-scoped remote call.
    """

    def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory):
        self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint)
        self._session_factory = session_factory
        self.backend_type = backend_type

    def __call__(self, path, repo_id, config, with_wire=None):
        log.debug('%s RepoMaker call on %s', self.backend_type.upper(), path)
        session = self._session_factory()
        return RemoteRepo(path, repo_id, config, self.url, session,
                          with_wire=with_wire)

    def __getattr__(self, name):
        def maker_proxy(*args, **kwargs):
            return self._call(name, *args, **kwargs)
        return maker_proxy

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        payload = {
            'id': str(uuid.uuid4()),
            'method': name,
            'backend': self.backend_type,
            'params': {'args': args, 'kwargs': kwargs}
        }
        return _remote_call(
            self.url, payload, EXCEPTIONS_MAP, self._session_factory())
81
82
class ServiceConnection(object):
    """
    Proxy for repo-independent (service level) calls against the VCSServer.
    """

    def __init__(self, server_and_port, backend_endpoint, session_factory):
        self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint)
        self._session_factory = session_factory

    def __getattr__(self, name):
        def service_proxy(*args, **kwargs):
            return self._call(name, *args, **kwargs)

        return service_proxy

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        # Every unknown attribute becomes a remote method invocation.
        payload = {
            'id': str(uuid.uuid4()),
            'method': name,
            'params': {'args': args, 'kwargs': kwargs}
        }
        session = self._session_factory()
        return _remote_call(self.url, payload, EXCEPTIONS_MAP, session)
103
104
class RemoteRepo(object):
    """
    Per-repository RPC proxy: unknown attribute access turns into a remote
    method call carrying the repo "wire" description.
    """

    def __init__(self, path, repo_id, config, url, session, with_wire=None):
        self.url = url
        self._session = session
        with_wire = with_wire or {}

        repo_state_uid = with_wire.get('repo_state_uid') or 'state'
        self._wire = {
            "path": path,  # repo path
            "repo_id": repo_id,
            "config": config,
            "repo_state_uid": repo_state_uid,
            "context": self._create_vcs_cache_context(path, repo_state_uid)
        }

        if with_wire:
            self._wire.update(with_wire)

        # NOTE(johbo): Trading complexity for performance. Avoiding the call to
        # log.debug brings a few percent gain even if it is not active.
        # FIX: always assign the flag. Previously it was only set when DEBUG
        # logging was enabled; otherwise the attribute lookup fell through to
        # __getattr__, which returned a (truthy) remote-call proxy function
        # and silently enabled the logging branch on every call.
        self._call_with_logging = log.isEnabledFor(logging.DEBUG)

        self.cert_dir = get_cert_path(rhodecode.CONFIG.get('__file__'))

    def __getattr__(self, name):
        def f(*args, **kwargs):
            return self._call(name, *args, **kwargs)
        return f

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        # TODO: oliver: This is currently necessary pre-call since the
        # config object is being changed for hooking scenarios
        wire = copy.deepcopy(self._wire)
        wire["config"] = wire["config"].serialize()
        wire["config"].append(('vcs', 'ssl_dir', self.cert_dir))

        payload = {
            'id': str(uuid.uuid4()),
            'method': name,
            'params': {'wire': wire, 'args': args, 'kwargs': kwargs}
        }

        if self._call_with_logging:
            start = time.time()
            context_uid = wire.get('context')
            log.debug('Calling %s@%s with args:%.10240r. wire_context: %s',
                      self.url, name, args, context_uid)
        result = _remote_call(self.url, payload, EXCEPTIONS_MAP, self._session)
        if self._call_with_logging:
            log.debug('Call %s@%s took: %.4fs. wire_context: %s',
                      self.url, name, time.time()-start, context_uid)
        return result

    def __getitem__(self, key):
        return self.revision(key)

    def _create_vcs_cache_context(self, *args):
        """
        Creates a unique string which is passed to the VCSServer on every
        remote call. It is used as cache key in the VCSServer.
        """
        hash_key = '-'.join(map(str, args))
        return str(uuid.uuid5(uuid.NAMESPACE_URL, hash_key))

    def invalidate_vcs_cache(self):
        """
        This invalidates the context which is sent to the VCSServer on every
        call to a remote method. It forces the VCSServer to create a fresh
        repository instance on the next call to a remote method.
        """
        self._wire['context'] = str(uuid.uuid4())
179
180
class RemoteObject(object):
    """
    Generic msgpack-RPC proxy: any unknown attribute access is translated
    into a remote method invocation against ``url``.
    """

    def __init__(self, url, session):
        self._url = url
        self._session = session

        # johbo: Trading complexity for performance. Avoiding the call to
        # log.debug brings a few percent gain even if it is not active.
        if log.isEnabledFor(logging.DEBUG):
            self._call = self._call_with_logging

    def __getattr__(self, name):
        def remote_proxy(*args, **kwargs):
            return self._call(name, *args, **kwargs)
        return remote_proxy

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        payload = {
            'id': str(uuid.uuid4()),
            'method': name,
            'params': {'args': args, 'kwargs': kwargs}
        }
        return _remote_call(self._url, payload, EXCEPTIONS_MAP, self._session)

    def _call_with_logging(self, name, *args, **kwargs):
        # Explicit unbound call avoids recursing into the rebound self._call.
        log.debug('Calling %s@%s', self._url, name)
        return RemoteObject._call(self, name, *args, **kwargs)
209
210
def _remote_call(url, payload, exceptions_map, session):
    """
    POST a msgpack-encoded RPC ``payload`` to ``url``, decode the reply and
    either return its ``result`` or re-raise the transported error.
    """
    try:
        response = session.post(url, data=msgpack.packb(payload))
    except pycurl.error as e:
        msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc())
        raise exceptions.HttpVCSCommunicationError(msg)
    except Exception as e:
        message = getattr(e, 'message', '')
        if 'Failed to connect' not in message:
            raise
        # gevent doesn't return proper pycurl errors
        raise exceptions.HttpVCSCommunicationError(e)

    if response.status_code >= 400:
        log.error('Call to %s returned non 200 HTTP code: %s',
                  url, response.status_code)
        raise exceptions.HttpVCSCommunicationError(repr(response.content))

    try:
        response = msgpack.unpackb(response.content)
    except Exception:
        log.exception('Failed to decode response %r', response.content)
        raise

    error = response.get('error')
    if error:
        exc_type = error.get('type', 'Exception')
        exc = exceptions_map.get(exc_type, Exception)(error.get('message'))
        try:
            exc._vcs_kind = error['_vcs_kind']
        except KeyError:
            pass

        try:
            # server-side traceback details, attached when available
            exc._vcs_server_traceback = error['traceback']
            exc._vcs_server_org_exc_name = error['org_exc']
            exc._vcs_server_org_exc_tb = error['org_exc_tb']
        except KeyError:
            pass

        raise exc
    return response.get('result')
255 98
256 99
def _streaming_remote_call(url, payload, exceptions_map, session, chunk_size):
    """
    POST a msgpack-encoded RPC ``payload`` and return an iterator over the
    raw response body in ``chunk_size`` pieces instead of decoding it.
    """
    try:
        response = session.post(url, data=msgpack.packb(payload))
    except pycurl.error as e:
        msg = '{}. \npycurl traceback: {}'.format(e, traceback.format_exc())
        raise exceptions.HttpVCSCommunicationError(msg)
    except Exception as e:
        message = getattr(e, 'message', '')
        if 'Failed to connect' not in message:
            raise
        # gevent doesn't return proper pycurl errors
        raise exceptions.HttpVCSCommunicationError(e)

    if response.status_code >= 400:
        log.error('Call to %s returned non 200 HTTP code: %s',
                  url, response.status_code)
        raise exceptions.HttpVCSCommunicationError(repr(response.content))

    return response.iter_content(chunk_size=chunk_size)
120
121
class ServiceConnection(object):
    """
    Repo-independent RPC proxy; attribute access maps to service methods.
    """

    def __init__(self, server_and_port, backend_endpoint, session_factory):
        self.url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint)
        self._session_factory = session_factory

    def __getattr__(self, name):
        def service_attr(*args, **kwargs):
            return self._call(name, *args, **kwargs)

        return service_attr

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        payload = {
            'id': str(uuid.uuid4()),
            'method': name,
            'params': {'args': args, 'kwargs': kwargs}
        }
        fresh_session = self._session_factory()
        return _remote_call(self.url, payload, EXCEPTIONS_MAP, fresh_session)
142
143
class RemoteVCSMaker(object):
    """
    Factory for ``RemoteRepo`` proxies bound to one backend endpoint; also
    exposes backend-scoped remote calls via attribute access. Keeps both the
    plain and the streaming ("/stream") endpoint URLs.
    """

    def __init__(self, server_and_port, backend_endpoint, backend_type, session_factory):
        host = 'http://%s' % server_and_port
        self.url = urlparse.urljoin(host, backend_endpoint)
        self.stream_url = urlparse.urljoin(host, backend_endpoint+'/stream')

        self._session_factory = session_factory
        self.backend_type = backend_type

    def __call__(self, path, repo_id, config, with_wire=None):
        log.debug('%s RepoMaker call on %s', self.backend_type.upper(), path)
        return RemoteRepo(path, repo_id, config, self, with_wire=with_wire)

    def __getattr__(self, name):
        def remote_attr(*args, **kwargs):
            return self._call(name, *args, **kwargs)
        return remote_attr

    @exceptions.map_vcs_exceptions
    def _call(self, func_name, *args, **kwargs):
        payload = {
            'id': str(uuid.uuid4()),
            'method': func_name,
            'backend': self.backend_type,
            'params': {'args': args, 'kwargs': kwargs}
        }
        return _remote_call(
            self.url, payload, EXCEPTIONS_MAP, self._session_factory())
172
173
class RemoteRepo(object):
    """
    Per-repository RPC proxy. Plain attribute access becomes a remote call;
    attributes prefixed with ``stream:`` become streaming remote calls that
    return an iterator of response chunks.
    """

    CHUNK_SIZE = 16384

    def __init__(self, path, repo_id, config, remote_maker, with_wire=None):
        self.url = remote_maker.url
        self.stream_url = remote_maker.stream_url
        self._session = remote_maker._session_factory()

        with_wire = with_wire or {}

        repo_state_uid = with_wire.get('repo_state_uid') or 'state'
        self._wire = {
            "path": path,  # repo path
            "repo_id": repo_id,
            "config": config,
            "repo_state_uid": repo_state_uid,
            "context": self._create_vcs_cache_context(path, repo_state_uid)
        }

        if with_wire:
            self._wire.update(with_wire)

        # NOTE(johbo): Trading complexity for performance. Avoiding the call to
        # log.debug brings a few percent gain even if it is not active.
        # FIX: always assign the flag. Previously it was only set when DEBUG
        # logging was enabled; otherwise the attribute lookup fell through to
        # __getattr__, which returned a (truthy) remote-call proxy function
        # and silently enabled the logging branch on every call.
        self._call_with_logging = log.isEnabledFor(logging.DEBUG)

        self.cert_dir = get_cert_path(rhodecode.CONFIG.get('__file__'))

    def __getattr__(self, name):
        # "stream:"-prefixed attributes are routed to the streaming endpoint.
        if name.startswith('stream:'):
            def repo_remote_attr(*args, **kwargs):
                return self._call_stream(name, *args, **kwargs)
        else:
            def repo_remote_attr(*args, **kwargs):
                return self._call(name, *args, **kwargs)

        return repo_remote_attr

    def _base_call(self, name, *args, **kwargs):
        """Build the (context_uid, payload) pair shared by both call paths."""
        # TODO: oliver: This is currently necessary pre-call since the
        # config object is being changed for hooking scenarios
        wire = copy.deepcopy(self._wire)
        wire["config"] = wire["config"].serialize()
        wire["config"].append(('vcs', 'ssl_dir', self.cert_dir))

        payload = {
            'id': str(uuid.uuid4()),
            'method': name,
            'params': {'wire': wire, 'args': args, 'kwargs': kwargs}
        }

        context_uid = wire.get('context')
        return context_uid, payload

    @exceptions.map_vcs_exceptions
    def _call(self, name, *args, **kwargs):
        context_uid, payload = self._base_call(name, *args, **kwargs)
        url = self.url

        start = time.time()
        if self._call_with_logging:
            log.debug('Calling %s@%s with args:%.10240r. wire_context: %s',
                      url, name, args, context_uid)

        result = _remote_call(url, payload, EXCEPTIONS_MAP, self._session)
        if self._call_with_logging:
            log.debug('Call %s@%s took: %.4fs. wire_context: %s',
                      url, name, time.time()-start, context_uid)
        return result

    @exceptions.map_vcs_exceptions
    def _call_stream(self, name, *args, **kwargs):
        context_uid, payload = self._base_call(name, *args, **kwargs)
        payload['chunk_size'] = self.CHUNK_SIZE
        url = self.stream_url

        start = time.time()
        if self._call_with_logging:
            log.debug('Calling %s@%s with args:%.10240r. wire_context: %s',
                      url, name, args, context_uid)

        result = _streaming_remote_call(url, payload, EXCEPTIONS_MAP, self._session,
                                        self.CHUNK_SIZE)

        if self._call_with_logging:
            log.debug('Call %s@%s took: %.4fs. wire_context: %s',
                      url, name, time.time()-start, context_uid)
        return result

    def __getitem__(self, key):
        return self.revision(key)

    def _create_vcs_cache_context(self, *args):
        """
        Creates a unique string which is passed to the VCSServer on every
        remote call. It is used as cache key in the VCSServer.
        """
        hash_key = '-'.join(map(str, args))
        return str(uuid.uuid5(uuid.NAMESPACE_URL, hash_key))

    def invalidate_vcs_cache(self):
        """
        This invalidates the context which is sent to the VCSServer on every
        call to a remote method. It forces the VCSServer to create a fresh
        repository instance on the next call to a remote method.
        """
        self._wire['context'] = str(uuid.uuid4())
283
284
class VcsHttpProxy(object):
    """
    Forwards WSGI-style requests to the VCSServer over HTTP and unpacks the
    msgpack-framed streaming response: error marker, status, headers, then
    the body chunks.
    """

    CHUNK_SIZE = 16384

    def __init__(self, server_and_port, backend_endpoint):
        retries = Retry(total=5, connect=None, read=None, redirect=None)

        adapter = requests.adapters.HTTPAdapter(max_retries=retries)
        self.base_url = urlparse.urljoin('http://%s' % server_and_port, backend_endpoint)
        self.session = requests.Session()
        self.session.mount('http://', adapter)

    def handle(self, environment, input_data, *args, **kwargs):
        data = {
            'environment': environment,
            'input_data': input_data,
            'args': args,
            'kwargs': kwargs
        }
        result = self.session.post(
            self.base_url, msgpack.packb(data), stream=True)
        return self._get_result(result)

    def _deserialize_and_raise(self, error):
        exception = Exception(error['message'])
        try:
            exception._vcs_kind = error['_vcs_kind']
        except KeyError:
            pass
        raise exception

    def _iterate(self, result):
        unpacker = msgpack.Unpacker()
        for raw_chunk in result.iter_content(chunk_size=self.CHUNK_SIZE):
            unpacker.feed(raw_chunk)
            for message in unpacker:
                yield message

    def _get_result(self, result):
        # The stream starts with an error marker followed by status and
        # headers; the remainder is the response body.
        iterator = self._iterate(result)
        error = iterator.next()
        if error:
            self._deserialize_and_raise(error)

        status = iterator.next()
        headers = iterator.next()

        return iterator, status, headers
305 333
306 334
class ThreadlocalSessionFactory(object):
    """
    Creates one CurlSession per thread on demand.
    """

    def __init__(self):
        self._thread_local = threading.local()

    def __call__(self):
        local = self._thread_local
        # Lazily build the session the first time this thread asks for one.
        if not hasattr(local, 'curl_session'):
            local.curl_session = CurlSession()
        return local.curl_session
@@ -1,850 +1,870 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2014-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 """
22 22 Module holding everything related to vcs nodes, with vcs2 architecture.
23 23 """
24 24
25 25 import os
26 26 import stat
27 27
28 28 from zope.cachedescriptors.property import Lazy as LazyProperty
29 29
30 import rhodecode
30 31 from rhodecode.config.conf import LANGUAGES_EXTENSIONS_MAP
31 32 from rhodecode.lib.utils import safe_unicode, safe_str
32 33 from rhodecode.lib.utils2 import md5
33 34 from rhodecode.lib.vcs import path as vcspath
34 35 from rhodecode.lib.vcs.backends.base import EmptyCommit, FILEMODE_DEFAULT
35 36 from rhodecode.lib.vcs.conf.mtypes import get_mimetypes_db
36 37 from rhodecode.lib.vcs.exceptions import NodeError, RemovedFileNodeError
37 38
# Directory prefix under which Mercurial tracks largefile pointer files.
LARGEFILE_PREFIX = '.hglf'
39 40
40 41
class NodeKind:
    # Enumeration of the kinds of vcs nodes.
    SUBMODULE = -1
    DIR = 1
    FILE = 2
    LARGEFILE = 3
46 47
47 48
class NodeState:
    # Possible change states of a node within a commit.
    ADDED = u'added'
    CHANGED = u'changed'
    NOT_CHANGED = u'not changed'
    REMOVED = u'removed'
53 54
54 55
class NodeGeneratorBase(object):
    """
    Lazy base generator for added/changed/removed filenodes: nodes are
    created only when iterated or called, while ``len()`` works directly
    on the stored path list without building any nodes.
    """

    def __init__(self, current_paths, cs):
        self.cs = cs
        self.current_paths = current_paths

    def __call__(self):
        return list(self)

    def __getslice__(self, i, j):
        for path in self.current_paths[i:j]:
            yield self.cs.get_node(path)

    def __len__(self):
        return len(self.current_paths)

    def __iter__(self):
        for path in self.current_paths:
            yield self.cs.get_node(path)
80 81
81 82
class AddedFileNodesGenerator(NodeGeneratorBase):
    """
    Lazy generator over the file nodes added in the current commit.
    """
86 87
87 88
class ChangedFileNodesGenerator(NodeGeneratorBase):
    """
    Lazy generator over the file nodes changed in the current commit.
    """
92 93
93 94
class RemovedFileNodesGenerator(NodeGeneratorBase):
    """
    Lazy generator over the file nodes removed in the current commit.
    Removed paths yield ``RemovedFileNode`` placeholders instead of real
    nodes, since the files no longer exist in this commit.
    """

    def __iter__(self):
        for path in self.current_paths:
            yield RemovedFileNode(path=path)

    def __getslice__(self, i, j):
        for path in self.current_paths[i:j]:
            yield RemovedFileNode(path=path)
105 106
106 107
107 108 class Node(object):
108 109 """
109 110 Simplest class representing file or directory on repository. SCM backends
110 111 should use ``FileNode`` and ``DirNode`` subclasses rather than ``Node``
111 112 directly.
112 113
113 114 Node's ``path`` cannot start with slash as we operate on *relative* paths
114 115 only. Moreover, every single node is identified by the ``path`` attribute,
115 116 so it cannot end with slash, too. Otherwise, path could lead to mistakes.
116 117 """
117 118 RTLO_MARKER = u"\u202E" # RTLO marker allows swapping text, and certain
118 119 # security attacks could be used with this
119 120 commit = None
120 121
121 122 def __init__(self, path, kind):
122 123 self._validate_path(path) # can throw exception if path is invalid
123 124 self.path = safe_str(path.rstrip('/')) # we store paths as str
124 125 if path == '' and kind != NodeKind.DIR:
125 126 raise NodeError("Only DirNode and its subclasses may be "
126 127 "initialized with empty path")
127 128 self.kind = kind
128 129
129 130 if self.is_root() and not self.is_dir():
130 131 raise NodeError("Root node cannot be FILE kind")
131 132
132 133 def _validate_path(self, path):
133 134 if path.startswith('/'):
134 135 raise NodeError(
135 136 "Cannot initialize Node objects with slash at "
136 137 "the beginning as only relative paths are supported. "
137 138 "Got %s" % (path,))
138 139
139 140 @LazyProperty
140 141 def parent(self):
141 142 parent_path = self.get_parent_path()
142 143 if parent_path:
143 144 if self.commit:
144 145 return self.commit.get_node(parent_path)
145 146 return DirNode(parent_path)
146 147 return None
147 148
148 149 @LazyProperty
149 150 def unicode_path(self):
150 151 return safe_unicode(self.path)
151 152
152 153 @LazyProperty
153 154 def has_rtlo(self):
154 155 """Detects if a path has right-to-left-override marker"""
155 156 return self.RTLO_MARKER in self.unicode_path
156 157
157 158 @LazyProperty
158 159 def unicode_path_safe(self):
159 160 """
160 161 Special SAFE representation of path without the right-to-left-override.
161 162 This should be only used for "showing" the file, cannot be used for any
162 163 urls etc.
163 164 """
164 165 return safe_unicode(self.path).replace(self.RTLO_MARKER, '')
165 166
166 167 @LazyProperty
167 168 def dir_path(self):
168 169 """
169 170 Returns name of the directory from full path of this vcs node. Empty
170 171 string is returned if there's no directory in the path
171 172 """
172 173 _parts = self.path.rstrip('/').rsplit('/', 1)
173 174 if len(_parts) == 2:
174 175 return safe_unicode(_parts[0])
175 176 return u''
176 177
177 178 @LazyProperty
178 179 def name(self):
179 180 """
180 181 Returns name of the node so if its path
181 182 then only last part is returned.
182 183 """
183 184 return safe_unicode(self.path.rstrip('/').split('/')[-1])
184 185
185 186 @property
186 187 def kind(self):
187 188 return self._kind
188 189
189 190 @kind.setter
190 191 def kind(self, kind):
191 192 if hasattr(self, '_kind'):
192 193 raise NodeError("Cannot change node's kind")
193 194 else:
194 195 self._kind = kind
195 196 # Post setter check (path's trailing slash)
196 197 if self.path.endswith('/'):
197 198 raise NodeError("Node's path cannot end with slash")
198 199
199 200 def __cmp__(self, other):
200 201 """
201 202 Comparator using name of the node, needed for quick list sorting.
202 203 """
203 204
204 205 kind_cmp = cmp(self.kind, other.kind)
205 206 if kind_cmp:
206 207 if isinstance(self, SubModuleNode):
207 208 # we make submodules equal to dirnode for "sorting" purposes
208 209 return NodeKind.DIR
209 210 return kind_cmp
210 211 return cmp(self.name, other.name)
211 212
212 213 def __eq__(self, other):
213 214 for attr in ['name', 'path', 'kind']:
214 215 if getattr(self, attr) != getattr(other, attr):
215 216 return False
216 217 if self.is_file():
217 218 if self.content != other.content:
218 219 return False
219 220 else:
220 221 # For DirNode's check without entering each dir
221 222 self_nodes_paths = list(sorted(n.path for n in self.nodes))
222 223 other_nodes_paths = list(sorted(n.path for n in self.nodes))
223 224 if self_nodes_paths != other_nodes_paths:
224 225 return False
225 226 return True
226 227
227 228 def __ne__(self, other):
228 229 return not self.__eq__(other)
229 230
230 231 def __repr__(self):
231 232 return '<%s %r>' % (self.__class__.__name__, self.path)
232 233
233 234 def __str__(self):
234 235 return self.__repr__()
235 236
236 237 def __unicode__(self):
237 238 return self.name
238 239
239 240 def get_parent_path(self):
240 241 """
241 242 Returns node's parent path or empty string if node is root.
242 243 """
243 244 if self.is_root():
244 245 return ''
245 246 return vcspath.dirname(self.path.rstrip('/')) + '/'
246 247
247 248 def is_file(self):
248 249 """
249 250 Returns ``True`` if node's kind is ``NodeKind.FILE``, ``False``
250 251 otherwise.
251 252 """
252 253 return self.kind == NodeKind.FILE
253 254
254 255 def is_dir(self):
255 256 """
256 257 Returns ``True`` if node's kind is ``NodeKind.DIR``, ``False``
257 258 otherwise.
258 259 """
259 260 return self.kind == NodeKind.DIR
260 261
261 262 def is_root(self):
262 263 """
263 264 Returns ``True`` if node is a root node and ``False`` otherwise.
264 265 """
265 266 return self.kind == NodeKind.DIR and self.path == ''
266 267
267 268 def is_submodule(self):
268 269 """
269 270 Returns ``True`` if node's kind is ``NodeKind.SUBMODULE``, ``False``
270 271 otherwise.
271 272 """
272 273 return self.kind == NodeKind.SUBMODULE
273 274
274 275 def is_largefile(self):
275 276 """
276 277 Returns ``True`` if node's kind is ``NodeKind.LARGEFILE``, ``False``
277 278 otherwise
278 279 """
279 280 return self.kind == NodeKind.LARGEFILE
280 281
281 282 def is_link(self):
282 283 if self.commit:
283 284 return self.commit.is_link(self.path)
284 285 return False
285 286
286 287 @LazyProperty
287 288 def added(self):
288 289 return self.state is NodeState.ADDED
289 290
290 291 @LazyProperty
291 292 def changed(self):
292 293 return self.state is NodeState.CHANGED
293 294
294 295 @LazyProperty
295 296 def not_changed(self):
296 297 return self.state is NodeState.NOT_CHANGED
297 298
298 299 @LazyProperty
299 300 def removed(self):
300 301 return self.state is NodeState.REMOVED
301 302
302 303
303 304 class FileNode(Node):
304 305 """
305 306 Class representing file nodes.
306 307
307 308 :attribute: path: path to the node, relative to repository's root
308 309 :attribute: content: if given arbitrary sets content of the file
309 310 :attribute: commit: if given, first time content is accessed, callback
310 311 :attribute: mode: stat mode for a node. Default is `FILEMODE_DEFAULT`.
311 312 """
312 313 _filter_pre_load = []
313 314
314 315 def __init__(self, path, content=None, commit=None, mode=None, pre_load=None):
315 316 """
316 317 Only one of ``content`` and ``commit`` may be given. Passing both
317 318 would raise ``NodeError`` exception.
318 319
319 320 :param path: relative path to the node
320 321 :param content: content may be passed to constructor
321 322 :param commit: if given, will use it to lazily fetch content
322 323 :param mode: ST_MODE (i.e. 0100644)
323 324 """
324 325 if content and commit:
325 326 raise NodeError("Cannot use both content and commit")
326 327 super(FileNode, self).__init__(path, kind=NodeKind.FILE)
327 328 self.commit = commit
328 329 self._content = content
329 330 self._mode = mode or FILEMODE_DEFAULT
330 331
331 332 self._set_bulk_properties(pre_load)
332 333
333 334 def _set_bulk_properties(self, pre_load):
334 335 if not pre_load:
335 336 return
336 337 pre_load = [entry for entry in pre_load
337 338 if entry not in self._filter_pre_load]
338 339 if not pre_load:
339 340 return
340 341
341 342 for attr_name in pre_load:
342 343 result = getattr(self, attr_name)
343 344 if callable(result):
344 345 result = result()
345 346 self.__dict__[attr_name] = result
346 347
347 348 @LazyProperty
348 349 def mode(self):
349 350 """
350 351 Returns lazily mode of the FileNode. If `commit` is not set, would
351 352 use value given at initialization or `FILEMODE_DEFAULT` (default).
352 353 """
353 354 if self.commit:
354 355 mode = self.commit.get_file_mode(self.path)
355 356 else:
356 357 mode = self._mode
357 358 return mode
358 359
359 360 @LazyProperty
360 361 def raw_bytes(self):
361 362 """
362 363 Returns lazily the raw bytes of the FileNode.
363 364 """
364 365 if self.commit:
365 366 if self._content is None:
366 367 self._content = self.commit.get_file_content(self.path)
367 368 content = self._content
368 369 else:
369 370 content = self._content
370 371 return content
371 372
373 def stream_bytes(self):
374 """
375 Returns an iterator that will stream the content of the file directly from
376 vcsserver without loading it to memory.
377 """
378 if self.commit:
379 return self.commit.get_file_content_streamed(self.path)
380 raise NodeError(
381 "Cannot retrieve message of the file without related "
382 "commit attribute")
383
372 384 @LazyProperty
373 385 def md5(self):
374 386 """
375 387 Returns md5 of the file node.
376 388 """
377 389 return md5(self.raw_bytes)
378 390
379 391 def metadata_uncached(self):
380 392 """
381 393 Returns md5, binary flag of the file node, without any cache usage.
382 394 """
383 395
384 396 content = self.content_uncached()
385 397
386 398 is_binary = content and '\0' in content
387 399 size = 0
388 400 if content:
389 401 size = len(content)
390 402
391 403 return is_binary, md5(content), size, content
392 404
393 405 def content_uncached(self):
394 406 """
395 407 Returns lazily content of the FileNode. If possible, would try to
396 408 decode content from UTF-8.
397 409 """
398 410 if self.commit:
399 411 content = self.commit.get_file_content(self.path)
400 412 else:
401 413 content = self._content
402 414 return content
403 415
404 416 @LazyProperty
405 417 def content(self):
406 418 """
407 419 Returns lazily content of the FileNode. If possible, would try to
408 420 decode content from UTF-8.
409 421 """
410 422 content = self.raw_bytes
411 423
412 424 if self.is_binary:
413 425 return content
414 426 return safe_unicode(content)
415 427
416 428 @LazyProperty
417 429 def size(self):
418 430 if self.commit:
419 431 return self.commit.get_file_size(self.path)
420 432 raise NodeError(
421 433 "Cannot retrieve size of the file without related "
422 434 "commit attribute")
423 435
424 436 @LazyProperty
425 437 def message(self):
426 438 if self.commit:
427 439 return self.last_commit.message
428 440 raise NodeError(
429 441 "Cannot retrieve message of the file without related "
430 442 "commit attribute")
431 443
432 444 @LazyProperty
433 445 def last_commit(self):
434 446 if self.commit:
435 447 pre_load = ["author", "date", "message", "parents"]
436 448 return self.commit.get_path_commit(self.path, pre_load=pre_load)
437 449 raise NodeError(
438 450 "Cannot retrieve last commit of the file without "
439 451 "related commit attribute")
440 452
441 453 def get_mimetype(self):
442 454 """
443 455 Mimetype is calculated based on the file's content. If ``_mimetype``
444 456 attribute is available, it will be returned (backends which store
445 457 mimetypes or can easily recognize them, should set this private
446 458 attribute to indicate that type should *NOT* be calculated).
447 459 """
448 460
449 461 if hasattr(self, '_mimetype'):
450 462 if (isinstance(self._mimetype, (tuple, list,)) and
451 463 len(self._mimetype) == 2):
452 464 return self._mimetype
453 465 else:
454 466 raise NodeError('given _mimetype attribute must be an 2 '
455 467 'element list or tuple')
456 468
457 469 db = get_mimetypes_db()
458 470 mtype, encoding = db.guess_type(self.name)
459 471
460 472 if mtype is None:
461 473 if self.is_binary:
462 474 mtype = 'application/octet-stream'
463 475 encoding = None
464 476 else:
465 477 mtype = 'text/plain'
466 478 encoding = None
467 479
468 480 # try with pygments
469 481 try:
470 482 from pygments.lexers import get_lexer_for_filename
471 483 mt = get_lexer_for_filename(self.name).mimetypes
472 484 except Exception:
473 485 mt = None
474 486
475 487 if mt:
476 488 mtype = mt[0]
477 489
478 490 return mtype, encoding
479 491
480 492 @LazyProperty
481 493 def mimetype(self):
482 494 """
483 495 Wrapper around full mimetype info. It returns only type of fetched
484 496 mimetype without the encoding part. use get_mimetype function to fetch
485 497 full set of (type,encoding)
486 498 """
487 499 return self.get_mimetype()[0]
488 500
489 501 @LazyProperty
490 502 def mimetype_main(self):
491 503 return self.mimetype.split('/')[0]
492 504
493 505 @classmethod
494 506 def get_lexer(cls, filename, content=None):
495 507 from pygments import lexers
496 508
497 509 extension = filename.split('.')[-1]
498 510 lexer = None
499 511
500 512 try:
501 513 lexer = lexers.guess_lexer_for_filename(
502 514 filename, content, stripnl=False)
503 515 except lexers.ClassNotFound:
504 516 lexer = None
505 517
506 518 # try our EXTENSION_MAP
507 519 if not lexer:
508 520 try:
509 521 lexer_class = LANGUAGES_EXTENSIONS_MAP.get(extension)
510 522 if lexer_class:
511 523 lexer = lexers.get_lexer_by_name(lexer_class[0])
512 524 except lexers.ClassNotFound:
513 525 lexer = None
514 526
515 527 if not lexer:
516 528 lexer = lexers.TextLexer(stripnl=False)
517 529
518 530 return lexer
519 531
520 532 @LazyProperty
521 533 def lexer(self):
522 534 """
523 535 Returns pygment's lexer class. Would try to guess lexer taking file's
524 536 content, name and mimetype.
525 537 """
526 538 return self.get_lexer(self.name, self.content)
527 539
528 540 @LazyProperty
529 541 def lexer_alias(self):
530 542 """
531 543 Returns first alias of the lexer guessed for this file.
532 544 """
533 545 return self.lexer.aliases[0]
534 546
535 547 @LazyProperty
536 548 def history(self):
537 549 """
538 550 Returns a list of commit for this file in which the file was changed
539 551 """
540 552 if self.commit is None:
541 553 raise NodeError('Unable to get commit for this FileNode')
542 554 return self.commit.get_path_history(self.path)
543 555
544 556 @LazyProperty
545 557 def annotate(self):
546 558 """
547 559 Returns a list of three element tuples with lineno, commit and line
548 560 """
549 561 if self.commit is None:
550 562 raise NodeError('Unable to get commit for this FileNode')
551 563 pre_load = ["author", "date", "message", "parents"]
552 564 return self.commit.get_file_annotate(self.path, pre_load=pre_load)
553 565
554 566 @LazyProperty
555 567 def state(self):
556 568 if not self.commit:
557 569 raise NodeError(
558 570 "Cannot check state of the node if it's not "
559 571 "linked with commit")
560 572 elif self.path in (node.path for node in self.commit.added):
561 573 return NodeState.ADDED
562 574 elif self.path in (node.path for node in self.commit.changed):
563 575 return NodeState.CHANGED
564 576 else:
565 577 return NodeState.NOT_CHANGED
566 578
567 579 @LazyProperty
568 580 def is_binary(self):
569 581 """
570 582 Returns True if file has binary content.
571 583 """
572 584 _bin = self.raw_bytes and '\0' in self.raw_bytes
573 585 return _bin
574 586
575 587 @LazyProperty
576 588 def extension(self):
577 589 """Returns filenode extension"""
578 590 return self.name.split('.')[-1]
579 591
580 592 @property
581 593 def is_executable(self):
582 594 """
583 595 Returns ``True`` if file has executable flag turned on.
584 596 """
585 597 return bool(self.mode & stat.S_IXUSR)
586 598
587 599 def get_largefile_node(self):
588 600 """
589 601 Try to return a Mercurial FileNode from this node. It does internal
590 602 checks inside largefile store, if that file exist there it will
591 603 create special instance of LargeFileNode which can get content from
592 604 LF store.
593 605 """
594 606 if self.commit:
595 607 return self.commit.get_largefile_node(self.path)
596 608
597 609 def lines(self, count_empty=False):
598 610 all_lines, empty_lines = 0, 0
599 611
600 612 if not self.is_binary:
601 613 content = self.content
602 614 if count_empty:
603 615 all_lines = 0
604 616 empty_lines = 0
605 617 for line in content.splitlines(True):
606 618 if line == '\n':
607 619 empty_lines += 1
608 620 all_lines += 1
609 621
610 622 return all_lines, all_lines - empty_lines
611 623 else:
612 624 # fast method
613 625 empty_lines = all_lines = content.count('\n')
614 626 if all_lines == 0 and content:
615 627 # one-line without a newline
616 628 empty_lines = all_lines = 1
617 629
618 630 return all_lines, empty_lines
619 631
620 632 def __repr__(self):
621 633 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
622 634 getattr(self.commit, 'short_id', ''))
623 635
624 636
625 637 class RemovedFileNode(FileNode):
626 638 """
627 639 Dummy FileNode class - trying to access any public attribute except path,
628 640 name, kind or state (or methods/attributes checking those two) would raise
629 641 RemovedFileNodeError.
630 642 """
631 643 ALLOWED_ATTRIBUTES = [
632 644 'name', 'path', 'state', 'is_root', 'is_file', 'is_dir', 'kind',
633 645 'added', 'changed', 'not_changed', 'removed'
634 646 ]
635 647
636 648 def __init__(self, path):
637 649 """
638 650 :param path: relative path to the node
639 651 """
640 652 super(RemovedFileNode, self).__init__(path=path)
641 653
642 654 def __getattribute__(self, attr):
643 655 if attr.startswith('_') or attr in RemovedFileNode.ALLOWED_ATTRIBUTES:
644 656 return super(RemovedFileNode, self).__getattribute__(attr)
645 657 raise RemovedFileNodeError(
646 658 "Cannot access attribute %s on RemovedFileNode" % attr)
647 659
648 660 @LazyProperty
649 661 def state(self):
650 662 return NodeState.REMOVED
651 663
652 664
653 665 class DirNode(Node):
654 666 """
655 667 DirNode stores list of files and directories within this node.
656 668 Nodes may be used standalone but within repository context they
657 669 lazily fetch data within same repositorty's commit.
658 670 """
659 671
660 672 def __init__(self, path, nodes=(), commit=None):
661 673 """
662 674 Only one of ``nodes`` and ``commit`` may be given. Passing both
663 675 would raise ``NodeError`` exception.
664 676
665 677 :param path: relative path to the node
666 678 :param nodes: content may be passed to constructor
667 679 :param commit: if given, will use it to lazily fetch content
668 680 """
669 681 if nodes and commit:
670 682 raise NodeError("Cannot use both nodes and commit")
671 683 super(DirNode, self).__init__(path, NodeKind.DIR)
672 684 self.commit = commit
673 685 self._nodes = nodes
674 686
675 687 @LazyProperty
676 688 def content(self):
677 689 raise NodeError(
678 690 "%s represents a dir and has no `content` attribute" % self)
679 691
680 692 @LazyProperty
681 693 def nodes(self):
682 694 if self.commit:
683 695 nodes = self.commit.get_nodes(self.path)
684 696 else:
685 697 nodes = self._nodes
686 698 self._nodes_dict = dict((node.path, node) for node in nodes)
687 699 return sorted(nodes)
688 700
689 701 @LazyProperty
690 702 def files(self):
691 703 return sorted((node for node in self.nodes if node.is_file()))
692 704
693 705 @LazyProperty
694 706 def dirs(self):
695 707 return sorted((node for node in self.nodes if node.is_dir()))
696 708
697 709 def __iter__(self):
698 710 for node in self.nodes:
699 711 yield node
700 712
701 713 def get_node(self, path):
702 714 """
703 715 Returns node from within this particular ``DirNode``, so it is now
704 716 allowed to fetch, i.e. node located at 'docs/api/index.rst' from node
705 717 'docs'. In order to access deeper nodes one must fetch nodes between
706 718 them first - this would work::
707 719
708 720 docs = root.get_node('docs')
709 721 docs.get_node('api').get_node('index.rst')
710 722
711 723 :param: path - relative to the current node
712 724
713 725 .. note::
714 726 To access lazily (as in example above) node have to be initialized
715 727 with related commit object - without it node is out of
716 728 context and may know nothing about anything else than nearest
717 729 (located at same level) nodes.
718 730 """
719 731 try:
720 732 path = path.rstrip('/')
721 733 if path == '':
722 734 raise NodeError("Cannot retrieve node without path")
723 735 self.nodes # access nodes first in order to set _nodes_dict
724 736 paths = path.split('/')
725 737 if len(paths) == 1:
726 738 if not self.is_root():
727 739 path = '/'.join((self.path, paths[0]))
728 740 else:
729 741 path = paths[0]
730 742 return self._nodes_dict[path]
731 743 elif len(paths) > 1:
732 744 if self.commit is None:
733 745 raise NodeError(
734 746 "Cannot access deeper nodes without commit")
735 747 else:
736 748 path1, path2 = paths[0], '/'.join(paths[1:])
737 749 return self.get_node(path1).get_node(path2)
738 750 else:
739 751 raise KeyError
740 752 except KeyError:
741 753 raise NodeError("Node does not exist at %s" % path)
742 754
743 755 @LazyProperty
744 756 def state(self):
745 757 raise NodeError("Cannot access state of DirNode")
746 758
747 759 @LazyProperty
748 760 def size(self):
749 761 size = 0
750 762 for root, dirs, files in self.commit.walk(self.path):
751 763 for f in files:
752 764 size += f.size
753 765
754 766 return size
755 767
756 768 @LazyProperty
757 769 def last_commit(self):
758 770 if self.commit:
759 771 pre_load = ["author", "date", "message", "parents"]
760 772 return self.commit.get_path_commit(self.path, pre_load=pre_load)
761 773 raise NodeError(
762 774 "Cannot retrieve last commit of the file without "
763 775 "related commit attribute")
764 776
765 777 def __repr__(self):
766 778 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
767 779 getattr(self.commit, 'short_id', ''))
768 780
769 781
770 782 class RootNode(DirNode):
771 783 """
772 784 DirNode being the root node of the repository.
773 785 """
774 786
775 787 def __init__(self, nodes=(), commit=None):
776 788 super(RootNode, self).__init__(path='', nodes=nodes, commit=commit)
777 789
778 790 def __repr__(self):
779 791 return '<%s>' % self.__class__.__name__
780 792
781 793
782 794 class SubModuleNode(Node):
783 795 """
784 796 represents a SubModule of Git or SubRepo of Mercurial
785 797 """
786 798 is_binary = False
787 799 size = 0
788 800
789 801 def __init__(self, name, url=None, commit=None, alias=None):
790 802 self.path = name
791 803 self.kind = NodeKind.SUBMODULE
792 804 self.alias = alias
793 805
794 806 # we have to use EmptyCommit here since this can point to svn/git/hg
795 807 # submodules we cannot get from repository
796 808 self.commit = EmptyCommit(str(commit), alias=alias)
797 809 self.url = url or self._extract_submodule_url()
798 810
799 811 def __repr__(self):
800 812 return '<%s %r @ %s>' % (self.__class__.__name__, self.path,
801 813 getattr(self.commit, 'short_id', ''))
802 814
803 815 def _extract_submodule_url(self):
804 816 # TODO: find a way to parse gits submodule file and extract the
805 817 # linking URL
806 818 return self.path
807 819
808 820 @LazyProperty
809 821 def name(self):
810 822 """
811 823 Returns name of the node so if its path
812 824 then only last part is returned.
813 825 """
814 826 org = safe_unicode(self.path.rstrip('/').split('/')[-1])
815 827 return u'%s @ %s' % (org, self.commit.short_id)
816 828
817 829
818 830 class LargeFileNode(FileNode):
819 831
820 832 def __init__(self, path, url=None, commit=None, alias=None, org_path=None):
821 833 self.path = path
822 834 self.org_path = org_path
823 835 self.kind = NodeKind.LARGEFILE
824 836 self.alias = alias
825 837
826 838 def _validate_path(self, path):
827 839 """
828 840 we override check since the LargeFileNode path is system absolute
829 841 """
830 842 pass
831 843
832 844 def __repr__(self):
833 845 return '<%s %r>' % (self.__class__.__name__, self.path)
834 846
835 847 @LazyProperty
836 848 def size(self):
837 849 return os.stat(self.path).st_size
838 850
839 851 @LazyProperty
840 852 def raw_bytes(self):
841 853 with open(self.path, 'rb') as f:
842 854 content = f.read()
843 855 return content
844 856
845 857 @LazyProperty
846 858 def name(self):
847 859 """
848 860 Overwrites name to be the org lf path
849 861 """
850 862 return self.org_path
863
864 def stream_bytes(self):
865 with open(self.path, 'rb') as stream:
866 while True:
867 data = stream.read(16 * 1024)
868 if not data:
869 break
870 yield data
@@ -1,1902 +1,1826 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import collections
22 22 import datetime
23 23 import hashlib
24 24 import os
25 25 import re
26 26 import pprint
27 27 import shutil
28 28 import socket
29 29 import subprocess32
30 30 import time
31 31 import uuid
32 32 import dateutil.tz
33 33 import functools
34 34
35 35 import mock
36 36 import pyramid.testing
37 37 import pytest
38 38 import colander
39 39 import requests
40 40 import pyramid.paster
41 41
42 42 import rhodecode
43 43 from rhodecode.lib.utils2 import AttributeDict
44 44 from rhodecode.model.changeset_status import ChangesetStatusModel
45 45 from rhodecode.model.comment import CommentsModel
46 46 from rhodecode.model.db import (
47 47 PullRequest, Repository, RhodeCodeSetting, ChangesetStatus, RepoGroup,
48 48 UserGroup, RepoRhodeCodeUi, RepoRhodeCodeSetting, RhodeCodeUi)
49 49 from rhodecode.model.meta import Session
50 50 from rhodecode.model.pull_request import PullRequestModel
51 51 from rhodecode.model.repo import RepoModel
52 52 from rhodecode.model.repo_group import RepoGroupModel
53 53 from rhodecode.model.user import UserModel
54 54 from rhodecode.model.settings import VcsSettingsModel
55 55 from rhodecode.model.user_group import UserGroupModel
56 56 from rhodecode.model.integration import IntegrationModel
57 57 from rhodecode.integrations import integration_type_registry
58 58 from rhodecode.integrations.types.base import IntegrationTypeBase
59 59 from rhodecode.lib.utils import repo2db_mapper
60 from rhodecode.lib.vcs import create_vcsserver_proxy
61 60 from rhodecode.lib.vcs.backends import get_backend
62 61 from rhodecode.lib.vcs.nodes import FileNode
63 62 from rhodecode.tests import (
64 63 login_user_session, get_new_dir, utils, TESTS_TMP_PATH,
65 64 TEST_USER_ADMIN_LOGIN, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR2_LOGIN,
66 65 TEST_USER_REGULAR_PASS)
67 66 from rhodecode.tests.utils import CustomTestApp, set_anonymous_access
68 67 from rhodecode.tests.fixture import Fixture
69 68 from rhodecode.config import utils as config_utils
70 69
71 70 def _split_comma(value):
72 71 return value.split(',')
73 72
74 73
75 74 def pytest_addoption(parser):
76 75 parser.addoption(
77 76 '--keep-tmp-path', action='store_true',
78 77 help="Keep the test temporary directories")
79 78 parser.addoption(
80 79 '--backends', action='store', type=_split_comma,
81 80 default=['git', 'hg', 'svn'],
82 81 help="Select which backends to test for backend specific tests.")
83 82 parser.addoption(
84 83 '--dbs', action='store', type=_split_comma,
85 84 default=['sqlite'],
86 85 help="Select which database to test for database specific tests. "
87 86 "Possible options are sqlite,postgres,mysql")
88 87 parser.addoption(
89 88 '--appenlight', '--ae', action='store_true',
90 89 help="Track statistics in appenlight.")
91 90 parser.addoption(
92 91 '--appenlight-api-key', '--ae-key',
93 92 help="API key for Appenlight.")
94 93 parser.addoption(
95 94 '--appenlight-url', '--ae-url',
96 95 default="https://ae.rhodecode.com",
97 96 help="Appenlight service URL, defaults to https://ae.rhodecode.com")
98 97 parser.addoption(
99 98 '--sqlite-connection-string', action='store',
100 99 default='', help="Connection string for the dbs tests with SQLite")
101 100 parser.addoption(
102 101 '--postgres-connection-string', action='store',
103 102 default='', help="Connection string for the dbs tests with Postgres")
104 103 parser.addoption(
105 104 '--mysql-connection-string', action='store',
106 105 default='', help="Connection string for the dbs tests with MySQL")
107 106 parser.addoption(
108 107 '--repeat', type=int, default=100,
109 108 help="Number of repetitions in performance tests.")
110 109
111 110
112 111 def pytest_configure(config):
113 112 from rhodecode.config import patches
114 113
115 114
116 115 def pytest_collection_modifyitems(session, config, items):
117 116 # nottest marked, compare nose, used for transition from nose to pytest
118 117 remaining = [
119 118 i for i in items if getattr(i.obj, '__test__', True)]
120 119 items[:] = remaining
121 120
122 121
123 122 def pytest_generate_tests(metafunc):
124 123 # Support test generation based on --backend parameter
125 124 if 'backend_alias' in metafunc.fixturenames:
126 125 backends = get_backends_from_metafunc(metafunc)
127 126 scope = None
128 127 if not backends:
129 128 pytest.skip("Not enabled for any of selected backends")
130 129 metafunc.parametrize('backend_alias', backends, scope=scope)
131 130 elif hasattr(metafunc.function, 'backends'):
132 131 backends = get_backends_from_metafunc(metafunc)
133 132 if not backends:
134 133 pytest.skip("Not enabled for any of selected backends")
135 134
136 135
137 136 def get_backends_from_metafunc(metafunc):
138 137 requested_backends = set(metafunc.config.getoption('--backends'))
139 138 if hasattr(metafunc.function, 'backends'):
140 139 # Supported backends by this test function, created from
141 140 # pytest.mark.backends
142 141 backends = metafunc.definition.get_closest_marker('backends').args
143 142 elif hasattr(metafunc.cls, 'backend_alias'):
144 143 # Support class attribute "backend_alias", this is mainly
145 144 # for legacy reasons for tests not yet using pytest.mark.backends
146 145 backends = [metafunc.cls.backend_alias]
147 146 else:
148 147 backends = metafunc.config.getoption('--backends')
149 148 return requested_backends.intersection(backends)
150 149
151 150
152 151 @pytest.fixture(scope='session', autouse=True)
153 152 def activate_example_rcextensions(request):
154 153 """
155 154 Patch in an example rcextensions module which verifies passed in kwargs.
156 155 """
157 156 from rhodecode.config import rcextensions
158 157
159 158 old_extensions = rhodecode.EXTENSIONS
160 159 rhodecode.EXTENSIONS = rcextensions
161 160 rhodecode.EXTENSIONS.calls = collections.defaultdict(list)
162 161
163 162 @request.addfinalizer
164 163 def cleanup():
165 164 rhodecode.EXTENSIONS = old_extensions
166 165
167 166
168 167 @pytest.fixture
169 168 def capture_rcextensions():
170 169 """
171 170 Returns the recorded calls to entry points in rcextensions.
172 171 """
173 172 calls = rhodecode.EXTENSIONS.calls
174 173 calls.clear()
175 174 # Note: At this moment, it is still the empty dict, but that will
176 175 # be filled during the test run and since it is a reference this
177 176 # is enough to make it work.
178 177 return calls
179 178
180 179
181 180 @pytest.fixture(scope='session')
182 181 def http_environ_session():
183 182 """
184 183 Allow to use "http_environ" in session scope.
185 184 """
186 185 return plain_http_environ()
187 186
188 187
189 188 def plain_http_host_stub():
190 189 """
191 190 Value of HTTP_HOST in the test run.
192 191 """
193 192 return 'example.com:80'
194 193
195 194
196 195 @pytest.fixture
197 196 def http_host_stub():
198 197 """
199 198 Value of HTTP_HOST in the test run.
200 199 """
201 200 return plain_http_host_stub()
202 201
203 202
204 203 def plain_http_host_only_stub():
205 204 """
206 205 Value of HTTP_HOST in the test run.
207 206 """
208 207 return plain_http_host_stub().split(':')[0]
209 208
210 209
211 210 @pytest.fixture
212 211 def http_host_only_stub():
213 212 """
214 213 Value of HTTP_HOST in the test run.
215 214 """
216 215 return plain_http_host_only_stub()
217 216
218 217
219 218 def plain_http_environ():
220 219 """
221 220 HTTP extra environ keys.
222 221
223 222 User by the test application and as well for setting up the pylons
224 223 environment. In the case of the fixture "app" it should be possible
225 224 to override this for a specific test case.
226 225 """
227 226 return {
228 227 'SERVER_NAME': plain_http_host_only_stub(),
229 228 'SERVER_PORT': plain_http_host_stub().split(':')[1],
230 229 'HTTP_HOST': plain_http_host_stub(),
231 230 'HTTP_USER_AGENT': 'rc-test-agent',
232 231 'REQUEST_METHOD': 'GET'
233 232 }
234 233
235 234
236 235 @pytest.fixture
237 236 def http_environ():
238 237 """
239 238 HTTP extra environ keys.
240 239
241 240 User by the test application and as well for setting up the pylons
242 241 environment. In the case of the fixture "app" it should be possible
243 242 to override this for a specific test case.
244 243 """
245 244 return plain_http_environ()
246 245
247 246
248 247 @pytest.fixture(scope='session')
249 248 def baseapp(ini_config, vcsserver, http_environ_session):
250 249 from rhodecode.lib.pyramid_utils import get_app_config
251 250 from rhodecode.config.middleware import make_pyramid_app
252 251
253 252 print("Using the RhodeCode configuration:{}".format(ini_config))
254 253 pyramid.paster.setup_logging(ini_config)
255 254
256 255 settings = get_app_config(ini_config)
257 256 app = make_pyramid_app({'__file__': ini_config}, **settings)
258 257
259 258 return app
260 259
261 260
262 261 @pytest.fixture(scope='function')
263 262 def app(request, config_stub, baseapp, http_environ):
264 263 app = CustomTestApp(
265 264 baseapp,
266 265 extra_environ=http_environ)
267 266 if request.cls:
268 267 request.cls.app = app
269 268 return app
270 269
271 270
272 271 @pytest.fixture(scope='session')
273 272 def app_settings(baseapp, ini_config):
274 273 """
275 274 Settings dictionary used to create the app.
276 275
277 276 Parses the ini file and passes the result through the sanitize and apply
278 277 defaults mechanism in `rhodecode.config.middleware`.
279 278 """
280 279 return baseapp.config.get_settings()
281 280
282 281
283 282 @pytest.fixture(scope='session')
284 283 def db_connection(ini_settings):
285 284 # Initialize the database connection.
286 285 config_utils.initialize_database(ini_settings)
287 286
288 287
289 288 LoginData = collections.namedtuple('LoginData', ('csrf_token', 'user'))
290 289
291 290
292 291 def _autologin_user(app, *args):
293 292 session = login_user_session(app, *args)
294 293 csrf_token = rhodecode.lib.auth.get_csrf_token(session)
295 294 return LoginData(csrf_token, session['rhodecode_user'])
296 295
297 296
298 297 @pytest.fixture
299 298 def autologin_user(app):
300 299 """
301 300 Utility fixture which makes sure that the admin user is logged in
302 301 """
303 302 return _autologin_user(app)
304 303
305 304
@pytest.fixture
def autologin_regular_user(app):
    """
    Utility fixture which makes sure that the regular (non-admin) test
    user is logged in.
    """
    return _autologin_user(
        app, TEST_USER_REGULAR_LOGIN, TEST_USER_REGULAR_PASS)
313 312
314 313
@pytest.fixture(scope='function')
def csrf_token(request, autologin_user):
    """CSRF token belonging to the automatically logged-in admin user."""
    return autologin_user.csrf_token
318 317
319 318
@pytest.fixture(scope='function')
def xhr_header(request):
    """Extra environ entries marking a request as an XMLHttpRequest."""
    return {'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'}
323 322
324 323
@pytest.fixture
def real_crypto_backend(monkeypatch):
    """
    Switch the production crypto backend on for this test.

    The test run normally replaces the crypto backend with a faster
    MD5-based implementation; this re-enables the real one.
    """
    monkeypatch.setattr(rhodecode, 'is_test', False)
334 333
335 334
@pytest.fixture(scope='class')
def index_location(request, baseapp):
    """Full-text search index location taken from the app settings."""
    location = baseapp.config.get_settings()['search.location']
    if request.cls:
        request.cls.index_location = location
    return location
342 341
343 342
@pytest.fixture(scope='session', autouse=True)
def tests_tmp_path(request):
    """
    Session-wide temporary directory for test artifacts.

    Removed at the end of the session unless ``--keep-tmp-path`` is given.
    """
    if not os.path.exists(TESTS_TMP_PATH):
        os.makedirs(TESTS_TMP_PATH)

    keep = request.config.getoption('--keep-tmp-path')
    if not keep:
        request.addfinalizer(lambda: shutil.rmtree(TESTS_TMP_PATH))

    return TESTS_TMP_PATH
358 357
359 358
@pytest.fixture
def test_repo_group(request):
    """
    Temporary repository group which is destroyed automatically after use.
    """
    fixture = Fixture()
    group_id = 'test_repo_group_%s' % str(time.time()).replace('.', '')
    repo_group = fixture.create_repo_group(group_id)

    request.addfinalizer(lambda: fixture.destroy_repo_group(group_id))
    return repo_group
375 374
376 375
@pytest.fixture
def test_user_group(request):
    """
    Temporary user group which is destroyed automatically after use.
    """
    fixture = Fixture()
    group_id = 'test_user_group_%s' % str(time.time()).replace('.', '')
    user_group = fixture.create_user_group(group_id)

    request.addfinalizer(lambda: fixture.destroy_user_group(user_group))
    return user_group
392 391
393 392
@pytest.fixture(scope='session')
def test_repo(request):
    """Session-wide :class:`TestRepoContainer` with automatic cleanup."""
    container = TestRepoContainer()
    request.addfinalizer(container._cleanup)
    return container
399 398
400 399
class TestRepoContainer(object):
    """
    Container for test repositories which are used read only.

    Repositories are created lazily on first request and cached for the
    lifetime of this object.

    Usage to get the svn test repository "minimal"::

        test_repo = TestContainer()
        repo = test_repo('minimal', 'svn')

    """

    # map of backend alias -> function extracting a dump into a repository
    dump_extractors = {
        'git': utils.extract_git_repo_from_dump,
        'hg': utils.extract_hg_repo_from_dump,
        'svn': utils.extract_svn_repo_from_dump,
    }

    def __init__(self):
        self._cleanup_repos = []
        self._fixture = Fixture()
        self._repos = {}

    def __call__(self, dump_name, backend_alias, config=None):
        cache_key = (dump_name, backend_alias)
        try:
            repo_id = self._repos[cache_key]
        except KeyError:
            repo = self._create_repo(dump_name, backend_alias, config)
            repo_id = repo.repo_id
            self._repos[cache_key] = repo_id
        return Repository.get(repo_id)

    def _create_repo(self, dump_name, backend_alias, config):
        # Extract the dump into a working repository and register it in the db.
        repo_name = '%s-%s' % (backend_alias, dump_name)
        extract = self.dump_extractors[backend_alias]
        repo_path = extract(dump_name, repo_name)

        vcs_repo = get_backend(backend_alias)(repo_path, config=config)
        repo2db_mapper({repo_name: vcs_repo})

        self._cleanup_repos.append(repo_name)
        return RepoModel().get_by_repo_name(repo_name)

    def _cleanup(self):
        # Destroy in reverse creation order.
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)
449 448
450 449
def backend_base(request, backend_alias, baseapp, test_repo):
    """
    Build a :class:`Backend` for ``backend_alias``, honoring test markers.

    Skips the test when the backend is not enabled via ``--backends`` and
    applies the ``xfail_backends`` / ``skip_backends`` markers.
    """
    enabled_backends = request.config.getoption('--backends')
    if backend_alias not in enabled_backends:
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    backend = Backend(
        alias=backend_alias,
        repo_name='vcs_test_%s' % (backend_alias, ),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
466 465
467 466
@pytest.fixture
def backend(request, backend_alias, baseapp, test_repo):
    """
    Parametrized fixture which represents a single backend implementation.

    It respects the option `--backends` to focus the test run on specific
    backend implementations, and supports `pytest.mark.xfail_backends` to
    mark tests as failing for specific backends — useful during incremental
    development of a new backend implementation.
    """
    return backend_base(request, backend_alias, baseapp, test_repo)
481 480
482 481
@pytest.fixture
def backend_git(request, baseapp, test_repo):
    """Git flavor of the `backend` fixture."""
    return backend_base(request, 'git', baseapp, test_repo)
486 485
487 486
@pytest.fixture
def backend_hg(request, baseapp, test_repo):
    """Mercurial flavor of the `backend` fixture."""
    return backend_base(request, 'hg', baseapp, test_repo)
491 490
492 491
@pytest.fixture
def backend_svn(request, baseapp, test_repo):
    """Subversion flavor of the `backend` fixture."""
    return backend_base(request, 'svn', baseapp, test_repo)
496 495
497 496
@pytest.fixture
def backend_random(backend_git):
    """
    Use this to express that your tests need "a" backend.

    Some tests only need any backend so the code can run at all. For those,
    the parametrized `backend` fixture would repeat the test once per
    available backend — a pure waste of time when the test is independent
    of the backend type. This fixture picks a single backend instead.
    """
    # TODO: johbo: Change this to pick a random backend
    return backend_git
513 512
514 513
@pytest.fixture
def backend_stub(backend_git):
    """
    Use this to express that your tests need a backend stub.

    TODO: mikhail: Implement a real stub logic instead of returning
    a git backend
    """
    return backend_git
524 523
525 524
@pytest.fixture
def repo_stub(backend_stub):
    """Use this to express that your tests need a repository stub."""
    return backend_stub.create_repo()
532 531
533 532
class Backend(object):
    """
    Represents the test configuration for one supported backend

    Provides easy access to different test repositories based on
    `__getitem__`. Such repositories will only be created once per test
    session.
    """

    # pattern of characters not allowed in generated repository names
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')
    # NOTE: class-level defaults; `_master_repo` is rebound per instance by
    # `create_master_repo`, `_commit_ids` by `_add_commits_to_repo`.
    _master_repo = None
    _commit_ids = {}

    def __init__(self, alias, repo_name, test_name, test_repo_container):
        self.alias = alias
        self.repo_name = repo_name
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container
        # TODO: johbo: Used as a delegate interim. Not yet sure if Backend or
        # Fixture will survive in the end.
        self._fixture = Fixture()

    def __getitem__(self, key):
        """Return the shared read-only test repository named `key`."""
        return self._test_repo_container(key, self.alias)

    def create_test_repo(self, key, config=None):
        """Return the shared test repository `key`, with optional vcs config."""
        return self._test_repo_container(key, self.alias, config)

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo or the
        last repo which has been created with `create_repo`.
        """
        from rhodecode.model.db import Repository
        return Repository.get_by_repo_name(self.repo_name)

    @property
    def default_branch_name(self):
        """Default branch name of the underlying vcs backend class."""
        VcsRepository = get_backend(self.alias)
        return VcsRepository.DEFAULT_BRANCH_NAME

    @property
    def default_head_id(self):
        """
        Returns the default head id of the underlying backend.

        This will be the default branch name in case the backend does have a
        default branch. In the other cases it will point to a valid head
        which can serve as the base to create a new commit on top of it.
        """
        vcsrepo = self.repo.scm_instance()
        head_id = (
            vcsrepo.DEFAULT_BRANCH_NAME or
            vcsrepo.commit_ids[-1])
        return head_id

    @property
    def commit_ids(self):
        """
        Mapping of commit message to raw commit id for the last created
        repository (built by `_add_commits_to_repo`).
        """
        return self._commit_ids

    def create_master_repo(self, commits):
        """
        Create a repository and remember it as a template.

        This allows to easily create derived repositories to construct
        more complex scenarios for diff, compare and pull requests.

        Returns a commit map which maps from commit message to raw_id.
        """
        self._master_repo = self.create_repo(commits=commits)
        return self._commit_ids

    def create_repo(
            self, commits=None, number_of_commits=0, heads=None,
            name_suffix=u'', bare=False, **kwargs):
        """
        Create a repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances.
            Will add a commit per entry to the new repository.
        :param number_of_commits: Optional. If set to a number, this number of
            commits will be added to the new repository.
        :param heads: Optional. Can be set to a sequence of commit
            names which shall be pulled in from the master repository.
        :param name_suffix: adds special suffix to generated repo name
        :param bare: set a repo as bare (no checkout)
        """
        self.repo_name = self._next_repo_name() + name_suffix
        repo = self._fixture.create_repo(
            self.repo_name, repo_type=self.alias, bare=bare, **kwargs)
        self._cleanup_repos.append(repo.repo_name)

        commits = commits or [
            {'message': 'Commit %s of %s' % (x, self.repo_name)}
            for x in range(number_of_commits)]
        vcs_repo = repo.scm_instance()
        vcs_repo.count()
        self._add_commits_to_repo(vcs_repo, commits)
        if heads:
            self.pull_heads(repo, heads)

        return repo

    def pull_heads(self, repo, heads):
        """
        Make sure that repo contains all commits mentioned in `heads`
        """
        vcsmaster = self._master_repo.scm_instance()
        vcsrepo = repo.scm_instance()
        # disable hooks to avoid side effects while pulling into test repos
        vcsrepo.config.clear_section('hooks')
        commit_ids = [self._commit_ids[h] for h in heads]
        vcsrepo.pull(vcsmaster.path, commit_ids=commit_ids)

    def create_fork(self):
        """Fork the current repository and make the fork the current repo."""
        repo_to_fork = self.repo_name
        self.repo_name = self._next_repo_name()
        repo = self._fixture.create_fork(repo_to_fork, self.repo_name)
        self._cleanup_repos.append(self.repo_name)
        return repo

    def new_repo_name(self, suffix=u''):
        """Reserve a fresh repository name and schedule it for cleanup."""
        self.repo_name = self._next_repo_name() + suffix
        self._cleanup_repos.append(self.repo_name)
        return self.repo_name

    def _next_repo_name(self):
        """Generate a unique, filesystem-safe repository name."""
        return u"%s_%s" % (
            self.invalid_repo_name.sub(u'_', self._test_name), len(self._cleanup_repos))

    def ensure_file(self, filename, content='Test content\n'):
        """Commit `filename` into the current (never a shared) repository."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        commits = [
            {'added': [
                FileNode(filename, content=content),
            ]},
        ]
        self._add_commits_to_repo(self.repo.scm_instance(), commits)

    def enable_downloads(self):
        """Turn on the downloads flag of the current repository."""
        repo = self.repo
        repo.enable_downloads = True
        Session().add(repo)
        Session().commit()

    def cleanup(self):
        """Destroy all repositories created via this instance, newest first."""
        for repo_name in reversed(self._cleanup_repos):
            self._fixture.destroy_repo(repo_name)

    def _add_commits_to_repo(self, repo, commits):
        """Add `commits` to `repo` and remember the message -> raw_id map."""
        commit_ids = _add_commits_to_repo(repo, commits)
        if not commit_ids:
            return
        self._commit_ids = commit_ids

        # Creating refs for Git to allow fetching them from remote repository
        if self.alias == 'git':
            refs = {}
            for message in self._commit_ids:
                # TODO: mikhail: do more special chars replacements
                ref_name = 'refs/test-refs/{}'.format(
                    message.replace(' ', ''))
                refs[ref_name] = self._commit_ids[message]
            self._create_refs(repo, refs)

    def _create_refs(self, repo, refs):
        """Write each ref name/commit id pair into the git repository."""
        for ref_name in refs:
            repo.set_refs(ref_name, refs[ref_name])
706 705
707 706
def vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Build a :class:`VcsBackend` for ``backend_alias``, honoring test markers.

    Skips the test when the backend is not enabled via ``--backends`` and
    applies the ``xfail_backends`` / ``skip_backends`` markers.
    """
    enabled_backends = request.config.getoption('--backends')
    if backend_alias not in enabled_backends:
        pytest.skip("Backend %s not selected." % (backend_alias, ))

    utils.check_xfail_backends(request.node, backend_alias)
    utils.check_skip_backends(request.node, backend_alias)

    repo_name = 'vcs_test_%s' % (backend_alias, )
    backend = VcsBackend(
        alias=backend_alias,
        repo_path=os.path.join(tests_tmp_path, repo_name),
        test_name=request.node.name,
        test_repo_container=test_repo)
    request.addfinalizer(backend.cleanup)
    return backend
724 723
725 724
@pytest.fixture
def vcsbackend(request, backend_alias, tests_tmp_path, baseapp, test_repo):
    """
    Parametrized fixture which represents a single vcs backend implementation.

    See the fixture `backend` for more details. This one implements the same
    concept on the vcs level, so it does not provide model instances etc.

    Parameters are generated dynamically, see :func:`pytest_generate_tests`
    for how this works.
    """
    return vcsbackend_base(request, backend_alias, tests_tmp_path, baseapp, test_repo)
738 737
739 738
@pytest.fixture
def vcsbackend_git(request, tests_tmp_path, baseapp, test_repo):
    """Git flavor of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'git', tests_tmp_path, baseapp, test_repo)
743 742
744 743
@pytest.fixture
def vcsbackend_hg(request, tests_tmp_path, baseapp, test_repo):
    """Mercurial flavor of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'hg', tests_tmp_path, baseapp, test_repo)
748 747
749 748
@pytest.fixture
def vcsbackend_svn(request, tests_tmp_path, baseapp, test_repo):
    """Subversion flavor of the `vcsbackend` fixture."""
    return vcsbackend_base(request, 'svn', tests_tmp_path, baseapp, test_repo)
753 752
754 753
@pytest.fixture
def vcsbackend_stub(vcsbackend_git):
    """
    Use this to express that your test just needs a stub of a vcsbackend.

    Plan is to eventually implement an in-memory stub to speed tests up.
    """
    return vcsbackend_git
763 762
764 763
class VcsBackend(object):
    """
    Represents the test configuration for one supported vcs backend.

    Works on plain vcs repositories (no database models), mirroring the
    model-level `Backend` helper.
    """

    # pattern of characters not allowed in generated repository names
    invalid_repo_name = re.compile(r'[^0-9a-zA-Z]+')

    def __init__(self, alias, repo_path, test_name, test_repo_container):
        self.alias = alias
        self._repo_path = repo_path
        self._cleanup_repos = []
        self._test_name = test_name
        self._test_repo_container = test_repo_container

    def __getitem__(self, key):
        """Return the scm instance of the shared test repository `key`."""
        return self._test_repo_container(key, self.alias).scm_instance()

    @property
    def repo(self):
        """
        Returns the "current" repository. This is the vcs_test repo of the last
        repo which has been created.
        """
        Repository = get_backend(self.alias)
        return Repository(self._repo_path)

    @property
    def backend(self):
        """
        Returns the backend implementation class.
        """
        return get_backend(self.alias)

    def create_repo(self, commits=None, number_of_commits=0, _clone_repo=None,
                    bare=False):
        """
        Create a new vcs repository and record it for later cleanup.

        :param commits: Optional. A sequence of dict instances; one commit
            is created per entry.
        :param number_of_commits: Optional. Number of generated commits to
            add when `commits` is not given.
        :param _clone_repo: Optional. Repository to clone from.
        :param bare: create the repository as bare (no checkout).
        """
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        repo_class = get_backend(self.alias)
        src_url = None
        if _clone_repo:
            src_url = _clone_repo.path
        repo = repo_class(self._repo_path, create=True, src_url=src_url, bare=bare)
        self._cleanup_repos.append(repo)

        # NOTE: `range` (not `xrange`) for consistency with
        # `Backend.create_repo`; commit counts are small in tests.
        commits = commits or [
            {'message': 'Commit %s of %s' % (x, repo_name)}
            for x in range(number_of_commits)]
        _add_commits_to_repo(repo, commits)
        return repo

    def clone_repo(self, repo):
        """Create a new repository which is a clone of `repo`."""
        return self.create_repo(_clone_repo=repo)

    def cleanup(self):
        """Remove all repositories created through this instance."""
        for repo in self._cleanup_repos:
            shutil.rmtree(repo.path)

    def new_repo_path(self):
        """Reserve and return a fresh directory for a repository."""
        repo_name = self._next_repo_name()
        self._repo_path = get_new_dir(repo_name)
        return self._repo_path

    def _next_repo_name(self):
        """Generate a unique, filesystem-safe repository name."""
        return "%s_%s" % (
            self.invalid_repo_name.sub('_', self._test_name),
            len(self._cleanup_repos))

    def add_file(self, repo, filename, content='Test content\n'):
        """Commit a single new file to `repo` via its in-memory commit."""
        imc = repo.in_memory_commit
        imc.add(FileNode(filename, content=content))
        imc.commit(
            message=u'Automatic commit from vcsbackend fixture',
            author=u'Automatic <automatic@rhodecode.com>')

    def ensure_file(self, filename, content='Test content\n'):
        """Add `filename` to the current repository (never a shared one)."""
        assert self._cleanup_repos, "Avoid writing into vcs_test repos"
        self.add_file(self.repo, filename, content)
842 841
843 842
def _add_commits_to_repo(vcs_repo, commits):
    """
    Create one commit in `vcs_repo` per entry of `commits`.

    Each entry is a dict which may carry 'message', 'added', 'changed',
    'removed', 'parents', 'author', 'date' and 'branch' keys. Returns a
    mapping of commit message to raw commit id (empty for no commits).
    """
    commit_ids = {}
    if not commits:
        return commit_ids

    imc = vcs_repo.in_memory_commit

    for idx, commit_spec in enumerate(commits):
        message = unicode(commit_spec.get('message', 'Commit %s' % idx))

        for node in commit_spec.get('added', []):
            imc.add(FileNode(node.path, content=node.content))
        for node in commit_spec.get('changed', []):
            imc.change(FileNode(node.path, content=node.content))
        for node in commit_spec.get('removed', []):
            imc.remove(FileNode(node.path))

        # Parents are referenced by the message of an earlier commit spec.
        parents = [
            vcs_repo.get_commit(commit_id=commit_ids[p])
            for p in commit_spec.get('parents', [])]

        # Guarantee at least one file change, so a commit is always created.
        if not any(commit_spec.get(op) for op in ('added', 'changed', 'removed')):
            imc.add(FileNode('file_%s' % idx, content=message))

        new_commit = imc.commit(
            message=message,
            author=unicode(commit_spec.get('author', 'Automatic <automatic@rhodecode.com>')),
            date=commit_spec.get('date'),
            branch=commit_spec.get('branch'),
            parents=parents)

        commit_ids[new_commit.message] = new_commit.raw_id

    return commit_ids
880 879
881 880
@pytest.fixture
def reposerver(request):
    """
    Allows to serve a backend repository
    """
    server = RepoServer()
    request.addfinalizer(server.cleanup)
    return server
891 890
892 891
class RepoServer(object):
    """
    Utility to serve a local repository for the duration of a test case.

    Supports only Subversion so far.
    """

    # URL under which the last served repository is reachable
    url = None

    def __init__(self):
        self._cleanup_servers = []

    def serve(self, vcsrepo):
        """Start ``svnserve`` for `vcsrepo` and record the server URL."""
        if vcsrepo.alias != 'svn':
            raise TypeError("Backend %s not supported" % vcsrepo.alias)

        command = [
            'svnserve', '-d', '--foreground',
            '--listen-host', 'localhost',
            '--root', vcsrepo.path]
        proc = subprocess32.Popen(command)
        self._cleanup_servers.append(proc)
        self.url = 'svn://localhost'

    def cleanup(self):
        """Terminate every server process started by this instance."""
        for proc in self._cleanup_servers:
            proc.terminate()
918 917
919 918
@pytest.fixture
def pr_util(backend, request, config_stub):
    """
    Utility for tests of models and for functional tests around pull requests.

    Gives a :class:`PRTestUtility` instance providing various helper methods
    around one pull request. Uses `backend` and inherits its parameterization.
    """
    util = PRTestUtility(backend)
    request.addfinalizer(util.cleanup)
    return util
935 934
936 935
class PRTestUtility(object):
    """
    Helper around a single pull request: creates the source/target repos,
    the pull request itself, comments and status votes, and cleans up.
    """

    # lazily filled by `create_pull_request` / `set_mergeable`
    pull_request = None
    pull_request_id = None
    mergeable_patcher = None
    mergeable_mock = None
    notification_patcher = None

    def __init__(self, backend):
        self.backend = backend

    def create_pull_request(
            self, commits=None, target_head=None, source_head=None,
            revisions=None, approved=False, author=None, mergeable=False,
            enable_notifications=True, name_suffix=u'', reviewers=None,
            title=u"Test", description=u"Description"):
        """
        Create (once) and return the pull request under test.

        Without `commits`, a default three-commit scenario is used where the
        target is at 'c1', the source at 'c2' and 'c2' is under review.
        Subsequent calls return the already created pull request.
        """
        self.set_mergeable(mergeable)
        if not enable_notifications:
            # mock notification side effect
            self.notification_patcher = mock.patch(
                'rhodecode.model.notification.NotificationModel.create')
            self.notification_patcher.start()

        if not self.pull_request:
            if not commits:
                commits = [
                    {'message': 'c1'},
                    {'message': 'c2'},
                    {'message': 'c3'},
                ]
                target_head = 'c1'
                source_head = 'c2'
                revisions = ['c2']

            self.commit_ids = self.backend.create_master_repo(commits)
            self.target_repository = self.backend.create_repo(
                heads=[target_head], name_suffix=name_suffix)
            self.source_repository = self.backend.create_repo(
                heads=[source_head], name_suffix=name_suffix)
            self.author = author or UserModel().get_by_username(
                TEST_USER_ADMIN_LOGIN)

            model = PullRequestModel()
            self.create_parameters = {
                'created_by': self.author,
                'source_repo': self.source_repository.repo_name,
                'source_ref': self._default_branch_reference(source_head),
                'target_repo': self.target_repository.repo_name,
                'target_ref': self._default_branch_reference(target_head),
                'revisions': [self.commit_ids[r] for r in revisions],
                'reviewers': reviewers or self._get_reviewers(),
                'title': title,
                'description': description,
            }
            self.pull_request = model.create(**self.create_parameters)
            # a freshly created pull request must not have any versions yet
            assert model.get_versions(self.pull_request) == []

            self.pull_request_id = self.pull_request.pull_request_id

            if approved:
                self.approve()

            Session().add(self.pull_request)
            Session().commit()

        return self.pull_request

    def approve(self):
        """Record an 'approved' status vote from every reviewer."""
        self.create_status_votes(
            ChangesetStatus.STATUS_APPROVED,
            *self.pull_request.reviewers)

    def close(self):
        """Close the pull request on behalf of its author."""
        PullRequestModel().close_pull_request(self.pull_request, self.author)

    def _default_branch_reference(self, commit_message):
        """Build a 'branch:<name>:<raw_id>' reference string."""
        reference = '%s:%s:%s' % (
            'branch',
            self.backend.default_branch_name,
            self.commit_ids[commit_message])
        return reference

    def _get_reviewers(self):
        # tuples of (username, reasons, mandatory, rules) — presumably; the
        # exact meaning comes from PullRequestModel.create, verify there.
        return [
            (TEST_USER_REGULAR_LOGIN, ['default1'], False, []),
            (TEST_USER_REGULAR2_LOGIN, ['default2'], False, []),
        ]

    def update_source_repository(self, head=None):
        """Pull `head` (default 'c3') from the master repo into the source."""
        heads = [head or 'c3']
        self.backend.pull_heads(self.source_repository, heads=heads)

    def add_one_commit(self, head=None):
        """Add one commit to the source repo and return its commit id."""
        self.update_source_repository(head=head)
        old_commit_ids = set(self.pull_request.revisions)
        PullRequestModel().update_commits(self.pull_request)
        commit_ids = set(self.pull_request.revisions)
        new_commit_ids = commit_ids - old_commit_ids
        assert len(new_commit_ids) == 1
        return new_commit_ids.pop()

    def remove_one_commit(self):
        """Strip the tip commit from the source repo and return its id."""
        assert len(self.pull_request.revisions) == 2
        source_vcs = self.source_repository.scm_instance()
        removed_commit_id = source_vcs.commit_ids[-1]

        # TODO: johbo: Git and Mercurial have an inconsistent vcs api here,
        # remove the if once that's sorted out.
        if self.backend.alias == "git":
            kwargs = {'branch_name': self.backend.default_branch_name}
        else:
            kwargs = {}
        source_vcs.strip(removed_commit_id, **kwargs)

        PullRequestModel().update_commits(self.pull_request)
        assert len(self.pull_request.revisions) == 1
        return removed_commit_id

    def create_comment(self, linked_to=None):
        """Create a general comment, optionally linked to a PR version."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_inline_comment(
            self, linked_to=None, line_no=u'n1', file_path='file_1'):
        """Create an inline comment on `file_path` at `line_no`."""
        comment = CommentsModel().create(
            text=u"Test comment",
            repo=self.target_repository.repo_name,
            user=self.author,
            line_no=line_no,
            f_path=file_path,
            pull_request=self.pull_request)
        assert comment.pull_request_version_id is None

        if linked_to:
            PullRequestModel()._link_comments_to_version(linked_to)

        return comment

    def create_version_of_pull_request(self):
        """Create the pull request and snapshot it into a version."""
        pull_request = self.create_pull_request()
        version = PullRequestModel()._create_version_from_snapshot(
            pull_request)
        return version

    def create_status_votes(self, status, *reviewers):
        """Set `status` on the pull request for each given reviewer."""
        for reviewer in reviewers:
            ChangesetStatusModel().set_status(
                repo=self.pull_request.target_repo,
                status=status,
                user=reviewer.user_id,
                pull_request=self.pull_request)

    def set_mergeable(self, value):
        """Patch the vcs settings so the PR reports merge-ability `value`."""
        if not self.mergeable_patcher:
            self.mergeable_patcher = mock.patch.object(
                VcsSettingsModel, 'get_general_settings')
            self.mergeable_mock = self.mergeable_patcher.start()
        self.mergeable_mock.return_value = {
            'rhodecode_pr_merge_enabled': value}

    def cleanup(self):
        """Delete the pull request (if still present) and stop all patchers."""
        # In case the source repository is already cleaned up, the pull
        # request will already be deleted.
        pull_request = PullRequest().get(self.pull_request_id)
        if pull_request:
            PullRequestModel().delete(pull_request, pull_request.author)
            Session().commit()

        if self.notification_patcher:
            self.notification_patcher.stop()

        if self.mergeable_patcher:
            self.mergeable_patcher.stop()
1119 1118
1120 1119
@pytest.fixture
def user_admin(baseapp):
    """
    Provides the default admin test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_ADMIN_LOGIN)
1128 1127
1129 1128
@pytest.fixture
def user_regular(baseapp):
    """
    Provides the default regular test user as an instance of `db.User`.
    """
    return UserModel().get_by_username(TEST_USER_REGULAR_LOGIN)
1137 1136
1138 1137
@pytest.fixture
def user_util(request, db_connection):
    """
    Provides a wired instance of `UserUtility` with integrated cleanup.
    """
    utility = UserUtility(test_name=request.node.name)
    request.addfinalizer(utility.cleanup)
    return utility
1147 1146
1148 1147
1149 1148 # TODO: johbo: Split this up into utilities per domain or something similar
1150 1149 class UserUtility(object):
1151 1150
1152 1151 def __init__(self, test_name="test"):
1153 1152 self._test_name = self._sanitize_name(test_name)
1154 1153 self.fixture = Fixture()
1155 1154 self.repo_group_ids = []
1156 1155 self.repos_ids = []
1157 1156 self.user_ids = []
1158 1157 self.user_group_ids = []
1159 1158 self.user_repo_permission_ids = []
1160 1159 self.user_group_repo_permission_ids = []
1161 1160 self.user_repo_group_permission_ids = []
1162 1161 self.user_group_repo_group_permission_ids = []
1163 1162 self.user_user_group_permission_ids = []
1164 1163 self.user_group_user_group_permission_ids = []
1165 1164 self.user_permissions = []
1166 1165
1167 1166 def _sanitize_name(self, name):
1168 1167 for char in ['[', ']']:
1169 1168 name = name.replace(char, '_')
1170 1169 return name
1171 1170
1172 1171 def create_repo_group(
1173 1172 self, owner=TEST_USER_ADMIN_LOGIN, auto_cleanup=True):
1174 1173 group_name = "{prefix}_repogroup_{count}".format(
1175 1174 prefix=self._test_name,
1176 1175 count=len(self.repo_group_ids))
1177 1176 repo_group = self.fixture.create_repo_group(
1178 1177 group_name, cur_user=owner)
1179 1178 if auto_cleanup:
1180 1179 self.repo_group_ids.append(repo_group.group_id)
1181 1180 return repo_group
1182 1181
1183 1182 def create_repo(self, owner=TEST_USER_ADMIN_LOGIN, parent=None,
1184 1183 auto_cleanup=True, repo_type='hg', bare=False):
1185 1184 repo_name = "{prefix}_repository_{count}".format(
1186 1185 prefix=self._test_name,
1187 1186 count=len(self.repos_ids))
1188 1187
1189 1188 repository = self.fixture.create_repo(
1190 1189 repo_name, cur_user=owner, repo_group=parent, repo_type=repo_type, bare=bare)
1191 1190 if auto_cleanup:
1192 1191 self.repos_ids.append(repository.repo_id)
1193 1192 return repository
1194 1193
1195 1194 def create_user(self, auto_cleanup=True, **kwargs):
1196 1195 user_name = "{prefix}_user_{count}".format(
1197 1196 prefix=self._test_name,
1198 1197 count=len(self.user_ids))
1199 1198 user = self.fixture.create_user(user_name, **kwargs)
1200 1199 if auto_cleanup:
1201 1200 self.user_ids.append(user.user_id)
1202 1201 return user
1203 1202
1204 1203 def create_additional_user_email(self, user, email):
1205 1204 uem = self.fixture.create_additional_user_email(user=user, email=email)
1206 1205 return uem
1207 1206
1208 1207 def create_user_with_group(self):
1209 1208 user = self.create_user()
1210 1209 user_group = self.create_user_group(members=[user])
1211 1210 return user, user_group
1212 1211
1213 1212 def create_user_group(self, owner=TEST_USER_ADMIN_LOGIN, members=None,
1214 1213 auto_cleanup=True, **kwargs):
1215 1214 group_name = "{prefix}_usergroup_{count}".format(
1216 1215 prefix=self._test_name,
1217 1216 count=len(self.user_group_ids))
1218 1217 user_group = self.fixture.create_user_group(
1219 1218 group_name, cur_user=owner, **kwargs)
1220 1219
1221 1220 if auto_cleanup:
1222 1221 self.user_group_ids.append(user_group.users_group_id)
1223 1222 if members:
1224 1223 for user in members:
1225 1224 UserGroupModel().add_user_to_group(user_group, user)
1226 1225 return user_group
1227 1226
1228 1227 def grant_user_permission(self, user_name, permission_name):
1229 1228 self.inherit_default_user_permissions(user_name, False)
1230 1229 self.user_permissions.append((user_name, permission_name))
1231 1230
1232 1231 def grant_user_permission_to_repo_group(
1233 1232 self, repo_group, user, permission_name):
1234 1233 permission = RepoGroupModel().grant_user_permission(
1235 1234 repo_group, user, permission_name)
1236 1235 self.user_repo_group_permission_ids.append(
1237 1236 (repo_group.group_id, user.user_id))
1238 1237 return permission
1239 1238
1240 1239 def grant_user_group_permission_to_repo_group(
1241 1240 self, repo_group, user_group, permission_name):
1242 1241 permission = RepoGroupModel().grant_user_group_permission(
1243 1242 repo_group, user_group, permission_name)
1244 1243 self.user_group_repo_group_permission_ids.append(
1245 1244 (repo_group.group_id, user_group.users_group_id))
1246 1245 return permission
1247 1246
1248 1247 def grant_user_permission_to_repo(
1249 1248 self, repo, user, permission_name):
1250 1249 permission = RepoModel().grant_user_permission(
1251 1250 repo, user, permission_name)
1252 1251 self.user_repo_permission_ids.append(
1253 1252 (repo.repo_id, user.user_id))
1254 1253 return permission
1255 1254
1256 1255 def grant_user_group_permission_to_repo(
1257 1256 self, repo, user_group, permission_name):
1258 1257 permission = RepoModel().grant_user_group_permission(
1259 1258 repo, user_group, permission_name)
1260 1259 self.user_group_repo_permission_ids.append(
1261 1260 (repo.repo_id, user_group.users_group_id))
1262 1261 return permission
1263 1262
1264 1263 def grant_user_permission_to_user_group(
1265 1264 self, target_user_group, user, permission_name):
1266 1265 permission = UserGroupModel().grant_user_permission(
1267 1266 target_user_group, user, permission_name)
1268 1267 self.user_user_group_permission_ids.append(
1269 1268 (target_user_group.users_group_id, user.user_id))
1270 1269 return permission
1271 1270
1272 1271 def grant_user_group_permission_to_user_group(
1273 1272 self, target_user_group, user_group, permission_name):
1274 1273 permission = UserGroupModel().grant_user_group_permission(
1275 1274 target_user_group, user_group, permission_name)
1276 1275 self.user_group_user_group_permission_ids.append(
1277 1276 (target_user_group.users_group_id, user_group.users_group_id))
1278 1277 return permission
1279 1278
1280 1279 def revoke_user_permission(self, user_name, permission_name):
1281 1280 self.inherit_default_user_permissions(user_name, True)
1282 1281 UserModel().revoke_perm(user_name, permission_name)
1283 1282
1284 1283 def inherit_default_user_permissions(self, user_name, value):
1285 1284 user = UserModel().get_by_username(user_name)
1286 1285 user.inherit_default_permissions = value
1287 1286 Session().add(user)
1288 1287 Session().commit()
1289 1288
1290 1289 def cleanup(self):
1291 1290 self._cleanup_permissions()
1292 1291 self._cleanup_repos()
1293 1292 self._cleanup_repo_groups()
1294 1293 self._cleanup_user_groups()
1295 1294 self._cleanup_users()
1296 1295
1297 1296 def _cleanup_permissions(self):
1298 1297 if self.user_permissions:
1299 1298 for user_name, permission_name in self.user_permissions:
1300 1299 self.revoke_user_permission(user_name, permission_name)
1301 1300
1302 1301 for permission in self.user_repo_permission_ids:
1303 1302 RepoModel().revoke_user_permission(*permission)
1304 1303
1305 1304 for permission in self.user_group_repo_permission_ids:
1306 1305 RepoModel().revoke_user_group_permission(*permission)
1307 1306
1308 1307 for permission in self.user_repo_group_permission_ids:
1309 1308 RepoGroupModel().revoke_user_permission(*permission)
1310 1309
1311 1310 for permission in self.user_group_repo_group_permission_ids:
1312 1311 RepoGroupModel().revoke_user_group_permission(*permission)
1313 1312
1314 1313 for permission in self.user_user_group_permission_ids:
1315 1314 UserGroupModel().revoke_user_permission(*permission)
1316 1315
1317 1316 for permission in self.user_group_user_group_permission_ids:
1318 1317 UserGroupModel().revoke_user_group_permission(*permission)
1319 1318
1320 1319 def _cleanup_repo_groups(self):
1321 1320 def _repo_group_compare(first_group_id, second_group_id):
1322 1321 """
1323 1322 Gives higher priority to the groups with the most complex paths
1324 1323 """
1325 1324 first_group = RepoGroup.get(first_group_id)
1326 1325 second_group = RepoGroup.get(second_group_id)
1327 1326 first_group_parts = (
1328 1327 len(first_group.group_name.split('/')) if first_group else 0)
1329 1328 second_group_parts = (
1330 1329 len(second_group.group_name.split('/')) if second_group else 0)
1331 1330 return cmp(second_group_parts, first_group_parts)
1332 1331
1333 1332 sorted_repo_group_ids = sorted(
1334 1333 self.repo_group_ids, cmp=_repo_group_compare)
1335 1334 for repo_group_id in sorted_repo_group_ids:
1336 1335 self.fixture.destroy_repo_group(repo_group_id)
1337 1336
1338 1337 def _cleanup_repos(self):
1339 1338 sorted_repos_ids = sorted(self.repos_ids)
1340 1339 for repo_id in sorted_repos_ids:
1341 1340 self.fixture.destroy_repo(repo_id)
1342 1341
1343 1342 def _cleanup_user_groups(self):
1344 1343 def _user_group_compare(first_group_id, second_group_id):
1345 1344 """
1346 1345 Gives higher priority to the groups with the most complex paths
1347 1346 """
1348 1347 first_group = UserGroup.get(first_group_id)
1349 1348 second_group = UserGroup.get(second_group_id)
1350 1349 first_group_parts = (
1351 1350 len(first_group.users_group_name.split('/'))
1352 1351 if first_group else 0)
1353 1352 second_group_parts = (
1354 1353 len(second_group.users_group_name.split('/'))
1355 1354 if second_group else 0)
1356 1355 return cmp(second_group_parts, first_group_parts)
1357 1356
1358 1357 sorted_user_group_ids = sorted(
1359 1358 self.user_group_ids, cmp=_user_group_compare)
1360 1359 for user_group_id in sorted_user_group_ids:
1361 1360 self.fixture.destroy_user_group(user_group_id)
1362 1361
1363 1362 def _cleanup_users(self):
1364 1363 for user_id in self.user_ids:
1365 1364 self.fixture.destroy_user(user_id)
1366 1365
1367 1366
1368 1367 # TODO: Think about moving this into a pytest-pyro package and make it a
1369 1368 # pytest plugin
1370 1369 @pytest.hookimpl(tryfirst=True, hookwrapper=True)
1371 1370 def pytest_runtest_makereport(item, call):
1372 1371 """
1373 1372 Adding the remote traceback if the exception has this information.
1374 1373
1375 1374 VCSServer attaches this information as the attribute `_vcs_server_traceback`
1376 1375 to the exception instance.
1377 1376 """
1378 1377 outcome = yield
1379 1378 report = outcome.get_result()
1380 1379 if call.excinfo:
1381 1380 _add_vcsserver_remote_traceback(report, call.excinfo.value)
1382 1381
1383 1382
1384 1383 def _add_vcsserver_remote_traceback(report, exc):
1385 1384 vcsserver_traceback = getattr(exc, '_vcs_server_traceback', None)
1386 1385
1387 1386 if vcsserver_traceback:
1388 1387 section = 'VCSServer remote traceback ' + report.when
1389 1388 report.sections.append((section, vcsserver_traceback))
1390 1389
1391 1390
1392 1391 @pytest.fixture(scope='session')
1393 1392 def testrun():
1394 1393 return {
1395 1394 'uuid': uuid.uuid4(),
1396 1395 'start': datetime.datetime.utcnow().isoformat(),
1397 1396 'timestamp': int(time.time()),
1398 1397 }
1399 1398
1400 1399
1401 @pytest.fixture(autouse=True)
1402 def collect_appenlight_stats(request, testrun):
1403 """
1404 This fixture reports memory consumtion of single tests.
1405
1406 It gathers data based on `psutil` and sends them to Appenlight. The option
1407 ``--ae`` has te be used to enable this fixture and the API key for your
1408 application has to be provided in ``--ae-key``.
1409 """
1410 try:
1411 # cygwin cannot have yet psutil support.
1412 import psutil
1413 except ImportError:
1414 return
1415
1416 if not request.config.getoption('--appenlight'):
1417 return
1418 else:
1419 # Only request the baseapp fixture if appenlight tracking is
1420 # enabled. This will speed up a test run of unit tests by 2 to 3
1421 # seconds if appenlight is not enabled.
1422 baseapp = request.getfuncargvalue("baseapp")
1423 url = '{}/api/logs'.format(request.config.getoption('--appenlight-url'))
1424 client = AppenlightClient(
1425 url=url,
1426 api_key=request.config.getoption('--appenlight-api-key'),
1427 namespace=request.node.nodeid,
1428 request=str(testrun['uuid']),
1429 testrun=testrun)
1430
1431 client.collect({
1432 'message': "Starting",
1433 })
1434
1435 server_and_port = baseapp.config.get_settings()['vcs.server']
1436 protocol = baseapp.config.get_settings()['vcs.server.protocol']
1437 server = create_vcsserver_proxy(server_and_port, protocol)
1438 with server:
1439 vcs_pid = server.get_pid()
1440 server.run_gc()
1441 vcs_process = psutil.Process(vcs_pid)
1442 mem = vcs_process.memory_info()
1443 client.tag_before('vcsserver.rss', mem.rss)
1444 client.tag_before('vcsserver.vms', mem.vms)
1445
1446 test_process = psutil.Process()
1447 mem = test_process.memory_info()
1448 client.tag_before('test.rss', mem.rss)
1449 client.tag_before('test.vms', mem.vms)
1450
1451 client.tag_before('time', time.time())
1452
1453 @request.addfinalizer
1454 def send_stats():
1455 client.tag_after('time', time.time())
1456 with server:
1457 gc_stats = server.run_gc()
1458 for tag, value in gc_stats.items():
1459 client.tag_after(tag, value)
1460 mem = vcs_process.memory_info()
1461 client.tag_after('vcsserver.rss', mem.rss)
1462 client.tag_after('vcsserver.vms', mem.vms)
1463
1464 mem = test_process.memory_info()
1465 client.tag_after('test.rss', mem.rss)
1466 client.tag_after('test.vms', mem.vms)
1467
1468 client.collect({
1469 'message': "Finished",
1470 })
1471 client.send_stats()
1472
1473 return client
1474
1475
1476 class AppenlightClient():
1400 class AppenlightClient(object):
1477 1401
1478 1402 url_template = '{url}?protocol_version=0.5'
1479 1403
1480 1404 def __init__(
1481 1405 self, url, api_key, add_server=True, add_timestamp=True,
1482 1406 namespace=None, request=None, testrun=None):
1483 1407 self.url = self.url_template.format(url=url)
1484 1408 self.api_key = api_key
1485 1409 self.add_server = add_server
1486 1410 self.add_timestamp = add_timestamp
1487 1411 self.namespace = namespace
1488 1412 self.request = request
1489 1413 self.server = socket.getfqdn(socket.gethostname())
1490 1414 self.tags_before = {}
1491 1415 self.tags_after = {}
1492 1416 self.stats = []
1493 1417 self.testrun = testrun or {}
1494 1418
1495 1419 def tag_before(self, tag, value):
1496 1420 self.tags_before[tag] = value
1497 1421
1498 1422 def tag_after(self, tag, value):
1499 1423 self.tags_after[tag] = value
1500 1424
1501 1425 def collect(self, data):
1502 1426 if self.add_server:
1503 1427 data.setdefault('server', self.server)
1504 1428 if self.add_timestamp:
1505 1429 data.setdefault('date', datetime.datetime.utcnow().isoformat())
1506 1430 if self.namespace:
1507 1431 data.setdefault('namespace', self.namespace)
1508 1432 if self.request:
1509 1433 data.setdefault('request', self.request)
1510 1434 self.stats.append(data)
1511 1435
1512 1436 def send_stats(self):
1513 1437 tags = [
1514 1438 ('testrun', self.request),
1515 1439 ('testrun.start', self.testrun['start']),
1516 1440 ('testrun.timestamp', self.testrun['timestamp']),
1517 1441 ('test', self.namespace),
1518 1442 ]
1519 1443 for key, value in self.tags_before.items():
1520 1444 tags.append((key + '.before', value))
1521 1445 try:
1522 1446 delta = self.tags_after[key] - value
1523 1447 tags.append((key + '.delta', delta))
1524 1448 except Exception:
1525 1449 pass
1526 1450 for key, value in self.tags_after.items():
1527 1451 tags.append((key + '.after', value))
1528 1452 self.collect({
1529 1453 'message': "Collected tags",
1530 1454 'tags': tags,
1531 1455 })
1532 1456
1533 1457 response = requests.post(
1534 1458 self.url,
1535 1459 headers={
1536 1460 'X-appenlight-api-key': self.api_key},
1537 1461 json=self.stats,
1538 1462 )
1539 1463
1540 1464 if not response.status_code == 200:
1541 1465 pprint.pprint(self.stats)
1542 1466 print(response.headers)
1543 1467 print(response.text)
1544 1468 raise Exception('Sending to appenlight failed')
1545 1469
1546 1470
1547 1471 @pytest.fixture
1548 1472 def gist_util(request, db_connection):
1549 1473 """
1550 1474 Provides a wired instance of `GistUtility` with integrated cleanup.
1551 1475 """
1552 1476 utility = GistUtility()
1553 1477 request.addfinalizer(utility.cleanup)
1554 1478 return utility
1555 1479
1556 1480
1557 1481 class GistUtility(object):
1558 1482 def __init__(self):
1559 1483 self.fixture = Fixture()
1560 1484 self.gist_ids = []
1561 1485
1562 1486 def create_gist(self, **kwargs):
1563 1487 gist = self.fixture.create_gist(**kwargs)
1564 1488 self.gist_ids.append(gist.gist_id)
1565 1489 return gist
1566 1490
1567 1491 def cleanup(self):
1568 1492 for id_ in self.gist_ids:
1569 1493 self.fixture.destroy_gists(str(id_))
1570 1494
1571 1495
1572 1496 @pytest.fixture
1573 1497 def enabled_backends(request):
1574 1498 backends = request.config.option.backends
1575 1499 return backends[:]
1576 1500
1577 1501
1578 1502 @pytest.fixture
1579 1503 def settings_util(request, db_connection):
1580 1504 """
1581 1505 Provides a wired instance of `SettingsUtility` with integrated cleanup.
1582 1506 """
1583 1507 utility = SettingsUtility()
1584 1508 request.addfinalizer(utility.cleanup)
1585 1509 return utility
1586 1510
1587 1511
1588 1512 class SettingsUtility(object):
1589 1513 def __init__(self):
1590 1514 self.rhodecode_ui_ids = []
1591 1515 self.rhodecode_setting_ids = []
1592 1516 self.repo_rhodecode_ui_ids = []
1593 1517 self.repo_rhodecode_setting_ids = []
1594 1518
1595 1519 def create_repo_rhodecode_ui(
1596 1520 self, repo, section, value, key=None, active=True, cleanup=True):
1597 1521 key = key or hashlib.sha1(
1598 1522 '{}{}{}'.format(section, value, repo.repo_id)).hexdigest()
1599 1523
1600 1524 setting = RepoRhodeCodeUi()
1601 1525 setting.repository_id = repo.repo_id
1602 1526 setting.ui_section = section
1603 1527 setting.ui_value = value
1604 1528 setting.ui_key = key
1605 1529 setting.ui_active = active
1606 1530 Session().add(setting)
1607 1531 Session().commit()
1608 1532
1609 1533 if cleanup:
1610 1534 self.repo_rhodecode_ui_ids.append(setting.ui_id)
1611 1535 return setting
1612 1536
1613 1537 def create_rhodecode_ui(
1614 1538 self, section, value, key=None, active=True, cleanup=True):
1615 1539 key = key or hashlib.sha1('{}{}'.format(section, value)).hexdigest()
1616 1540
1617 1541 setting = RhodeCodeUi()
1618 1542 setting.ui_section = section
1619 1543 setting.ui_value = value
1620 1544 setting.ui_key = key
1621 1545 setting.ui_active = active
1622 1546 Session().add(setting)
1623 1547 Session().commit()
1624 1548
1625 1549 if cleanup:
1626 1550 self.rhodecode_ui_ids.append(setting.ui_id)
1627 1551 return setting
1628 1552
1629 1553 def create_repo_rhodecode_setting(
1630 1554 self, repo, name, value, type_, cleanup=True):
1631 1555 setting = RepoRhodeCodeSetting(
1632 1556 repo.repo_id, key=name, val=value, type=type_)
1633 1557 Session().add(setting)
1634 1558 Session().commit()
1635 1559
1636 1560 if cleanup:
1637 1561 self.repo_rhodecode_setting_ids.append(setting.app_settings_id)
1638 1562 return setting
1639 1563
1640 1564 def create_rhodecode_setting(self, name, value, type_, cleanup=True):
1641 1565 setting = RhodeCodeSetting(key=name, val=value, type=type_)
1642 1566 Session().add(setting)
1643 1567 Session().commit()
1644 1568
1645 1569 if cleanup:
1646 1570 self.rhodecode_setting_ids.append(setting.app_settings_id)
1647 1571
1648 1572 return setting
1649 1573
1650 1574 def cleanup(self):
1651 1575 for id_ in self.rhodecode_ui_ids:
1652 1576 setting = RhodeCodeUi.get(id_)
1653 1577 Session().delete(setting)
1654 1578
1655 1579 for id_ in self.rhodecode_setting_ids:
1656 1580 setting = RhodeCodeSetting.get(id_)
1657 1581 Session().delete(setting)
1658 1582
1659 1583 for id_ in self.repo_rhodecode_ui_ids:
1660 1584 setting = RepoRhodeCodeUi.get(id_)
1661 1585 Session().delete(setting)
1662 1586
1663 1587 for id_ in self.repo_rhodecode_setting_ids:
1664 1588 setting = RepoRhodeCodeSetting.get(id_)
1665 1589 Session().delete(setting)
1666 1590
1667 1591 Session().commit()
1668 1592
1669 1593
1670 1594 @pytest.fixture
1671 1595 def no_notifications(request):
1672 1596 notification_patcher = mock.patch(
1673 1597 'rhodecode.model.notification.NotificationModel.create')
1674 1598 notification_patcher.start()
1675 1599 request.addfinalizer(notification_patcher.stop)
1676 1600
1677 1601
1678 1602 @pytest.fixture(scope='session')
1679 1603 def repeat(request):
1680 1604 """
1681 1605 The number of repetitions is based on this fixture.
1682 1606
1683 1607 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
1684 1608 tests are not too slow in our default test suite.
1685 1609 """
1686 1610 return request.config.getoption('--repeat')
1687 1611
1688 1612
1689 1613 @pytest.fixture
1690 1614 def rhodecode_fixtures():
1691 1615 return Fixture()
1692 1616
1693 1617
1694 1618 @pytest.fixture
1695 1619 def context_stub():
1696 1620 """
1697 1621 Stub context object.
1698 1622 """
1699 1623 context = pyramid.testing.DummyResource()
1700 1624 return context
1701 1625
1702 1626
1703 1627 @pytest.fixture
1704 1628 def request_stub():
1705 1629 """
1706 1630 Stub request object.
1707 1631 """
1708 1632 from rhodecode.lib.base import bootstrap_request
1709 1633 request = bootstrap_request(scheme='https')
1710 1634 return request
1711 1635
1712 1636
1713 1637 @pytest.fixture
1714 1638 def config_stub(request, request_stub):
1715 1639 """
1716 1640 Set up pyramid.testing and return the Configurator.
1717 1641 """
1718 1642 from rhodecode.lib.base import bootstrap_config
1719 1643 config = bootstrap_config(request=request_stub)
1720 1644
1721 1645 @request.addfinalizer
1722 1646 def cleanup():
1723 1647 pyramid.testing.tearDown()
1724 1648
1725 1649 return config
1726 1650
1727 1651
1728 1652 @pytest.fixture
1729 1653 def StubIntegrationType():
1730 1654 class _StubIntegrationType(IntegrationTypeBase):
1731 1655 """ Test integration type class """
1732 1656
1733 1657 key = 'test'
1734 1658 display_name = 'Test integration type'
1735 1659 description = 'A test integration type for testing'
1736 1660
1737 1661 @classmethod
1738 1662 def icon(cls):
1739 1663 return 'test_icon_html_image'
1740 1664
1741 1665 def __init__(self, settings):
1742 1666 super(_StubIntegrationType, self).__init__(settings)
1743 1667 self.sent_events = [] # for testing
1744 1668
1745 1669 def send_event(self, event):
1746 1670 self.sent_events.append(event)
1747 1671
1748 1672 def settings_schema(self):
1749 1673 class SettingsSchema(colander.Schema):
1750 1674 test_string_field = colander.SchemaNode(
1751 1675 colander.String(),
1752 1676 missing=colander.required,
1753 1677 title='test string field',
1754 1678 )
1755 1679 test_int_field = colander.SchemaNode(
1756 1680 colander.Int(),
1757 1681 title='some integer setting',
1758 1682 )
1759 1683 return SettingsSchema()
1760 1684
1761 1685
1762 1686 integration_type_registry.register_integration_type(_StubIntegrationType)
1763 1687 return _StubIntegrationType
1764 1688
1765 1689 @pytest.fixture
1766 1690 def stub_integration_settings():
1767 1691 return {
1768 1692 'test_string_field': 'some data',
1769 1693 'test_int_field': 100,
1770 1694 }
1771 1695
1772 1696
1773 1697 @pytest.fixture
1774 1698 def repo_integration_stub(request, repo_stub, StubIntegrationType,
1775 1699 stub_integration_settings):
1776 1700 integration = IntegrationModel().create(
1777 1701 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1778 1702 name='test repo integration',
1779 1703 repo=repo_stub, repo_group=None, child_repos_only=None)
1780 1704
1781 1705 @request.addfinalizer
1782 1706 def cleanup():
1783 1707 IntegrationModel().delete(integration)
1784 1708
1785 1709 return integration
1786 1710
1787 1711
1788 1712 @pytest.fixture
1789 1713 def repogroup_integration_stub(request, test_repo_group, StubIntegrationType,
1790 1714 stub_integration_settings):
1791 1715 integration = IntegrationModel().create(
1792 1716 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1793 1717 name='test repogroup integration',
1794 1718 repo=None, repo_group=test_repo_group, child_repos_only=True)
1795 1719
1796 1720 @request.addfinalizer
1797 1721 def cleanup():
1798 1722 IntegrationModel().delete(integration)
1799 1723
1800 1724 return integration
1801 1725
1802 1726
1803 1727 @pytest.fixture
1804 1728 def repogroup_recursive_integration_stub(request, test_repo_group,
1805 1729 StubIntegrationType, stub_integration_settings):
1806 1730 integration = IntegrationModel().create(
1807 1731 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1808 1732 name='test recursive repogroup integration',
1809 1733 repo=None, repo_group=test_repo_group, child_repos_only=False)
1810 1734
1811 1735 @request.addfinalizer
1812 1736 def cleanup():
1813 1737 IntegrationModel().delete(integration)
1814 1738
1815 1739 return integration
1816 1740
1817 1741
1818 1742 @pytest.fixture
1819 1743 def global_integration_stub(request, StubIntegrationType,
1820 1744 stub_integration_settings):
1821 1745 integration = IntegrationModel().create(
1822 1746 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1823 1747 name='test global integration',
1824 1748 repo=None, repo_group=None, child_repos_only=None)
1825 1749
1826 1750 @request.addfinalizer
1827 1751 def cleanup():
1828 1752 IntegrationModel().delete(integration)
1829 1753
1830 1754 return integration
1831 1755
1832 1756
1833 1757 @pytest.fixture
1834 1758 def root_repos_integration_stub(request, StubIntegrationType,
1835 1759 stub_integration_settings):
1836 1760 integration = IntegrationModel().create(
1837 1761 StubIntegrationType, settings=stub_integration_settings, enabled=True,
1838 1762 name='test global integration',
1839 1763 repo=None, repo_group=None, child_repos_only=True)
1840 1764
1841 1765 @request.addfinalizer
1842 1766 def cleanup():
1843 1767 IntegrationModel().delete(integration)
1844 1768
1845 1769 return integration
1846 1770
1847 1771
1848 1772 @pytest.fixture
1849 1773 def local_dt_to_utc():
1850 1774 def _factory(dt):
1851 1775 return dt.replace(tzinfo=dateutil.tz.tzlocal()).astimezone(
1852 1776 dateutil.tz.tzutc()).replace(tzinfo=None)
1853 1777 return _factory
1854 1778
1855 1779
1856 1780 @pytest.fixture
1857 1781 def disable_anonymous_user(request, baseapp):
1858 1782 set_anonymous_access(False)
1859 1783
1860 1784 @request.addfinalizer
1861 1785 def cleanup():
1862 1786 set_anonymous_access(True)
1863 1787
1864 1788
1865 1789 @pytest.fixture(scope='module')
1866 1790 def rc_fixture(request):
1867 1791 return Fixture()
1868 1792
1869 1793
1870 1794 @pytest.fixture
1871 1795 def repo_groups(request):
1872 1796 fixture = Fixture()
1873 1797
1874 1798 session = Session()
1875 1799 zombie_group = fixture.create_repo_group('zombie')
1876 1800 parent_group = fixture.create_repo_group('parent')
1877 1801 child_group = fixture.create_repo_group('parent/child')
1878 1802 groups_in_db = session.query(RepoGroup).all()
1879 1803 assert len(groups_in_db) == 3
1880 1804 assert child_group.group_parent_id == parent_group.group_id
1881 1805
1882 1806 @request.addfinalizer
1883 1807 def cleanup():
1884 1808 fixture.destroy_repo_group(zombie_group)
1885 1809 fixture.destroy_repo_group(child_group)
1886 1810 fixture.destroy_repo_group(parent_group)
1887 1811
1888 1812 return zombie_group, parent_group, child_group
1889 1813
1890 1814
1891 1815 @pytest.fixture(scope="session")
1892 1816 def tmp_path_factory(request):
1893 1817 """Return a :class:`_pytest.tmpdir.TempPathFactory` instance for the test session.
1894 1818 """
1895 1819
1896 1820 class TempPathFactory:
1897 1821
1898 1822 def mktemp(self, basename):
1899 1823 import tempfile
1900 1824 return tempfile.mktemp(basename)
1901 1825
1902 1826 return TempPathFactory()
@@ -1,133 +1,133 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # Copyright (C) 2010-2019 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
20 20
21 21 import logging
22 22
23 23 import mock
24 24 import msgpack
25 25 import pytest
26 26
27 27 from rhodecode.lib import vcs
28 28 from rhodecode.lib.vcs import client_http, exceptions
29 29
30 30
31 31 def is_new_connection(logger, level, message):
32 32 return (
33 33 logger == 'requests.packages.urllib3.connectionpool' and
34 34 message.startswith('Starting new HTTP'))
35 35
36 36
37 37 @pytest.fixture
38 38 def stub_session():
39 39 """
40 40 Stub of `requests.Session()`.
41 41 """
42 42 session = mock.Mock()
43 43 post = session.post()
44 44 post.content = msgpack.packb({})
45 45 post.status_code = 200
46 46
47 47 session.reset_mock()
48 48 return session
49 49
50 50
51 51 @pytest.fixture
52 52 def stub_fail_session():
53 53 """
54 54 Stub of `requests.Session()`.
55 55 """
56 56 session = mock.Mock()
57 57 post = session.post()
58 58 post.content = msgpack.packb({'error': '500'})
59 59 post.status_code = 500
60 60
61 61 session.reset_mock()
62 62 return session
63 63
64 64
65 65 @pytest.fixture
66 66 def stub_session_factory(stub_session):
67 67 """
68 68 Stub of `rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory`.
69 69 """
70 70 session_factory = mock.Mock()
71 71 session_factory.return_value = stub_session
72 72 return session_factory
73 73
74 74
75 75 @pytest.fixture
76 76 def stub_session_failing_factory(stub_fail_session):
77 77 """
78 78 Stub of `rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory`.
79 79 """
80 80 session_factory = mock.Mock()
81 81 session_factory.return_value = stub_fail_session
82 82 return session_factory
83 83
84 84
85 85 def test_uses_persistent_http_connections(caplog, vcsbackend_hg):
86 86 repo = vcsbackend_hg.repo
87 87 remote_call = repo._remote.branches
88 88
89 89 with caplog.at_level(logging.INFO):
90 90 for x in range(5):
91 91 remote_call(normal=True, closed=False)
92 92
93 93 new_connections = [
94 94 r for r in caplog.record_tuples if is_new_connection(*r)]
95 95 assert len(new_connections) <= 1
96 96
97 97
98 98 def test_repo_maker_uses_session_for_classmethods(stub_session_factory):
99 repo_maker = client_http.RepoMaker(
99 repo_maker = client_http.RemoteVCSMaker(
100 100 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
101 101 repo_maker.example_call()
102 102 stub_session_factory().post.assert_called_with(
103 103 'http://server_and_port/endpoint', data=mock.ANY)
104 104
105 105
106 106 def test_repo_maker_uses_session_for_instance_methods(
107 107 stub_session_factory, config):
108 repo_maker = client_http.RepoMaker(
108 repo_maker = client_http.RemoteVCSMaker(
109 109 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_factory)
110 110 repo = repo_maker('stub_path', 'stub_repo_id', config)
111 111 repo.example_call()
112 112 stub_session_factory().post.assert_called_with(
113 113 'http://server_and_port/endpoint', data=mock.ANY)
114 114
115 115
116 116 @mock.patch('rhodecode.lib.vcs.client_http.ThreadlocalSessionFactory')
117 117 @mock.patch('rhodecode.lib.vcs.connection')
118 118 def test_connect_passes_in_the_same_session(
119 119 connection, session_factory_class, stub_session):
120 120 session_factory = session_factory_class.return_value
121 121 session_factory.return_value = stub_session
122 122
123 123 vcs.connect_http('server_and_port')
124 124
125 125
126 126 def test_repo_maker_uses_session_that_throws_error(
127 127 stub_session_failing_factory, config):
128 repo_maker = client_http.RepoMaker(
128 repo_maker = client_http.RemoteVCSMaker(
129 129 'server_and_port', 'endpoint', 'test_dummy_scm', stub_session_failing_factory)
130 130 repo = repo_maker('stub_path', 'stub_repo_id', config)
131 131
132 132 with pytest.raises(exceptions.HttpVCSCommunicationError):
133 133 repo.example_call()
1 NO CONTENT: file was removed
General Comments 0
You need to be logged in to leave comments. Login now