caches: new cache implementation for remote functions
marcink
r739:6b84a339 default
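The commit drops the repo-object caching that previously lived in the factories and instead caches individual remote methods, keyed by the calling context and repository. The sketch below restates that pattern in isolation; it assumes a dogpile.cache-backed region object that provides the conditional_cache_on_arguments(condition=...) decorator used throughout the diff, and the class and method names are illustrative only, not part of the commit.

# Sketch of the caching pattern applied to GitRemote, HgRemote and SvnRemote.
# `factory._cache_region` is assumed to be a dogpile.cache region whose
# conditional_cache_on_arguments(condition=...) decorator caches the wrapped
# call only when the condition is truthy, as in the diff above.

def _cache_on(wire):
    # Derive the cache key parts from the wire payload; cache only when a
    # request context is present and the caller did not disable caching.
    context = wire.get('context', '')
    context_uid = '{}'.format(context)
    repo_id = wire.get('repo_id', '')
    cache = wire.get('cache', True)
    return bool(context and cache), context_uid, repo_id


class ExampleRemote(object):  # illustrative name, not part of the commit
    def __init__(self, factory):
        self._factory = factory
        self.region = factory._cache_region

    def tags(self, wire):
        cache_on, context_uid, repo_id = _cache_on(wire)

        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _tags(_context_uid, _repo_id):
            # The underscored arguments only feed the cache key; the closure
            # still reads `wire` to open the repository.
            repo = self._factory.repo(wire)
            return repo.tags()

        return _tags(context_uid, repo_id)

Keying on (context_uid, repo_id) rather than the whole wire dict keeps cache keys stable across repeated calls against the same repository, while passing cache=False in the wire still lets a caller bypass the cache entirely.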
@@ -14,6 +14,7 b''
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17 18 import collections
18 19 import logging
19 20 import os
@@ -39,7 +40,7 b' from dulwich.server import update_server'
39 40
40 41 from vcsserver import exceptions, settings, subprocessio
41 42 from vcsserver.utils import safe_str
42 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
43 from vcsserver.base import RepoFactory, obfuscate_qs
43 44 from vcsserver.hgcompat import (
44 45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
45 46 from vcsserver.git_lfs.lib import LFSOidStore
@@ -47,10 +48,19 b' from vcsserver.git_lfs.lib import LFSOid'
47 48 DIR_STAT = stat.S_IFDIR
48 49 FILE_MODE = stat.S_IFMT
49 50 GIT_LINK = objects.S_IFGITLINK
51 PEELED_REF_MARKER = '^{}'
52
50 53
51 54 log = logging.getLogger(__name__)
52 55
53 56
57 def str_to_dulwich(value):
58 """
59 Dulwich 0.10.1a requires `unicode` objects to be passed in.
60 """
61 return value.decode(settings.WIRE_ENCODING)
62
63
54 64 def reraise_safe_exceptions(func):
55 65 """Converts Dulwich exceptions to something neutral."""
56 66
@@ -111,19 +121,7 b' class GitFactory(RepoFactory):'
111 121 """
112 122 Get a repository instance for the given path.
113 123 """
114 region = self._cache_region
115 context = wire.get('context', None)
116 repo_path = wire.get('path', '')
117 context_uid = '{}'.format(context)
118 cache = wire.get('cache', True)
119 cache_on = context and cache
120
121 @region.conditional_cache_on_arguments(condition=cache_on)
122 def create_new_repo(_repo_type, _repo_path, _context_uid, _use_libgit2):
123 return self._create_repo(wire, create, use_libgit2)
124
125 repo = create_new_repo(self.repo_type, repo_path, context_uid, use_libgit2)
126 return repo
124 return self._create_repo(wire, create, use_libgit2)
127 125
128 126 def repo_libgit2(self, wire):
129 127 return self.repo(wire, use_libgit2=True)
@@ -133,14 +131,15 b' class GitRemote(object):'
133 131
134 132 def __init__(self, factory):
135 133 self._factory = factory
136 self.peeled_ref_marker = '^{}'
137 134 self._bulk_methods = {
138 135 "date": self.date,
139 136 "author": self.author,
137 "branch": self.branch,
140 138 "message": self.message,
141 139 "parents": self.parents,
142 140 "_commit": self.revision,
143 141 }
142 self.region = self._factory._cache_region
144 143
145 144 def _wire_to_config(self, wire):
146 145 if 'config' in wire:
@@ -156,6 +155,23 b' class GitRemote(object):'
156 155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
157 156 return params
158 157
158 def _cache_on(self, wire):
159 context = wire.get('context', '')
160 context_uid = '{}'.format(context)
161 repo_id = wire.get('repo_id', '')
162 cache = wire.get('cache', True)
163 cache_on = context and cache
164 return cache_on, context_uid, repo_id
165
166 @reraise_safe_exceptions
167 def discover_git_version(self):
168 stdout, _ = self.run_git_command(
169 {}, ['--version'], _bare=True, _safe=True)
170 prefix = 'git version'
171 if stdout.startswith(prefix):
172 stdout = stdout[len(prefix):]
173 return stdout.strip()
174
159 175 @reraise_safe_exceptions
160 176 def is_empty(self, wire):
161 177 repo_init = self._factory.repo_libgit2(wire)
@@ -184,17 +200,21 b' class GitRemote(object):'
184 200
185 201 @reraise_safe_exceptions
186 202 def assert_correct_path(self, wire):
187 try:
188 repo_init = self._factory.repo_libgit2(wire)
189 with repo_init as repo:
190 pass
191 except pygit2.GitError:
192 path = wire.get('path')
193 tb = traceback.format_exc()
194 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
195 return False
203 cache_on, context_uid, repo_id = self._cache_on(wire)
204 @self.region.conditional_cache_on_arguments(condition=cache_on)
205 def _assert_correct_path(_context_uid, _repo_id):
206 try:
207 repo_init = self._factory.repo_libgit2(wire)
208 with repo_init as repo:
209 pass
210 except pygit2.GitError:
211 path = wire.get('path')
212 tb = traceback.format_exc()
213 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
214 return False
196 215
197 return True
216 return True
217 return _assert_correct_path(context_uid, repo_id)
198 218
199 219 @reraise_safe_exceptions
200 220 def bare(self, wire):
@@ -212,10 +232,16 b' class GitRemote(object):'
212 232
213 233 @reraise_safe_exceptions
214 234 def blob_raw_length(self, wire, sha):
215 repo_init = self._factory.repo_libgit2(wire)
216 with repo_init as repo:
217 blob = repo[sha]
218 return blob.size
235 cache_on, context_uid, repo_id = self._cache_on(wire)
236 @self.region.conditional_cache_on_arguments(condition=cache_on)
237 def _blob_raw_length(_context_uid, _repo_id, _sha):
238
239 repo_init = self._factory.repo_libgit2(wire)
240 with repo_init as repo:
241 blob = repo[sha]
242 return blob.size
243
244 return _blob_raw_length(context_uid, repo_id, sha)
219 245
220 246 def _parse_lfs_pointer(self, raw_content):
221 247
@@ -236,13 +262,19 b' class GitRemote(object):'
236 262
237 263 @reraise_safe_exceptions
238 264 def is_large_file(self, wire, sha):
239 repo_init = self._factory.repo_libgit2(wire)
240 with repo_init as repo:
241 blob = repo[sha]
242 if blob.is_binary:
243 return {}
244 265
245 return self._parse_lfs_pointer(blob.data)
266 cache_on, context_uid, repo_id = self._cache_on(wire)
267 @self.region.conditional_cache_on_arguments(condition=cache_on)
268 def _is_large_file(_context_uid, _repo_id, _sha):
269 repo_init = self._factory.repo_libgit2(wire)
270 with repo_init as repo:
271 blob = repo[sha]
272 if blob.is_binary:
273 return {}
274
275 return self._parse_lfs_pointer(blob.data)
276
277 return _is_large_file(context_uid, repo_id, sha)
246 278
247 279 @reraise_safe_exceptions
248 280 def in_largefiles_store(self, wire, oid):
@@ -276,15 +308,21 b' class GitRemote(object):'
276 308
277 309 @reraise_safe_exceptions
278 310 def bulk_request(self, wire, rev, pre_load):
279 result = {}
280 for attr in pre_load:
281 try:
282 method = self._bulk_methods[attr]
283 args = [wire, rev]
284 result[attr] = method(*args)
285 except KeyError as e:
286 raise exceptions.VcsException(e)("Unknown bulk attribute: %s" % attr)
287 return result
311 cache_on, context_uid, repo_id = self._cache_on(wire)
312 @self.region.conditional_cache_on_arguments(condition=cache_on)
313 def _bulk_request(_context_uid, _repo_id, _rev, _pre_load):
314 result = {}
315 for attr in pre_load:
316 try:
317 method = self._bulk_methods[attr]
318 args = [wire, rev]
319 result[attr] = method(*args)
320 except KeyError as e:
321 raise exceptions.VcsException(e)(
322 "Unknown bulk attribute: %s" % attr)
323 return result
324
325 return _bulk_request(context_uid, repo_id, rev, sorted(pre_load))
288 326
289 327 def _build_opener(self, url):
290 328 handlers = []
@@ -371,6 +409,34 b' class GitRemote(object):'
371 409 index.build_index_from_tree(repo.path, repo.index_path(),
372 410 repo.object_store, repo["HEAD"].tree)
373 411
412 @reraise_safe_exceptions
413 def branch(self, wire, commit_id):
414 cache_on, context_uid, repo_id = self._cache_on(wire)
415 cache_on = False
416 @self.region.conditional_cache_on_arguments(condition=cache_on)
417 def _branch(_context_uid, _repo_id, _commit_id):
418 regex = re.compile('^refs/heads')
419
420 def filter_with(ref):
421 return regex.match(ref[0]) and ref[1] == _commit_id
422
423 branches = filter(filter_with, self.get_refs(wire).items())
424 return [x[0].split('refs/heads/')[-1] for x in branches]
425
426 return _branch(context_uid, repo_id, commit_id)
427
428 @reraise_safe_exceptions
429 def commit_branches(self, wire, commit_id):
430 cache_on, context_uid, repo_id = self._cache_on(wire)
431 @self.region.conditional_cache_on_arguments(condition=cache_on)
432 def _commit_branches(_context_uid, _repo_id, _commit_id):
433 repo_init = self._factory.repo_libgit2(wire)
434 with repo_init as repo:
435 branches = [x for x in repo.branches.with_commit(_commit_id)]
436 return branches
437
438 return _commit_branches(context_uid, repo_id, commit_id)
439
374 440 # TODO: this is quite complex, check if that can be simplified
375 441 @reraise_safe_exceptions
376 442 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
@@ -510,7 +576,7 b' class GitRemote(object):'
510 576 # that contains a tag object, so that we would end up with
511 577 # a peeled ref at this point.
512 578 for k in remote_refs:
513 if k.endswith(self.peeled_ref_marker):
579 if k.endswith(PEELED_REF_MARKER):
514 580 log.debug("Skipping peeled reference %s", k)
515 581 continue
516 582 repo[k] = remote_refs[k]
@@ -547,7 +613,7 b' class GitRemote(object):'
547 613 if ref in remote_refs:
548 614 # duplicate, skip
549 615 continue
550 if ref.endswith(self.peeled_ref_marker):
616 if ref.endswith(PEELED_REF_MARKER):
551 617 log.debug("Skipping peeled reference %s", ref)
552 618 continue
553 619 # don't sync HEAD
@@ -579,7 +645,7 b' class GitRemote(object):'
579 645 if not self.check_url(url, wire):
580 646 return
581 647 config = self._wire_to_config(wire)
582 repo = self._factory.repo(wire)
648 self._factory.repo(wire)
583 649 self.run_git_command(
584 650 wire, ['push', url, '--mirror'], fail_on_stderr=False,
585 651 _copts=self._remote_conf(config),
@@ -612,53 +678,80 b' class GitRemote(object):'
612 678
613 679 @reraise_safe_exceptions
614 680 def get_object(self, wire, sha):
615 repo_init = self._factory.repo_libgit2(wire)
616 with repo_init as repo:
681
682 cache_on, context_uid, repo_id = self._cache_on(wire)
683 @self.region.conditional_cache_on_arguments(condition=cache_on)
684 def _get_object(_context_uid, _repo_id, _sha):
685 repo_init = self._factory.repo_libgit2(wire)
686 with repo_init as repo:
617 687
618 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
619 try:
620 commit = repo.revparse_single(sha)
621 except (KeyError, ValueError) as e:
622 raise exceptions.LookupException(e)(missing_commit_err)
688 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
689 try:
690 commit = repo.revparse_single(sha)
691 except (KeyError, ValueError) as e:
692 raise exceptions.LookupException(e)(missing_commit_err)
623 693
624 if isinstance(commit, pygit2.Tag):
625 commit = repo.get(commit.target)
694 if isinstance(commit, pygit2.Tag):
695 commit = repo.get(commit.target)
626 696
627 # check for dangling commit
628 branches = [x for x in repo.branches.with_commit(commit.hex)]
629 if not branches:
630 raise exceptions.LookupException(None)(missing_commit_err)
697 # check for dangling commit
698 branches = [x for x in repo.branches.with_commit(commit.hex)]
699 if not branches:
700 raise exceptions.LookupException(None)(missing_commit_err)
701
702 commit_id = commit.hex
703 type_id = commit.type
631 704
632 commit_id = commit.hex
633 type_id = commit.type
705 return {
706 'id': commit_id,
707 'type': self._type_id_to_name(type_id),
708 'commit_id': commit_id,
709 'idx': 0
710 }
634 711
635 return {
636 'id': commit_id,
637 'type': self._type_id_to_name(type_id),
638 'commit_id': commit_id,
639 'idx': 0
640 }
712 return _get_object(context_uid, repo_id, sha)
641 713
642 714 @reraise_safe_exceptions
643 715 def get_refs(self, wire):
644 repo_init = self._factory.repo_libgit2(wire)
645 with repo_init as repo:
646 result = {}
647 for ref in repo.references:
648 peeled_sha = repo.lookup_reference(ref).peel()
649 result[ref] = peeled_sha.hex
716 cache_on, context_uid, repo_id = self._cache_on(wire)
717 @self.region.conditional_cache_on_arguments(condition=cache_on)
718 def _get_refs(_context_uid, _repo_id):
719
720 repo_init = self._factory.repo_libgit2(wire)
721 with repo_init as repo:
722 regex = re.compile('^refs/(heads|tags)/')
723 return {x.name: x.target.hex for x in
724 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
725
726 return _get_refs(context_uid, repo_id)
650 727
651 return result
728 @reraise_safe_exceptions
729 def get_branch_pointers(self, wire):
730 cache_on, context_uid, repo_id = self._cache_on(wire)
731 @self.region.conditional_cache_on_arguments(condition=cache_on)
732 def _get_branch_pointers(_context_uid, _repo_id):
733
734 repo_init = self._factory.repo_libgit2(wire)
735 regex = re.compile('^refs/heads')
736 with repo_init as repo:
737 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
738 return {x.target.hex: x.shorthand for x in branches}
739
740 return _get_branch_pointers(context_uid, repo_id)
652 741
653 742 @reraise_safe_exceptions
654 743 def head(self, wire, show_exc=True):
655 repo_init = self._factory.repo_libgit2(wire)
656 with repo_init as repo:
657 try:
658 return repo.head.peel().hex
659 except Exception:
660 if show_exc:
661 raise
744 cache_on, context_uid, repo_id = self._cache_on(wire)
745 @self.region.conditional_cache_on_arguments(condition=cache_on)
746 def _head(_context_uid, _repo_id, _show_exc):
747 repo_init = self._factory.repo_libgit2(wire)
748 with repo_init as repo:
749 try:
750 return repo.head.peel().hex
751 except Exception:
752 if show_exc:
753 raise
754 return _head(context_uid, repo_id, show_exc)
662 755
663 756 @reraise_safe_exceptions
664 757 def init(self, wire):
@@ -672,17 +765,22 b' class GitRemote(object):'
672 765
673 766 @reraise_safe_exceptions
674 767 def revision(self, wire, rev):
675 repo_init = self._factory.repo_libgit2(wire)
676 with repo_init as repo:
677 commit = repo[rev]
678 obj_data = {
679 'id': commit.id.hex,
680 }
681 # tree objects itself don't have tree_id attribute
682 if hasattr(commit, 'tree_id'):
683 obj_data['tree'] = commit.tree_id.hex
684 768
685 return obj_data
769 cache_on, context_uid, repo_id = self._cache_on(wire)
770 @self.region.conditional_cache_on_arguments(condition=cache_on)
771 def _revision(_context_uid, _repo_id, _rev):
772 repo_init = self._factory.repo_libgit2(wire)
773 with repo_init as repo:
774 commit = repo[rev]
775 obj_data = {
776 'id': commit.id.hex,
777 }
778 # tree objects itself don't have tree_id attribute
779 if hasattr(commit, 'tree_id'):
780 obj_data['tree'] = commit.tree_id.hex
781
782 return obj_data
783 return _revision(context_uid, repo_id, rev)
686 784
687 785 @reraise_safe_exceptions
688 786 def date(self, wire, rev):
@@ -711,10 +809,14 b' class GitRemote(object):'
711 809
712 810 @reraise_safe_exceptions
713 811 def parents(self, wire, rev):
714 repo_init = self._factory.repo_libgit2(wire)
715 with repo_init as repo:
716 commit = repo[rev]
717 return [x.hex for x in commit.parent_ids]
812 cache_on, context_uid, repo_id = self._cache_on(wire)
813 @self.region.conditional_cache_on_arguments(condition=cache_on)
814 def _parents(_context_uid, _repo_id, _rev):
815 repo_init = self._factory.repo_libgit2(wire)
816 with repo_init as repo:
817 commit = repo[rev]
818 return [x.hex for x in commit.parent_ids]
819 return _parents(context_uid, repo_id, rev)
718 820
719 821 @reraise_safe_exceptions
720 822 def set_refs(self, wire, key, value):
@@ -758,39 +860,49 b' class GitRemote(object):'
758 860
759 861 @reraise_safe_exceptions
760 862 def tree_and_type_for_path(self, wire, commit_id, path):
761 repo_init = self._factory.repo_libgit2(wire)
863
864 cache_on, context_uid, repo_id = self._cache_on(wire)
865 @self.region.conditional_cache_on_arguments(condition=cache_on)
866 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
867 repo_init = self._factory.repo_libgit2(wire)
762 868
763 with repo_init as repo:
764 commit = repo[commit_id]
765 try:
766 tree = commit.tree[path]
767 except KeyError:
768 return None, None, None
869 with repo_init as repo:
870 commit = repo[commit_id]
871 try:
872 tree = commit.tree[path]
873 except KeyError:
874 return None, None, None
769 875
770 return tree.id.hex, tree.type, tree.filemode
876 return tree.id.hex, tree.type, tree.filemode
877 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
771 878
772 879 @reraise_safe_exceptions
773 880 def tree_items(self, wire, tree_id):
774 repo_init = self._factory.repo_libgit2(wire)
881
882 cache_on, context_uid, repo_id = self._cache_on(wire)
883 @self.region.conditional_cache_on_arguments(condition=cache_on)
884 def _tree_items(_context_uid, _repo_id, _tree_id):
775 885
776 with repo_init as repo:
777 try:
778 tree = repo[tree_id]
779 except KeyError:
780 raise ObjectMissing('No tree with id: {}'.format(tree_id))
886 repo_init = self._factory.repo_libgit2(wire)
887 with repo_init as repo:
888 try:
889 tree = repo[tree_id]
890 except KeyError:
891 raise ObjectMissing('No tree with id: {}'.format(tree_id))
781 892
782 result = []
783 for item in tree:
784 item_sha = item.hex
785 item_mode = item.filemode
786 item_type = item.type
893 result = []
894 for item in tree:
895 item_sha = item.hex
896 item_mode = item.filemode
897 item_type = item.type
787 898
788 if item_type == 'commit':
789 # NOTE(marcink): submodules we translate to 'link' for backward compat
790 item_type = 'link'
899 if item_type == 'commit':
900 # NOTE(marcink): submodules we translate to 'link' for backward compat
901 item_type = 'link'
791 902
792 result.append((item.name, item_mode, item_sha, item_type))
793 return result
903 result.append((item.name, item_mode, item_sha, item_type))
904 return result
905 return _tree_items(context_uid, repo_id, tree_id)
794 906
795 907 @reraise_safe_exceptions
796 908 def update_server_info(self, wire):
@@ -798,24 +910,20 b' class GitRemote(object):'
798 910 update_server_info(repo)
799 911
800 912 @reraise_safe_exceptions
801 def discover_git_version(self):
802 stdout, _ = self.run_git_command(
803 {}, ['--version'], _bare=True, _safe=True)
804 prefix = 'git version'
805 if stdout.startswith(prefix):
806 stdout = stdout[len(prefix):]
807 return stdout.strip()
808
809 @reraise_safe_exceptions
810 913 def get_all_commit_ids(self, wire):
811 914
812 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
813 try:
814 output, __ = self.run_git_command(wire, cmd)
815 return output.splitlines()
816 except Exception:
817 # Can be raised for empty repositories
818 return []
915 cache_on, context_uid, repo_id = self._cache_on(wire)
916 @self.region.conditional_cache_on_arguments(condition=cache_on)
917 def _get_all_commit_ids(_context_uid, _repo_id):
918
919 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
920 try:
921 output, __ = self.run_git_command(wire, cmd)
922 return output.splitlines()
923 except Exception:
924 # Can be raised for empty repositories
925 return []
926 return _get_all_commit_ids(context_uid, repo_id)
819 927
820 928 @reraise_safe_exceptions
821 929 def run_git_command(self, wire, cmd, **opts):
@@ -870,22 +978,17 b' class GitRemote(object):'
870 978 @reraise_safe_exceptions
871 979 def install_hooks(self, wire, force=False):
872 980 from vcsserver.hook_utils import install_git_hooks
873 repo = self._factory.repo(wire)
874 return install_git_hooks(repo.path, repo.bare, force_create=force)
981 bare = self.bare(wire)
982 path = wire['path']
983 return install_git_hooks(path, bare, force_create=force)
875 984
876 985 @reraise_safe_exceptions
877 986 def get_hooks_info(self, wire):
878 987 from vcsserver.hook_utils import (
879 988 get_git_pre_hook_version, get_git_post_hook_version)
880 repo = self._factory.repo(wire)
989 bare = self.bare(wire)
990 path = wire['path']
881 991 return {
882 'pre_version': get_git_pre_hook_version(repo.path, repo.bare),
883 'post_version': get_git_post_hook_version(repo.path, repo.bare),
992 'pre_version': get_git_pre_hook_version(path, bare),
993 'post_version': get_git_post_hook_version(path, bare),
884 994 }
885
886
887 def str_to_dulwich(value):
888 """
889 Dulwich 0.10.1a requires `unicode` objects to be passed in.
890 """
891 return value.decode(settings.WIRE_ENCODING)
@@ -147,25 +147,13 b' class MercurialFactory(RepoFactory):'
147 147 """
148 148 Get a repository instance for the given path.
149 149 """
150 region = self._cache_region
151 context = wire.get('context', None)
152 repo_path = wire.get('path', '')
153 context_uid = '{}'.format(context)
154 cache = wire.get('cache', True)
155 cache_on = context and cache
156
157 @region.conditional_cache_on_arguments(condition=cache_on)
158 def create_new_repo(_repo_type, _repo_path, _context_uid):
159 return self._create_repo(wire, create)
160
161 return create_new_repo(self.repo_type, repo_path, context_uid)
150 return self._create_repo(wire, create)
162 151
163 152
164 153 class HgRemote(object):
165 154
166 155 def __init__(self, factory):
167 156 self._factory = factory
168
169 157 self._bulk_methods = {
170 158 "affected_files": self.ctx_files,
171 159 "author": self.ctx_user,
@@ -180,10 +168,19 b' class HgRemote(object):'
180 168 "hidden": self.ctx_hidden,
181 169 "_file_paths": self.ctx_list,
182 170 }
171 self.region = self._factory._cache_region
183 172
184 173 def _get_ctx(self, repo, ref):
185 174 return get_ctx(repo, ref)
186 175
176 def _cache_on(self, wire):
177 context = wire.get('context', '')
178 context_uid = '{}'.format(context)
179 repo_id = wire.get('repo_id', '')
180 cache = wire.get('cache', True)
181 cache_on = context and cache
182 return cache_on, context_uid, repo_id
183
187 184 @reraise_safe_exceptions
188 185 def discover_hg_version(self):
189 186 from mercurial import util
@@ -217,33 +214,48 b' class HgRemote(object):'
217 214
218 215 @reraise_safe_exceptions
219 216 def bookmarks(self, wire):
220 repo = self._factory.repo(wire)
221 return dict(repo._bookmarks)
217 cache_on, context_uid, repo_id = self._cache_on(wire)
218 @self.region.conditional_cache_on_arguments(condition=cache_on)
219 def _bookmarks(_context_uid, _repo_id):
220 repo = self._factory.repo(wire)
221 return dict(repo._bookmarks)
222
223 return _bookmarks(context_uid, repo_id)
222 224
223 225 @reraise_safe_exceptions
224 226 def branches(self, wire, normal, closed):
225 repo = self._factory.repo(wire)
226 iter_branches = repo.branchmap().iterbranches()
227 bt = {}
228 for branch_name, _heads, tip, is_closed in iter_branches:
229 if normal and not is_closed:
230 bt[branch_name] = tip
231 if closed and is_closed:
232 bt[branch_name] = tip
227 cache_on, context_uid, repo_id = self._cache_on(wire)
228 @self.region.conditional_cache_on_arguments(condition=cache_on)
229 def _branches(_context_uid, _repo_id, _normal, _closed):
230 repo = self._factory.repo(wire)
231 iter_branches = repo.branchmap().iterbranches()
232 bt = {}
233 for branch_name, _heads, tip, is_closed in iter_branches:
234 if normal and not is_closed:
235 bt[branch_name] = tip
236 if closed and is_closed:
237 bt[branch_name] = tip
233 238
234 return bt
239 return bt
240
241 return _branches(context_uid, repo_id, normal, closed)
235 242
236 243 @reraise_safe_exceptions
237 244 def bulk_request(self, wire, rev, pre_load):
238 result = {}
239 for attr in pre_load:
240 try:
241 method = self._bulk_methods[attr]
242 result[attr] = method(wire, rev)
243 except KeyError as e:
244 raise exceptions.VcsException(e)(
245 'Unknown bulk attribute: "%s"' % attr)
246 return result
245 cache_on, context_uid, repo_id = self._cache_on(wire)
246 @self.region.conditional_cache_on_arguments(condition=cache_on)
247 def _bulk_request(_context_uid, _repo_id, _rev, _pre_load):
248 result = {}
249 for attr in pre_load:
250 try:
251 method = self._bulk_methods[attr]
252 result[attr] = method(wire, rev)
253 except KeyError as e:
254 raise exceptions.VcsException(e)(
255 'Unknown bulk attribute: "%s"' % attr)
256 return result
257
258 return _bulk_request(context_uid, repo_id, rev, sorted(pre_load))
247 259
248 260 @reraise_safe_exceptions
249 261 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
@@ -251,9 +263,8 b' class HgRemote(object):'
251 263 clone(baseui, source, dest, noupdate=not update_after_clone)
252 264
253 265 @reraise_safe_exceptions
254 def commitctx(
255 self, wire, message, parents, commit_time, commit_timezone,
256 user, files, extra, removed, updated):
266 def commitctx(self, wire, message, parents, commit_time, commit_timezone,
267 user, files, extra, removed, updated):
257 268
258 269 repo = self._factory.repo(wire)
259 270 baseui = self._factory._create_config(wire['config'])
@@ -284,7 +295,7 b' class HgRemote(object):'
284 295 data=node['content'],
285 296 islink=False,
286 297 isexec=bool(node['mode'] & stat.S_IXUSR),
287 copied=False)
298 copysource=False)
288 299
289 300 raise exceptions.AbortException()(
290 301 "Given path haven't been marked as added, "
@@ -309,15 +320,14 b' class HgRemote(object):'
309 320
310 321 @reraise_safe_exceptions
311 322 def ctx_branch(self, wire, revision):
312 repo = self._factory.repo(wire)
313 ctx = self._get_ctx(repo, revision)
314 return ctx.branch()
315 323
316 @reraise_safe_exceptions
317 def ctx_children(self, wire, revision):
318 repo = self._factory.repo(wire)
319 ctx = self._get_ctx(repo, revision)
320 return [child.rev() for child in ctx.children()]
324 cache_on, context_uid, repo_id = self._cache_on(wire)
325 @self.region.conditional_cache_on_arguments(condition=cache_on)
326 def _ctx_branch(_context_uid, _repo_id, _revision):
327 repo = self._factory.repo(wire)
328 ctx = self._get_ctx(repo, revision)
329 return ctx.branch()
330 return _ctx_branch(context_uid, repo_id, revision)
321 331
322 332 @reraise_safe_exceptions
323 333 def ctx_date(self, wire, revision):
@@ -333,9 +343,15 b' class HgRemote(object):'
333 343
334 344 @reraise_safe_exceptions
335 345 def ctx_files(self, wire, revision):
336 repo = self._factory.repo(wire)
337 ctx = self._get_ctx(repo, revision)
338 return ctx.files()
346
347 cache_on, context_uid, repo_id = self._cache_on(wire)
348 @self.region.conditional_cache_on_arguments(condition=cache_on)
349 def _ctx_files(_context_uid, _repo_id, _revision):
350 repo = self._factory.repo(wire)
351 ctx = self._get_ctx(repo, revision)
352 return ctx.files()
353
354 return _ctx_files(context_uid, repo_id, revision)
339 355
340 356 @reraise_safe_exceptions
341 357 def ctx_list(self, path, revision):
@@ -345,9 +361,27 b' class HgRemote(object):'
345 361
346 362 @reraise_safe_exceptions
347 363 def ctx_parents(self, wire, revision):
348 repo = self._factory.repo(wire)
349 ctx = self._get_ctx(repo, revision)
350 return [parent.rev() for parent in ctx.parents()]
364 cache_on, context_uid, repo_id = self._cache_on(wire)
365 @self.region.conditional_cache_on_arguments(condition=cache_on)
366 def _ctx_parents(_context_uid, _repo_id, _revision):
367 repo = self._factory.repo(wire)
368 ctx = self._get_ctx(repo, revision)
369 return [parent.rev() for parent in ctx.parents()
370 if not (parent.hidden() or parent.obsolete())]
371
372 return _ctx_parents(context_uid, repo_id, revision)
373
374 @reraise_safe_exceptions
375 def ctx_children(self, wire, revision):
376 cache_on, context_uid, repo_id = self._cache_on(wire)
377 @self.region.conditional_cache_on_arguments(condition=cache_on)
378 def _ctx_children(_context_uid, _repo_id, _revision):
379 repo = self._factory.repo(wire)
380 ctx = self._get_ctx(repo, revision)
381 return [child.rev() for child in ctx.children()
382 if not (child.hidden() or child.obsolete())]
383
384 return _ctx_children(context_uid, repo_id, revision)
351 385
352 386 @reraise_safe_exceptions
353 387 def ctx_phase(self, wire, revision):
@@ -456,9 +490,7 b' class HgRemote(object):'
456 490 return True
457 491
458 492 @reraise_safe_exceptions
459 def diff(
460 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
461 context):
493 def diff(self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews, context):
462 494 repo = self._factory.repo(wire)
463 495
464 496 if file_filter:
@@ -527,7 +559,7 b' class HgRemote(object):'
527 559 return result
528 560
529 561 @reraise_safe_exceptions
530 def fctx_data(self, wire, revision, path):
562 def fctx_node_data(self, wire, revision, path):
531 563 repo = self._factory.repo(wire)
532 564 ctx = self._get_ctx(repo, revision)
533 565 fctx = ctx.filectx(path)
@@ -549,10 +581,14 b' class HgRemote(object):'
549 581
550 582 @reraise_safe_exceptions
551 583 def get_all_commit_ids(self, wire, name):
552 repo = self._factory.repo(wire)
553 repo = repo.filtered(name)
554 revs = map(lambda x: hex(x[7]), repo.changelog.index)
555 return revs
584 cache_on, context_uid, repo_id = self._cache_on(wire)
585 @self.region.conditional_cache_on_arguments(condition=cache_on)
586 def _get_all_commit_ids(_context_uid, _repo_id, _name):
587 repo = self._factory.repo(wire)
588 repo = repo.filtered(name)
589 revs = map(lambda x: hex(x[7]), repo.changelog.index)
590 return revs
591 return _get_all_commit_ids(context_uid, repo_id, name)
556 592
557 593 @reraise_safe_exceptions
558 594 def get_config_value(self, wire, section, name, untrusted=False):
@@ -571,7 +607,12 b' class HgRemote(object):'
571 607
572 608 @reraise_safe_exceptions
573 609 def is_large_file(self, wire, path):
574 return largefiles.lfutil.isstandin(path)
610 cache_on, context_uid, repo_id = self._cache_on(wire)
611 @self.region.conditional_cache_on_arguments(condition=cache_on)
612 def _is_large_file(_context_uid, _repo_id, _path):
613 return largefiles.lfutil.isstandin(path)
614
615 return _is_large_file(context_uid, repo_id, path)
575 616
576 617 @reraise_safe_exceptions
577 618 def in_largefiles_store(self, wire, sha):
@@ -600,31 +641,36 b' class HgRemote(object):'
600 641
601 642 @reraise_safe_exceptions
602 643 def lookup(self, wire, revision, both):
603
604 repo = self._factory.repo(wire)
644 cache_on, context_uid, repo_id = self._cache_on(wire)
645 @self.region.conditional_cache_on_arguments(condition=cache_on)
646 def _lookup(_context_uid, _repo_id, _revision, _both):
605 647
606 if isinstance(revision, int):
607 # NOTE(marcink):
608 # since Mercurial doesn't support negative indexes properly
609 # we need to shift accordingly by one to get proper index, e.g
610 # repo[-1] => repo[-2]
611 # repo[0] => repo[-1]
612 if revision <= 0:
613 revision = revision + -1
614 try:
615 ctx = self._get_ctx(repo, revision)
616 except (TypeError, RepoLookupError) as e:
617 e._org_exc_tb = traceback.format_exc()
618 raise exceptions.LookupException(e)(revision)
619 except LookupError as e:
620 e._org_exc_tb = traceback.format_exc()
621 raise exceptions.LookupException(e)(e.name)
648 repo = self._factory.repo(wire)
649 rev = _revision
650 if isinstance(rev, int):
651 # NOTE(marcink):
652 # since Mercurial doesn't support negative indexes properly
653 # we need to shift accordingly by one to get proper index, e.g
654 # repo[-1] => repo[-2]
655 # repo[0] => repo[-1]
656 if rev <= 0:
657 rev = rev + -1
658 try:
659 ctx = self._get_ctx(repo, rev)
660 except (TypeError, RepoLookupError) as e:
661 e._org_exc_tb = traceback.format_exc()
662 raise exceptions.LookupException(e)(rev)
663 except LookupError as e:
664 e._org_exc_tb = traceback.format_exc()
665 raise exceptions.LookupException(e)(e.name)
622 666
623 if not both:
624 return ctx.hex()
667 if not both:
668 return ctx.hex()
625 669
626 ctx = repo[ctx.hex()]
627 return ctx.hex(), ctx.rev()
670 ctx = repo[ctx.hex()]
671 return ctx.hex(), ctx.rev()
672
673 return _lookup(context_uid, repo_id, revision, both)
628 674
629 675 @reraise_safe_exceptions
630 676 def pull(self, wire, url, commit_ids=None):
@@ -667,10 +713,15 b' class HgRemote(object):'
667 713 return ctx.rev()
668 714
669 715 @reraise_safe_exceptions
670 def rev_range(self, wire, filter):
671 repo = self._factory.repo(wire)
672 revisions = [rev for rev in revrange(repo, filter)]
673 return revisions
716 def rev_range(self, wire, commit_filter):
717 cache_on, context_uid, repo_id = self._cache_on(wire)
718 @self.region.conditional_cache_on_arguments(condition=cache_on)
719 def _rev_range(_context_uid, _repo_id, _filter):
720 repo = self._factory.repo(wire)
721 revisions = [rev for rev in revrange(repo, commit_filter)]
722 return revisions
723
724 return _rev_range(context_uid, repo_id, sorted(commit_filter))
674 725
675 726 @reraise_safe_exceptions
676 727 def rev_range_hash(self, wire, node):
@@ -724,8 +775,7 b' class HgRemote(object):'
724 775 return output.getvalue()
725 776
726 777 @reraise_safe_exceptions
727 def tag(self, wire, name, revision, message, local, user,
728 tag_time, tag_timezone):
778 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
729 779 repo = self._factory.repo(wire)
730 780 ctx = self._get_ctx(repo, revision)
731 781 node = ctx.node()
@@ -740,8 +790,13 b' class HgRemote(object):'
740 790
741 791 @reraise_safe_exceptions
742 792 def tags(self, wire):
743 repo = self._factory.repo(wire)
744 return repo.tags()
793 cache_on, context_uid, repo_id = self._cache_on(wire)
794 @self.region.conditional_cache_on_arguments(condition=cache_on)
795 def _tags(_context_uid, _repo_id):
796 repo = self._factory.repo(wire)
797 return repo.tags()
798
799 return _tags(context_uid, repo_id)
745 800
746 801 @reraise_safe_exceptions
747 802 def update(self, wire, node=None, clean=False):
@@ -762,8 +817,7 b' class HgRemote(object):'
762 817 return output.getvalue()
763 818
764 819 @reraise_safe_exceptions
765 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
766 hooks=True):
820 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
767 821 repo = self._factory.repo(wire)
768 822 baseui = self._factory._create_config(wire['config'], hooks=hooks)
769 823
@@ -806,8 +860,7 b' class HgRemote(object):'
806 860 return hex(a)
807 861
808 862 @reraise_safe_exceptions
809 def push(self, wire, revisions, dest_path, hooks=True,
810 push_branches=False):
863 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
811 864 repo = self._factory.repo(wire)
812 865 baseui = self._factory._create_config(wire['config'], hooks=hooks)
813 866 commands.push(baseui, repo, dest=dest_path, rev=revisions,
@@ -846,7 +899,6 b' class HgRemote(object):'
846 899 repo.ui.setconfig('ui', 'username', username)
847 900 commands.commit(baseui, repo, message=message, close_branch=close_branch)
848 901
849
850 902 @reraise_safe_exceptions
851 903 def rebase(self, wire, source=None, dest=None, abort=False):
852 904 repo = self._factory.repo(wire)
@@ -98,19 +98,7 b' class SubversionFactory(RepoFactory):'
98 98 """
99 99 Get a repository instance for the given path.
100 100 """
101 region = self._cache_region
102 context = wire.get('context', None)
103 repo_path = wire.get('path', '')
104 context_uid = '{}'.format(context)
105 cache = wire.get('cache', True)
106 cache_on = context and cache
107
108 @region.conditional_cache_on_arguments(condition=cache_on)
109 def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id):
110 return self._create_repo(wire, create, compatible_version)
111
112 return create_new_repo(self.repo_type, repo_path, context_uid,
113 compatible_version)
101 return self._create_repo(wire, create, compatible_version)
114 102
115 103
116 104 NODE_TYPE_MAPPING = {
@@ -126,6 +114,15 b' class SvnRemote(object):'
126 114 # TODO: Remove once we do not use internal Mercurial objects anymore
127 115 # for subversion
128 116 self._hg_factory = hg_factory
117 self.region = self._factory._cache_region
118
119 def _cache_on(self, wire):
120 context = wire.get('context', '')
121 context_uid = '{}'.format(context)
122 repo_id = wire.get('repo_id', '')
123 cache = wire.get('cache', True)
124 cache_on = context and cache
125 return cache_on, context_uid, repo_id
129 126
130 127 @reraise_safe_exceptions
131 128 def discover_svn_version(self):
@@ -138,7 +135,6 b' class SvnRemote(object):'
138 135
139 136 @reraise_safe_exceptions
140 137 def is_empty(self, wire):
141 repo = self._factory.repo(wire)
142 138
143 139 try:
144 140 return self.lookup(wire, -1) == 0
@@ -216,9 +212,14 b' class SvnRemote(object):'
216 212 return start_rev, end_rev
217 213
218 214 def revision_properties(self, wire, revision):
219 repo = self._factory.repo(wire)
220 fs_ptr = svn.repos.fs(repo)
221 return svn.fs.revision_proplist(fs_ptr, revision)
215
216 cache_on, context_uid, repo_id = self._cache_on(wire)
217 @self.region.conditional_cache_on_arguments(condition=cache_on)
218 def _revision_properties(_context_uid, _repo_id, _revision):
219 repo = self._factory.repo(wire)
220 fs_ptr = svn.repos.fs(repo)
221 return svn.fs.revision_proplist(fs_ptr, revision)
222 return _revision_properties(context_uid, repo_id, revision)
222 223
223 224 def revision_changes(self, wire, revision):
224 225
@@ -264,22 +265,27 b' class SvnRemote(object):'
264 265 }
265 266 return changes
266 267
268 @reraise_safe_exceptions
267 269 def node_history(self, wire, path, revision, limit):
268 cross_copies = False
269 repo = self._factory.repo(wire)
270 fsobj = svn.repos.fs(repo)
271 rev_root = svn.fs.revision_root(fsobj, revision)
270 cache_on, context_uid, repo_id = self._cache_on(wire)
271 @self.region.conditional_cache_on_arguments(condition=cache_on)
272         def _node_history(_context_uid, _repo_id, _path, _revision, _limit):
273 cross_copies = False
274 repo = self._factory.repo(wire)
275 fsobj = svn.repos.fs(repo)
276 rev_root = svn.fs.revision_root(fsobj, revision)
272 277
273 history_revisions = []
274 history = svn.fs.node_history(rev_root, path)
275 history = svn.fs.history_prev(history, cross_copies)
276 while history:
277 __, node_revision = svn.fs.history_location(history)
278 history_revisions.append(node_revision)
279 if limit and len(history_revisions) >= limit:
280 break
278 history_revisions = []
279 history = svn.fs.node_history(rev_root, path)
281 280 history = svn.fs.history_prev(history, cross_copies)
282 return history_revisions
281 while history:
282 __, node_revision = svn.fs.history_location(history)
283 history_revisions.append(node_revision)
284 if limit and len(history_revisions) >= limit:
285 break
286 history = svn.fs.history_prev(history, cross_copies)
287 return history_revisions
288         return _node_history(context_uid, repo_id, path, revision, limit)
283 289
284 290 def node_properties(self, wire, path, revision):
285 291 repo = self._factory.repo(wire)
@@ -314,27 +320,37 b' class SvnRemote(object):'
314 320
315 321 return annotations
316 322
317 def get_node_type(self, wire, path, rev=None):
318 repo = self._factory.repo(wire)
319 fs_ptr = svn.repos.fs(repo)
320 if rev is None:
321 rev = svn.fs.youngest_rev(fs_ptr)
322 root = svn.fs.revision_root(fs_ptr, rev)
323 node = svn.fs.check_path(root, path)
324 return NODE_TYPE_MAPPING.get(node, None)
323 def get_node_type(self, wire, path, revision=None):
324
325 cache_on, context_uid, repo_id = self._cache_on(wire)
326 @self.region.conditional_cache_on_arguments(condition=cache_on)
327 def _get_node_type(_context_uid, _repo_id, _path, _revision):
328 repo = self._factory.repo(wire)
329 fs_ptr = svn.repos.fs(repo)
330 if _revision is None:
331 _revision = svn.fs.youngest_rev(fs_ptr)
332 root = svn.fs.revision_root(fs_ptr, _revision)
333 node = svn.fs.check_path(root, path)
334 return NODE_TYPE_MAPPING.get(node, None)
335 return _get_node_type(context_uid, repo_id, path, revision)
325 336
326 337 def get_nodes(self, wire, path, revision=None):
327 repo = self._factory.repo(wire)
328 fsobj = svn.repos.fs(repo)
329 if revision is None:
330 revision = svn.fs.youngest_rev(fsobj)
331 root = svn.fs.revision_root(fsobj, revision)
332 entries = svn.fs.dir_entries(root, path)
333 result = []
334 for entry_path, entry_info in entries.iteritems():
335 result.append(
336 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
337 return result
338
339 cache_on, context_uid, repo_id = self._cache_on(wire)
340 @self.region.conditional_cache_on_arguments(condition=cache_on)
341 def _get_nodes(_context_uid, _repo_id, _path, _revision):
342 repo = self._factory.repo(wire)
343 fsobj = svn.repos.fs(repo)
344 if _revision is None:
345 _revision = svn.fs.youngest_rev(fsobj)
346 root = svn.fs.revision_root(fsobj, _revision)
347 entries = svn.fs.dir_entries(root, path)
348 result = []
349 for entry_path, entry_info in entries.iteritems():
350 result.append(
351 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
352 return result
353 return _get_nodes(context_uid, repo_id, path, revision)
338 354
339 355 def get_file_content(self, wire, path, rev=None):
340 356 repo = self._factory.repo(wire)
@@ -346,13 +362,18 b' class SvnRemote(object):'
346 362 return content.read()
347 363
348 364 def get_file_size(self, wire, path, revision=None):
349 repo = self._factory.repo(wire)
350 fsobj = svn.repos.fs(repo)
351 if revision is None:
352 revision = svn.fs.youngest_revision(fsobj)
353 root = svn.fs.revision_root(fsobj, revision)
354 size = svn.fs.file_length(root, path)
355 return size
365
366 cache_on, context_uid, repo_id = self._cache_on(wire)
367 @self.region.conditional_cache_on_arguments(condition=cache_on)
368 def _get_file_size(_context_uid, _repo_id, _path, _revision):
369 repo = self._factory.repo(wire)
370 fsobj = svn.repos.fs(repo)
371 if _revision is None:
372 _revision = svn.fs.youngest_revision(fsobj)
373 root = svn.fs.revision_root(fsobj, _revision)
374 size = svn.fs.file_length(root, path)
375 return size
376 return _get_file_size(context_uid, repo_id, path, revision)
356 377
357 378 def create_repository(self, wire, compatible_version=None):
358 379 log.info('Creating Subversion repository in path "%s"', wire['path'])
@@ -61,7 +61,7 b' class TestGitFetch(object):'
61 61
62 62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 63 mock_fetch.side_effect = side_effect
64 self.remote_git.pull(wire=None, url='/tmp/', apply_refs=False)
64 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
65 65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67 67
@@ -79,7 +79,7 b' class TestGitFetch(object):'
79 79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 80 mock_fetch.side_effect = side_effect
81 81 self.remote_git.pull(
82 wire=None, url='/tmp/', apply_refs=False,
82 wire={}, url='/tmp/', apply_refs=False,
83 83 refs=selected_refs.keys())
84 84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 85 assert determine_wants.call_count == 0
@@ -95,7 +95,7 b' class TestGitFetch(object):'
95 95
96 96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 97 mock_repo().get_refs.return_value = sample_refs
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
98 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
99 99 mock_repo().get_refs.assert_called_once_with()
100 100 assert remote_refs == sample_refs
101 101
@@ -26,36 +26,17 b' from mock import Mock, MagicMock, patch'
26 26 from vcsserver import exceptions, hg, hgcompat
27 27
28 28
29 class TestHGLookup(object):
30 def setup(self):
31 self.mock_repo = MagicMock()
32 self.mock_repo.__getitem__.side_effect = LookupError(
33 'revision_or_commit_id', 'index', 'message')
34 factory = Mock()
35 factory.repo = Mock(return_value=self.mock_repo)
36 self.remote_hg = hg.HgRemote(factory)
37
38 def test_fail_lookup_hg(self):
39 with pytest.raises(Exception) as exc_info:
40 self.remote_hg.lookup(
41 wire=None, revision='revision_or_commit_id', both=True)
42
43 assert exc_info.value._vcs_kind == 'lookup'
44 assert 'revision_or_commit_id' in exc_info.value.args
45
46
47 29 class TestDiff(object):
48 30 def test_raising_safe_exception_when_lookup_failed(self):
49 repo = Mock()
31
50 32 factory = Mock()
51 factory.repo = Mock(return_value=repo)
52 33 hg_remote = hg.HgRemote(factory)
53 34 with patch('mercurial.patch.diff') as diff_mock:
54 35 diff_mock.side_effect = LookupError(
55 36 'deadbeef', 'index', 'message')
56 37 with pytest.raises(Exception) as exc_info:
57 38 hg_remote.diff(
58 wire=None, rev1='deadbeef', rev2='deadbee1',
39 wire={}, rev1='deadbeef', rev2='deadbee1',
59 40 file_filter=None, opt_git=True, opt_ignorews=True,
60 41 context=3)
61 42 assert type(exc_info.value) == Exception
@@ -45,8 +45,10 b" INVALID_CERTIFICATE_STDERR = '\\n'.join(["
45 45 reason="SVN not packaged for Cygwin")
46 46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
47 47 from vcsserver import svn
48 factory = mock.Mock()
49 factory.repo = mock.Mock(return_value=mock.Mock())
48 50
49 remote = svn.SvnRemote(None)
51 remote = svn.SvnRemote(factory)
50 52 remote.is_path_valid_repository = lambda wire, path: True
51 53
52 54 with mock.patch('subprocess.Popen',
@@ -76,7 +78,10 b' def test_svn_libraries_can_be_imported()'
76 78 def test_username_password_extraction_from_url(example_url, parts):
77 79 from vcsserver import svn
78 80
79 remote = svn.SvnRemote(None)
81 factory = mock.Mock()
82 factory.repo = mock.Mock(return_value=mock.Mock())
83
84 remote = svn.SvnRemote(factory)
80 85 remote.is_path_valid_repository = lambda wire, path: True
81 86
82 87 assert remote.get_url_and_credentials(example_url) == parts