##// END OF EJS Templates
release: merge back stable branch into default
marcink -
r830:96a2fc09 merge default
parent child Browse files
Show More
@@ -1,59 +1,63 b''
1 c6fad7d1e61f22b1f4a4863eff207a04c27e9462 v4.0.0
1 c6fad7d1e61f22b1f4a4863eff207a04c27e9462 v4.0.0
2 77b6e243b4cc5b702c15abd6d737798edbac60dc v4.0.1
2 77b6e243b4cc5b702c15abd6d737798edbac60dc v4.0.1
3 a359c072337fdd8e1e71df72cc520b8a9b042f80 v4.1.0
3 a359c072337fdd8e1e71df72cc520b8a9b042f80 v4.1.0
4 49aa7ed030a36b7ceba149a21e587cb5d20b4946 v4.1.1
4 49aa7ed030a36b7ceba149a21e587cb5d20b4946 v4.1.1
5 f38ed1e1a31dce3c170b4d31585ba43471cf0705 v4.1.2
5 f38ed1e1a31dce3c170b4d31585ba43471cf0705 v4.1.2
6 21269ba7bafd8f0c77e79dd86a31eb9bce7643d2 v4.2.0
6 21269ba7bafd8f0c77e79dd86a31eb9bce7643d2 v4.2.0
7 b53930c918c25b2c8f69ceddc6641e511be27fd3 v4.2.1
7 b53930c918c25b2c8f69ceddc6641e511be27fd3 v4.2.1
8 6627ff4119723d8b2b60918e8b1aa49e9f055aab v4.3.0
8 6627ff4119723d8b2b60918e8b1aa49e9f055aab v4.3.0
9 d38f2c2b861dde6c4178923f7cf15ea58b85aa92 v4.3.1
9 d38f2c2b861dde6c4178923f7cf15ea58b85aa92 v4.3.1
10 1232313f9e6adac5ce5399c2a891dc1e72b79022 v4.4.0
10 1232313f9e6adac5ce5399c2a891dc1e72b79022 v4.4.0
11 cbb9f1d329ae5768379cdec55a62ebdd546c4e27 v4.4.1
11 cbb9f1d329ae5768379cdec55a62ebdd546c4e27 v4.4.1
12 24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17 v4.4.2
12 24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17 v4.4.2
13 beaeeaa440cd17471110d4621b8816506c0dff4a v4.5.0
13 beaeeaa440cd17471110d4621b8816506c0dff4a v4.5.0
14 668e5c656f61dd94595611844e1106d1361aa6a7 v4.5.1
14 668e5c656f61dd94595611844e1106d1361aa6a7 v4.5.1
15 ae0640240cb7a77e6dc8c77e64dd80d79732cb5b v4.5.2
15 ae0640240cb7a77e6dc8c77e64dd80d79732cb5b v4.5.2
16 7af06899f426813583fe60449d7517cc49c15b28 v4.6.0
16 7af06899f426813583fe60449d7517cc49c15b28 v4.6.0
17 8f7f4299bf341b43f94dadafa1ea73d6cea2c9ba v4.6.1
17 8f7f4299bf341b43f94dadafa1ea73d6cea2c9ba v4.6.1
18 de00a831a0709ffaac57f948738ea927b97223a9 v4.7.0
18 de00a831a0709ffaac57f948738ea927b97223a9 v4.7.0
19 57f527e0646d731768fb5e0fe742b12a35bdc63b v4.7.1
19 57f527e0646d731768fb5e0fe742b12a35bdc63b v4.7.1
20 f9b09787da9845e4a105f4bffdc252099902cefb v4.7.2
20 f9b09787da9845e4a105f4bffdc252099902cefb v4.7.2
21 0b7c790b726f08385e6ebdf4f257c905787b9244 v4.8.0
21 0b7c790b726f08385e6ebdf4f257c905787b9244 v4.8.0
22 f4123e725b74d0e82fe89982ab8791a66062e2b3 v4.9.0
22 f4123e725b74d0e82fe89982ab8791a66062e2b3 v4.9.0
23 940bac044a0fe1ec839759df81399b50141be720 v4.9.1
23 940bac044a0fe1ec839759df81399b50141be720 v4.9.1
24 582d9ebbe46bdddac4b26eacae36ee5ecabca267 v4.10.0
24 582d9ebbe46bdddac4b26eacae36ee5ecabca267 v4.10.0
25 12fbd08d0ab57acce9c0bdccee75633cfa08d7f4 v4.10.1
25 12fbd08d0ab57acce9c0bdccee75633cfa08d7f4 v4.10.1
26 78352f95021a9d128f5803fdbca7036daef5dabe v4.10.2
26 78352f95021a9d128f5803fdbca7036daef5dabe v4.10.2
27 a47ccfb020cda78c8680e3844aaf0b82b1390f3b v4.10.3
27 a47ccfb020cda78c8680e3844aaf0b82b1390f3b v4.10.3
28 347ae9ae544bba8deb417995285287a3b6be1611 v4.10.4
28 347ae9ae544bba8deb417995285287a3b6be1611 v4.10.4
29 9b257ac49841f850434be0d518baca0827e6c8cc v4.10.5
29 9b257ac49841f850434be0d518baca0827e6c8cc v4.10.5
30 e8bf26eea118694edc4ffe50c6c5aa91022bc434 v4.10.6
30 e8bf26eea118694edc4ffe50c6c5aa91022bc434 v4.10.6
31 71fa9274ba59fb982104f0b9b3d0d024c78675f7 v4.11.0
31 71fa9274ba59fb982104f0b9b3d0d024c78675f7 v4.11.0
32 92471577ef25636e5babe8001d47fc8e51521522 v4.11.1
32 92471577ef25636e5babe8001d47fc8e51521522 v4.11.1
33 0277edbcda5a8d075e1e41a95bcee6dcf21f3f77 v4.11.2
33 0277edbcda5a8d075e1e41a95bcee6dcf21f3f77 v4.11.2
34 6c5ecbf0778ef870e5b23d9fad5340135b563356 v4.11.3
34 6c5ecbf0778ef870e5b23d9fad5340135b563356 v4.11.3
35 be788a89a939ebd63606220064bd624fa9d5c9c9 v4.11.4
35 be788a89a939ebd63606220064bd624fa9d5c9c9 v4.11.4
36 15c90a04098a373ac761fab07695fd80dde3bcdb v4.11.5
36 15c90a04098a373ac761fab07695fd80dde3bcdb v4.11.5
37 77aff155b3251cc00394a49f5e8f2c99e33149a7 v4.11.6
37 77aff155b3251cc00394a49f5e8f2c99e33149a7 v4.11.6
38 c218a1ce5d370c2e671d42a91684b3fc2c91b81d v4.12.0
38 c218a1ce5d370c2e671d42a91684b3fc2c91b81d v4.12.0
39 80085fb846cc948195a5c76b579ca34cbc49b59b v4.12.1
39 80085fb846cc948195a5c76b579ca34cbc49b59b v4.12.1
40 346f04fc8a18df3235defbe6e71bd552c0d46481 v4.12.2
40 346f04fc8a18df3235defbe6e71bd552c0d46481 v4.12.2
41 764fdd752322f3e0c13ea00957f2d548bf4363a7 v4.12.3
41 764fdd752322f3e0c13ea00957f2d548bf4363a7 v4.12.3
42 b58038974a5cecbb9c100d32ad2e4c68582f1a78 v4.12.4
42 b58038974a5cecbb9c100d32ad2e4c68582f1a78 v4.12.4
43 e1d42d92a0fec0c80b56c82f37bc7b5472613706 v4.13.0
43 e1d42d92a0fec0c80b56c82f37bc7b5472613706 v4.13.0
44 c3ded3ff17e9bb2a47002a808984a7a946f58a1c v4.13.1
44 c3ded3ff17e9bb2a47002a808984a7a946f58a1c v4.13.1
45 7ff81aa47b1b40cdef9dd5bcdd439f59c269db3d v4.13.2
45 7ff81aa47b1b40cdef9dd5bcdd439f59c269db3d v4.13.2
46 628a08e6aaeff2c3f9e0e268e854f870e6778e53 v4.13.3
46 628a08e6aaeff2c3f9e0e268e854f870e6778e53 v4.13.3
47 941d675f10cfa7d774815bfacfb37085751b7a0d v4.14.0
47 941d675f10cfa7d774815bfacfb37085751b7a0d v4.14.0
48 75e11d32c0be0a457198f07888e7ef650cfa6888 v4.14.1
48 75e11d32c0be0a457198f07888e7ef650cfa6888 v4.14.1
49 6c6f49fda0191c4641dcd43aa0d4376b8b728d40 v4.15.0
49 6c6f49fda0191c4641dcd43aa0d4376b8b728d40 v4.15.0
50 184dea5e01c36e6474c83d3bb34719cdfec22b0d v4.15.1
50 184dea5e01c36e6474c83d3bb34719cdfec22b0d v4.15.1
51 a4dc3669345553582296b2ce1485229a6c6f0522 v4.15.2
51 a4dc3669345553582296b2ce1485229a6c6f0522 v4.15.2
52 d2a4a1a66f204668841da1cdccfa29083e1ef7a3 v4.16.0
52 d2a4a1a66f204668841da1cdccfa29083e1ef7a3 v4.16.0
53 744cf8f2c8f23051978fc293404bf475cc5a31f6 v4.16.1
53 744cf8f2c8f23051978fc293404bf475cc5a31f6 v4.16.1
54 e68aff93ce4ad11fea13420e914f7dfb05c39566 v4.16.2
54 e68aff93ce4ad11fea13420e914f7dfb05c39566 v4.16.2
55 647aeff9752dc1aa00796fa280d0d2ce2f511bc9 v4.17.0
55 647aeff9752dc1aa00796fa280d0d2ce2f511bc9 v4.17.0
56 5e0c2990e095bba1dc903cf0e6ef6ac035e0ccf9 v4.17.1
56 5e0c2990e095bba1dc903cf0e6ef6ac035e0ccf9 v4.17.1
57 8a824544d95037d76d99b104b5d2363858101d53 v4.17.2
57 8a824544d95037d76d99b104b5d2363858101d53 v4.17.2
58 ccd806a2d9482f61bd7e8956a02a28eb24a1d46a v4.17.3
58 ccd806a2d9482f61bd7e8956a02a28eb24a1d46a v4.17.3
59 e533ca02ccc205189b7bad9f227a312212772022 v4.17.4
59 e533ca02ccc205189b7bad9f227a312212772022 v4.17.4
60 ba6a6dc9ecd7fd8b1dcd6eb0c4ee0210e897c426 v4.18.0
61 17bc818b41bcf6883b9ff0da31f01d8c2a5d0781 v4.18.1
62 1e9f12aa01f82c335abc9017efe94ce1c30b52ba v4.18.2
63 f4cc6b3c5680bdf4541d7d442fbb7086640fb547 v4.18.3
@@ -1,1177 +1,1181 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib
25 import urllib
26 import urllib2
26 import urllib2
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from dulwich import index, objects
32 from dulwich import index, objects
33 from dulwich.client import HttpGitClient, LocalGitClient
33 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.errors import (
34 from dulwich.errors import (
35 NotGitRepository, ChecksumMismatch, WrongObjectException,
35 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 MissingCommitError, ObjectMissing, HangupException,
36 MissingCommitError, ObjectMissing, HangupException,
37 UnexpectedCommandError)
37 UnexpectedCommandError)
38 from dulwich.repo import Repo as DulwichRepo
38 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.server import update_server_info
39 from dulwich.server import update_server_info
40
40
41 from vcsserver import exceptions, settings, subprocessio
41 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver.utils import safe_str, safe_int
42 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 from vcsserver.base import RepoFactory, obfuscate_qs
43 from vcsserver.base import RepoFactory, obfuscate_qs
44 from vcsserver.hgcompat import (
44 from vcsserver.hgcompat import (
45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 from vcsserver.git_lfs.lib import LFSOidStore
46 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.vcs_base import RemoteBase
47 from vcsserver.vcs_base import RemoteBase
48
48
49 DIR_STAT = stat.S_IFDIR
49 DIR_STAT = stat.S_IFDIR
50 FILE_MODE = stat.S_IFMT
50 FILE_MODE = stat.S_IFMT
51 GIT_LINK = objects.S_IFGITLINK
51 GIT_LINK = objects.S_IFGITLINK
52 PEELED_REF_MARKER = '^{}'
52 PEELED_REF_MARKER = '^{}'
53
53
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
def str_to_dulwich(value):
    """Decode *value* to a unicode object for dulwich.

    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    decoded = value.decode(settings.WIRE_ENCODING)
    return decoded
63
63
64
64
def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral.

    Decorator that translates errors raised by *func* into vcsserver
    exception types that are safe to transport over the wire:

    * object/commit lookup failures -> ``exceptions.LookupException``
    * protocol hangups / unexpected commands -> ``exceptions.VcsException``
    * anything else is re-raised unchanged (see NOTE below).
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
            # missing or corrupt git objects map to a neutral lookup error
            exc = exceptions.LookupException(org_exc=e)
            raise exc(safe_str(e))
        except (HangupException, UnexpectedCommandError) as e:
            # communication-level failures with the git client/server
            exc = exceptions.VcsException(org_exc=e)
            raise exc(safe_str(e))
        except Exception as e:
            # NOTE(marcink): becuase of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            #if not hasattr(e, '_vcs_kind'):
            #log.exception("Unhandled exception in git remote call")
            #raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper
87
87
88
88
class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.
    """
    def __del__(self):
        # `object_store` may be missing if __init__ failed part-way;
        # only close when the repo was fully initialized.
        if hasattr(self, 'object_store'):
            self.close()
100
100
101
101
class Repository(LibGit2Repo):
    """Context-manager wrapper around pygit2's Repository.

    Frees the native libgit2 handle on exit so repeated use does not
    leak native resources.
    """

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # release the underlying libgit2 handle
        self.free()
109
109
110
110
class GitFactory(RepoFactory):
    """Factory producing git repository objects (dulwich or libgit2 backed)."""

    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        # libgit2 takes the raw path; dulwich needs it decoded first
        if not use_libgit2:
            repo_path = str_to_dulwich(wire['path'])
            return Repo(repo_path)
        return Repository(wire['path'])

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        """Shortcut returning a libgit2-backed repository."""
        return self.repo(wire, use_libgit2=True)
129
129
130
130
131 class GitRemote(RemoteBase):
131 class GitRemote(RemoteBase):
132
132
    def __init__(self, factory):
        # factory used to materialize repo objects on each call
        self._factory = factory
        # attribute name -> accessor mapping consumed by bulk_request()
        self._bulk_methods = {
            "date": self.date,
            "author": self.author,
            "branch": self.branch,
            "message": self.message,
            "parents": self.parents,
            "_commit": self.revision,
        }
143
143
144 def _wire_to_config(self, wire):
144 def _wire_to_config(self, wire):
145 if 'config' in wire:
145 if 'config' in wire:
146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 return {}
147 return {}
148
148
149 def _remote_conf(self, config):
149 def _remote_conf(self, config):
150 params = [
150 params = [
151 '-c', 'core.askpass=""',
151 '-c', 'core.askpass=""',
152 ]
152 ]
153 ssl_cert_dir = config.get('vcs_ssl_dir')
153 ssl_cert_dir = config.get('vcs_ssl_dir')
154 if ssl_cert_dir:
154 if ssl_cert_dir:
155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 return params
156 return params
157
157
158 @reraise_safe_exceptions
158 @reraise_safe_exceptions
159 def discover_git_version(self):
159 def discover_git_version(self):
160 stdout, _ = self.run_git_command(
160 stdout, _ = self.run_git_command(
161 {}, ['--version'], _bare=True, _safe=True)
161 {}, ['--version'], _bare=True, _safe=True)
162 prefix = 'git version'
162 prefix = 'git version'
163 if stdout.startswith(prefix):
163 if stdout.startswith(prefix):
164 stdout = stdout[len(prefix):]
164 stdout = stdout[len(prefix):]
165 return stdout.strip()
165 return stdout.strip()
166
166
    @reraise_safe_exceptions
    def is_empty(self, wire):
        """Return True when the repository has no commits."""
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:

            try:
                # resolving HEAD succeeds only when at least one commit exists
                has_head = repo.head.name
                if has_head:
                    return False

                # NOTE(marcink): check again using more expensive method
                return repo.is_empty
            except Exception:
                # best-effort: any failure resolving HEAD is treated as empty
                pass

        return True
183
183
    @reraise_safe_exceptions
    def assert_correct_path(self, wire):
        """Return True if ``wire['path']`` points at a valid git repository.

        The result is cached per (context_uid, repo_id) when caching is
        enabled for this wire.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _assert_correct_path(_context_uid, _repo_id):
            try:
                # merely opening the repo is the validity check
                repo_init = self._factory.repo_libgit2(wire)
                with repo_init as repo:
                    pass
            except pygit2.GitError:
                path = wire.get('path')
                tb = traceback.format_exc()
                log.debug("Invalid Git path `%s`, tb: %s", path, tb)
                return False

            return True
        return _assert_correct_path(context_uid, repo_id)
201
201
202 @reraise_safe_exceptions
202 @reraise_safe_exceptions
203 def bare(self, wire):
203 def bare(self, wire):
204 repo_init = self._factory.repo_libgit2(wire)
204 repo_init = self._factory.repo_libgit2(wire)
205 with repo_init as repo:
205 with repo_init as repo:
206 return repo.is_bare
206 return repo.is_bare
207
207
208 @reraise_safe_exceptions
208 @reraise_safe_exceptions
209 def blob_as_pretty_string(self, wire, sha):
209 def blob_as_pretty_string(self, wire, sha):
210 repo_init = self._factory.repo_libgit2(wire)
210 repo_init = self._factory.repo_libgit2(wire)
211 with repo_init as repo:
211 with repo_init as repo:
212 blob_obj = repo[sha]
212 blob_obj = repo[sha]
213 blob = blob_obj.data
213 blob = blob_obj.data
214 return blob
214 return blob
215
215
216 @reraise_safe_exceptions
216 @reraise_safe_exceptions
217 def blob_raw_length(self, wire, sha):
217 def blob_raw_length(self, wire, sha):
218 cache_on, context_uid, repo_id = self._cache_on(wire)
218 cache_on, context_uid, repo_id = self._cache_on(wire)
219 @self.region.conditional_cache_on_arguments(condition=cache_on)
219 @self.region.conditional_cache_on_arguments(condition=cache_on)
220 def _blob_raw_length(_repo_id, _sha):
220 def _blob_raw_length(_repo_id, _sha):
221
221
222 repo_init = self._factory.repo_libgit2(wire)
222 repo_init = self._factory.repo_libgit2(wire)
223 with repo_init as repo:
223 with repo_init as repo:
224 blob = repo[sha]
224 blob = repo[sha]
225 return blob.size
225 return blob.size
226
226
227 return _blob_raw_length(repo_id, sha)
227 return _blob_raw_length(repo_id, sha)
228
228
229 def _parse_lfs_pointer(self, raw_content):
229 def _parse_lfs_pointer(self, raw_content):
230
230
231 spec_string = 'version https://git-lfs.github.com/spec'
231 spec_string = 'version https://git-lfs.github.com/spec'
232 if raw_content and raw_content.startswith(spec_string):
232 if raw_content and raw_content.startswith(spec_string):
233 pattern = re.compile(r"""
233 pattern = re.compile(r"""
234 (?:\n)?
234 (?:\n)?
235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
237 ^size[ ](?P<oid_size>[0-9]+)\n
237 ^size[ ](?P<oid_size>[0-9]+)\n
238 (?:\n)?
238 (?:\n)?
239 """, re.VERBOSE | re.MULTILINE)
239 """, re.VERBOSE | re.MULTILINE)
240 match = pattern.match(raw_content)
240 match = pattern.match(raw_content)
241 if match:
241 if match:
242 return match.groupdict()
242 return match.groupdict()
243
243
244 return {}
244 return {}
245
245
246 @reraise_safe_exceptions
246 @reraise_safe_exceptions
247 def is_large_file(self, wire, commit_id):
247 def is_large_file(self, wire, commit_id):
248 cache_on, context_uid, repo_id = self._cache_on(wire)
248 cache_on, context_uid, repo_id = self._cache_on(wire)
249
249
250 @self.region.conditional_cache_on_arguments(condition=cache_on)
250 @self.region.conditional_cache_on_arguments(condition=cache_on)
251 def _is_large_file(_repo_id, _sha):
251 def _is_large_file(_repo_id, _sha):
252 repo_init = self._factory.repo_libgit2(wire)
252 repo_init = self._factory.repo_libgit2(wire)
253 with repo_init as repo:
253 with repo_init as repo:
254 blob = repo[commit_id]
254 blob = repo[commit_id]
255 if blob.is_binary:
255 if blob.is_binary:
256 return {}
256 return {}
257
257
258 return self._parse_lfs_pointer(blob.data)
258 return self._parse_lfs_pointer(blob.data)
259
259
260 return _is_large_file(repo_id, commit_id)
260 return _is_large_file(repo_id, commit_id)
261
261
262 @reraise_safe_exceptions
262 @reraise_safe_exceptions
263 def is_binary(self, wire, tree_id):
263 def is_binary(self, wire, tree_id):
264 cache_on, context_uid, repo_id = self._cache_on(wire)
264 cache_on, context_uid, repo_id = self._cache_on(wire)
265
265
266 @self.region.conditional_cache_on_arguments(condition=cache_on)
266 @self.region.conditional_cache_on_arguments(condition=cache_on)
267 def _is_binary(_repo_id, _tree_id):
267 def _is_binary(_repo_id, _tree_id):
268 repo_init = self._factory.repo_libgit2(wire)
268 repo_init = self._factory.repo_libgit2(wire)
269 with repo_init as repo:
269 with repo_init as repo:
270 blob_obj = repo[tree_id]
270 blob_obj = repo[tree_id]
271 return blob_obj.is_binary
271 return blob_obj.is_binary
272
272
273 return _is_binary(repo_id, tree_id)
273 return _is_binary(repo_id, tree_id)
274
274
275 @reraise_safe_exceptions
275 @reraise_safe_exceptions
276 def in_largefiles_store(self, wire, oid):
276 def in_largefiles_store(self, wire, oid):
277 conf = self._wire_to_config(wire)
277 conf = self._wire_to_config(wire)
278 repo_init = self._factory.repo_libgit2(wire)
278 repo_init = self._factory.repo_libgit2(wire)
279 with repo_init as repo:
279 with repo_init as repo:
280 repo_name = repo.path
280 repo_name = repo.path
281
281
282 store_location = conf.get('vcs_git_lfs_store_location')
282 store_location = conf.get('vcs_git_lfs_store_location')
283 if store_location:
283 if store_location:
284
284
285 store = LFSOidStore(
285 store = LFSOidStore(
286 oid=oid, repo=repo_name, store_location=store_location)
286 oid=oid, repo=repo_name, store_location=store_location)
287 return store.has_oid()
287 return store.has_oid()
288
288
289 return False
289 return False
290
290
291 @reraise_safe_exceptions
291 @reraise_safe_exceptions
292 def store_path(self, wire, oid):
292 def store_path(self, wire, oid):
293 conf = self._wire_to_config(wire)
293 conf = self._wire_to_config(wire)
294 repo_init = self._factory.repo_libgit2(wire)
294 repo_init = self._factory.repo_libgit2(wire)
295 with repo_init as repo:
295 with repo_init as repo:
296 repo_name = repo.path
296 repo_name = repo.path
297
297
298 store_location = conf.get('vcs_git_lfs_store_location')
298 store_location = conf.get('vcs_git_lfs_store_location')
299 if store_location:
299 if store_location:
300 store = LFSOidStore(
300 store = LFSOidStore(
301 oid=oid, repo=repo_name, store_location=store_location)
301 oid=oid, repo=repo_name, store_location=store_location)
302 return store.oid_path
302 return store.oid_path
303 raise ValueError('Unable to fetch oid with path {}'.format(oid))
303 raise ValueError('Unable to fetch oid with path {}'.format(oid))
304
304
    @reraise_safe_exceptions
    def bulk_request(self, wire, rev, pre_load):
        """Fetch several commit attributes for *rev* in one call.

        *pre_load* is an iterable of attribute names resolved through
        ``self._bulk_methods``; the result maps attribute name -> value.
        Raises a ``VcsException`` for unknown attribute names.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _rev, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_methods[attr]
                    args = [wire, rev]
                    result[attr] = method(*args)
                except KeyError as e:
                    raise exceptions.VcsException(e)(
                        "Unknown bulk attribute: %s" % attr)
            return result

        # sorted() normalizes the _pre_load argument that is part of the
        # cache key, so request order does not fragment the cache
        return _bulk_request(repo_id, rev, sorted(pre_load))
322
322
323 def _build_opener(self, url):
323 def _build_opener(self, url):
324 handlers = []
324 handlers = []
325 url_obj = url_parser(url)
325 url_obj = url_parser(url)
326 _, authinfo = url_obj.authinfo()
326 _, authinfo = url_obj.authinfo()
327
327
328 if authinfo:
328 if authinfo:
329 # create a password manager
329 # create a password manager
330 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
330 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
331 passmgr.add_password(*authinfo)
331 passmgr.add_password(*authinfo)
332
332
333 handlers.extend((httpbasicauthhandler(passmgr),
333 handlers.extend((httpbasicauthhandler(passmgr),
334 httpdigestauthhandler(passmgr)))
334 httpdigestauthhandler(passmgr)))
335
335
336 return urllib2.build_opener(*handlers)
336 return urllib2.build_opener(*handlers)
337
337
338 def _type_id_to_name(self, type_id):
338 def _type_id_to_name(self, type_id):
339 return {
339 return {
340 1: b'commit',
340 1: b'commit',
341 2: b'tree',
341 2: b'tree',
342 3: b'blob',
342 3: b'blob',
343 4: b'tag'
343 4: b'tag'
344 }[type_id]
344 }[type_id]
345
345
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """Verify that *url* points at a reachable git smart-HTTP server.

        Probes the ``info/refs`` endpoint with a git-like User-Agent.
        Credentials and query strings are obfuscated in all log output.
        Returns True on success; raises ``exceptions.URLError`` when the
        URL cannot be opened or does not answer like a git server.
        """
        url_obj = url_parser(url)
        test_uri, _ = url_obj.authinfo()
        # hide the password and sensitive query args before logging
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)
        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if not test_uri.endswith('info/refs'):
            test_uri = test_uri.rstrip('/') + '/info/refs'

        o = self._build_opener(url)
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git

        q = {"service": 'git-upload-pack'}
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now detect if it's proper git repo
        gitdata = resp.read()
        if 'service=git-upload-pack' in gitdata:
            pass
        elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
            # old style git can return some other format !
            pass
        else:
            raise exceptions.URLError()(
                "url [%s] does not look like an git" % (cleaned_uri,))

        return True
388
388
    @reraise_safe_exceptions
    def clone(self, wire, url, deferred, valid_refs, update_after_clone):
        """Clone *url* by pulling its refs into the repo described by *wire*.

        Only refs starting with one of *valid_refs* are applied; refs
        ending with *deferred* are skipped. When *update_after_clone* is
        set, HEAD is applied and the index is built from its tree.
        """
        # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
        remote_refs = self.pull(wire, url, apply_refs=False)
        repo = self._factory.repo(wire)
        # str.startswith needs a tuple to match multiple prefixes
        if isinstance(valid_refs, list):
            valid_refs = tuple(valid_refs)

        for k in remote_refs:
            # only parse heads/tags and skip so called deferred tags
            if k.startswith(valid_refs) and not k.endswith(deferred):
                repo[k] = remote_refs[k]

        if update_after_clone:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
407
407
408 @reraise_safe_exceptions
408 @reraise_safe_exceptions
409 def branch(self, wire, commit_id):
409 def branch(self, wire, commit_id):
410 cache_on, context_uid, repo_id = self._cache_on(wire)
410 cache_on, context_uid, repo_id = self._cache_on(wire)
411 @self.region.conditional_cache_on_arguments(condition=cache_on)
411 @self.region.conditional_cache_on_arguments(condition=cache_on)
412 def _branch(_context_uid, _repo_id, _commit_id):
412 def _branch(_context_uid, _repo_id, _commit_id):
413 regex = re.compile('^refs/heads')
413 regex = re.compile('^refs/heads')
414
414
415 def filter_with(ref):
415 def filter_with(ref):
416 return regex.match(ref[0]) and ref[1] == _commit_id
416 return regex.match(ref[0]) and ref[1] == _commit_id
417
417
418 branches = filter(filter_with, self.get_refs(wire).items())
418 branches = filter(filter_with, self.get_refs(wire).items())
419 return [x[0].split('refs/heads/')[-1] for x in branches]
419 return [x[0].split('refs/heads/')[-1] for x in branches]
420
420
421 return _branch(context_uid, repo_id, commit_id)
421 return _branch(context_uid, repo_id, commit_id)
422
422
423 @reraise_safe_exceptions
423 @reraise_safe_exceptions
424 def commit_branches(self, wire, commit_id):
424 def commit_branches(self, wire, commit_id):
425 cache_on, context_uid, repo_id = self._cache_on(wire)
425 cache_on, context_uid, repo_id = self._cache_on(wire)
426 @self.region.conditional_cache_on_arguments(condition=cache_on)
426 @self.region.conditional_cache_on_arguments(condition=cache_on)
427 def _commit_branches(_context_uid, _repo_id, _commit_id):
427 def _commit_branches(_context_uid, _repo_id, _commit_id):
428 repo_init = self._factory.repo_libgit2(wire)
428 repo_init = self._factory.repo_libgit2(wire)
429 with repo_init as repo:
429 with repo_init as repo:
430 branches = [x for x in repo.branches.with_commit(_commit_id)]
430 branches = [x for x in repo.branches.with_commit(_commit_id)]
431 return branches
431 return branches
432
432
433 return _commit_branches(context_uid, repo_id, commit_id)
433 return _commit_branches(context_uid, repo_id, commit_id)
434
434
435 @reraise_safe_exceptions
435 @reraise_safe_exceptions
436 def add_object(self, wire, content):
436 def add_object(self, wire, content):
437 repo_init = self._factory.repo_libgit2(wire)
437 repo_init = self._factory.repo_libgit2(wire)
438 with repo_init as repo:
438 with repo_init as repo:
439 blob = objects.Blob()
439 blob = objects.Blob()
440 blob.set_raw_string(content)
440 blob.set_raw_string(content)
441 repo.object_store.add_object(blob)
441 repo.object_store.add_object(blob)
442 return blob.id
442 return blob.id
443
443
    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """
        Build and store a commit from in-memory file changes.

        :param commit_data: dict of attributes set verbatim on the dulwich
            ``Commit`` object (author, committer, message, ...).
        :param branch: branch name created/updated to point at the new commit.
        :param commit_tree: sha of the tree to start from, or a falsy value to
            start from an empty tree.
        :param updated: list of dicts with ``path``, ``node_path``, ``content``
            and ``mode`` keys describing added/changed files.
        :param removed: list of path strings to delete from the tree.
        :returns: sha of the newly created commit.
        """
        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populates it with blobs
        commit_tree = commit_tree and repo[commit_tree] or objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    # DIR_STAT marks the entry as a directory (tree) entry
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree
            trees = [tree]
            # Traverse deep into the forest...
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append(obj)
                        tree = obj
                except KeyError:
                    break
            # Cut down the blob and all rotten trees on the way back...
            for path, tree in reversed(zip(paths, trees)):
                del tree[path]
                if tree:
                    # This tree still has elements - don't remove it or any
                    # of it's parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        for k, v in commit_data.iteritems():
            setattr(commit, k, v)
        object_store.add_object(commit)

        self.create_branch(wire, branch, commit.id)

        # dulwich set-ref
        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id
545
545
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        """
        Fetch from `url` into the repository described by `wire`.

        :param apply_refs: when True, write the fetched refs into the local
            repository.
        :param refs: optional list of ref names to restrict the fetch to.
        :param update_after: when True, set HEAD and check it out into the
            working index after the fetch.
        :returns: dict of the (possibly filtered) remote refs.
        :raises exceptions.AbortException: when `url` is not a Git repository.
        """
        # no scheme (and not the special 'default') -> local filesystem path
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            # NOTE(review): authinfo() presumably strips credentials from the
            # url; auth travels via the opener — confirm
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

        if update_after:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
        return remote_refs
598
598
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None, all_refs=False):
        """
        Fetch refs from a remote `url` into the local repository via the
        git CLI.

        Runs ``git ls-remote`` first to discover refs, then fetches them in
        chunks. Peeled tag refs and HEAD are never synced.

        :param refs: optional sha (or list of shas) — only refs resolving to
            these shas are fetched.
        :param all_refs: when True, ls-remote is not limited to heads/tags.
        :returns: OrderedDict mapping remote ref name -> sha.
        """
        repo = self._factory.repo(wire)
        # allow passing a single sha as a plain string
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]

        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        cmd = ['ls-remote']
        if not all_refs:
            cmd += ['--heads', '--tags']
        cmd += [url]
        output, __ = self.run_git_command(
            wire, cmd, fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        # ls-remote output lines are "<sha>\t<refname>"
        for ref_line in output.splitlines():
            sha, ref = ref_line.split('\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in ['HEAD']:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append('{}:{}'.format(ref, ref))
            elif not refs:
                fetch_refs.append('{}:{}'.format(ref, ref))
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

        if fetch_refs:
            # chunked to keep the command line length bounded
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                _out, _err = self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs
652
652
653 @reraise_safe_exceptions
653 @reraise_safe_exceptions
654 def sync_push(self, wire, url, refs=None):
654 def sync_push(self, wire, url, refs=None):
655 if not self.check_url(url, wire):
655 if not self.check_url(url, wire):
656 return
656 return
657 config = self._wire_to_config(wire)
657 config = self._wire_to_config(wire)
658 self._factory.repo(wire)
658 self._factory.repo(wire)
659 self.run_git_command(
659 self.run_git_command(
660 wire, ['push', url, '--mirror'], fail_on_stderr=False,
660 wire, ['push', url, '--mirror'], fail_on_stderr=False,
661 _copts=self._remote_conf(config),
661 _copts=self._remote_conf(config),
662 extra_env={'GIT_TERMINAL_PROMPT': '0'})
662 extra_env={'GIT_TERMINAL_PROMPT': '0'})
663
663
664 @reraise_safe_exceptions
664 @reraise_safe_exceptions
665 def get_remote_refs(self, wire, url):
665 def get_remote_refs(self, wire, url):
666 repo = Repo(url)
666 repo = Repo(url)
667 return repo.get_refs()
667 return repo.get_refs()
668
668
669 @reraise_safe_exceptions
669 @reraise_safe_exceptions
670 def get_description(self, wire):
670 def get_description(self, wire):
671 repo = self._factory.repo(wire)
671 repo = self._factory.repo(wire)
672 return repo.get_description()
672 return repo.get_description()
673
673
674 @reraise_safe_exceptions
674 @reraise_safe_exceptions
675 def get_missing_revs(self, wire, rev1, rev2, path2):
675 def get_missing_revs(self, wire, rev1, rev2, path2):
676 repo = self._factory.repo(wire)
676 repo = self._factory.repo(wire)
677 LocalGitClient(thin_packs=False).fetch(path2, repo)
677 LocalGitClient(thin_packs=False).fetch(path2, repo)
678
678
679 wire_remote = wire.copy()
679 wire_remote = wire.copy()
680 wire_remote['path'] = path2
680 wire_remote['path'] = path2
681 repo_remote = self._factory.repo(wire_remote)
681 repo_remote = self._factory.repo(wire_remote)
682 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
682 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
683
683
684 revs = [
684 revs = [
685 x.commit.id
685 x.commit.id
686 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
686 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
687 return revs
687 return revs
688
688
    @reraise_safe_exceptions
    def get_object(self, wire, sha):
        """
        Resolve `sha` (a commit id or revision expression) to basic object
        data: ``{'id', 'type', 'commit_id', 'idx'}``.

        :raises exceptions.LookupException: when the revision does not exist,
            or when the resolved commit is dangling (reachable from no branch).
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except (KeyError, ValueError) as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    # annotated tag: peel to the tagged object
                    commit = repo.get(commit.target)
                    is_tag = True

                check_dangling = True
                if is_tag:
                    check_dangling = False

                # we used a reference and it parsed means we're not having a dangling commit
                if sha != commit.hex:
                    check_dangling = False

                if check_dangling:
                    # check for dangling commit
                    for branch in repo.branches.with_commit(commit.hex):
                        if branch:
                            break
                    else:
                        # for/else: no branch contains this commit
                        raise exceptions.LookupException(None)(missing_commit_err)

                commit_id = commit.hex
                type_id = commit.type

                return {
                    'id': commit_id,
                    'type': self._type_id_to_name(type_id),
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)
735
735
736 @reraise_safe_exceptions
736 @reraise_safe_exceptions
737 def get_refs(self, wire):
737 def get_refs(self, wire):
738 cache_on, context_uid, repo_id = self._cache_on(wire)
738 cache_on, context_uid, repo_id = self._cache_on(wire)
739 @self.region.conditional_cache_on_arguments(condition=cache_on)
739 @self.region.conditional_cache_on_arguments(condition=cache_on)
740 def _get_refs(_context_uid, _repo_id):
740 def _get_refs(_context_uid, _repo_id):
741
741
742 repo_init = self._factory.repo_libgit2(wire)
742 repo_init = self._factory.repo_libgit2(wire)
743 with repo_init as repo:
743 with repo_init as repo:
744 regex = re.compile('^refs/(heads|tags)/')
744 regex = re.compile('^refs/(heads|tags)/')
745 return {x.name: x.target.hex for x in
745 return {x.name: x.target.hex for x in
746 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
746 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
747
747
748 return _get_refs(context_uid, repo_id)
748 return _get_refs(context_uid, repo_id)
749
749
750 @reraise_safe_exceptions
750 @reraise_safe_exceptions
751 def get_branch_pointers(self, wire):
751 def get_branch_pointers(self, wire):
752 cache_on, context_uid, repo_id = self._cache_on(wire)
752 cache_on, context_uid, repo_id = self._cache_on(wire)
753 @self.region.conditional_cache_on_arguments(condition=cache_on)
753 @self.region.conditional_cache_on_arguments(condition=cache_on)
754 def _get_branch_pointers(_context_uid, _repo_id):
754 def _get_branch_pointers(_context_uid, _repo_id):
755
755
756 repo_init = self._factory.repo_libgit2(wire)
756 repo_init = self._factory.repo_libgit2(wire)
757 regex = re.compile('^refs/heads')
757 regex = re.compile('^refs/heads')
758 with repo_init as repo:
758 with repo_init as repo:
759 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
759 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
760 return {x.target.hex: x.shorthand for x in branches}
760 return {x.target.hex: x.shorthand for x in branches}
761
761
762 return _get_branch_pointers(context_uid, repo_id)
762 return _get_branch_pointers(context_uid, repo_id)
763
763
764 @reraise_safe_exceptions
764 @reraise_safe_exceptions
765 def head(self, wire, show_exc=True):
765 def head(self, wire, show_exc=True):
766 cache_on, context_uid, repo_id = self._cache_on(wire)
766 cache_on, context_uid, repo_id = self._cache_on(wire)
767 @self.region.conditional_cache_on_arguments(condition=cache_on)
767 @self.region.conditional_cache_on_arguments(condition=cache_on)
768 def _head(_context_uid, _repo_id, _show_exc):
768 def _head(_context_uid, _repo_id, _show_exc):
769 repo_init = self._factory.repo_libgit2(wire)
769 repo_init = self._factory.repo_libgit2(wire)
770 with repo_init as repo:
770 with repo_init as repo:
771 try:
771 try:
772 return repo.head.peel().hex
772 return repo.head.peel().hex
773 except Exception:
773 except Exception:
774 if show_exc:
774 if show_exc:
775 raise
775 raise
776 return _head(context_uid, repo_id, show_exc)
776 return _head(context_uid, repo_id, show_exc)
777
777
778 @reraise_safe_exceptions
778 @reraise_safe_exceptions
779 def init(self, wire):
779 def init(self, wire):
780 repo_path = str_to_dulwich(wire['path'])
780 repo_path = str_to_dulwich(wire['path'])
781 self.repo = Repo.init(repo_path)
781 self.repo = Repo.init(repo_path)
782
782
783 @reraise_safe_exceptions
783 @reraise_safe_exceptions
784 def init_bare(self, wire):
784 def init_bare(self, wire):
785 repo_path = str_to_dulwich(wire['path'])
785 repo_path = str_to_dulwich(wire['path'])
786 self.repo = Repo.init_bare(repo_path)
786 self.repo = Repo.init_bare(repo_path)
787
787
788 @reraise_safe_exceptions
788 @reraise_safe_exceptions
789 def revision(self, wire, rev):
789 def revision(self, wire, rev):
790
790
791 cache_on, context_uid, repo_id = self._cache_on(wire)
791 cache_on, context_uid, repo_id = self._cache_on(wire)
792 @self.region.conditional_cache_on_arguments(condition=cache_on)
792 @self.region.conditional_cache_on_arguments(condition=cache_on)
793 def _revision(_context_uid, _repo_id, _rev):
793 def _revision(_context_uid, _repo_id, _rev):
794 repo_init = self._factory.repo_libgit2(wire)
794 repo_init = self._factory.repo_libgit2(wire)
795 with repo_init as repo:
795 with repo_init as repo:
796 commit = repo[rev]
796 commit = repo[rev]
797 obj_data = {
797 obj_data = {
798 'id': commit.id.hex,
798 'id': commit.id.hex,
799 }
799 }
800 # tree objects itself don't have tree_id attribute
800 # tree objects itself don't have tree_id attribute
801 if hasattr(commit, 'tree_id'):
801 if hasattr(commit, 'tree_id'):
802 obj_data['tree'] = commit.tree_id.hex
802 obj_data['tree'] = commit.tree_id.hex
803
803
804 return obj_data
804 return obj_data
805 return _revision(context_uid, repo_id, rev)
805 return _revision(context_uid, repo_id, rev)
806
806
807 @reraise_safe_exceptions
807 @reraise_safe_exceptions
808 def date(self, wire, commit_id):
808 def date(self, wire, commit_id):
809 cache_on, context_uid, repo_id = self._cache_on(wire)
809 cache_on, context_uid, repo_id = self._cache_on(wire)
810 @self.region.conditional_cache_on_arguments(condition=cache_on)
810 @self.region.conditional_cache_on_arguments(condition=cache_on)
811 def _date(_repo_id, _commit_id):
811 def _date(_repo_id, _commit_id):
812 repo_init = self._factory.repo_libgit2(wire)
812 repo_init = self._factory.repo_libgit2(wire)
813 with repo_init as repo:
813 with repo_init as repo:
814 commit = repo[commit_id]
814 commit = repo[commit_id]
815
815
816 if hasattr(commit, 'commit_time'):
816 if hasattr(commit, 'commit_time'):
817 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
817 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
818 else:
818 else:
819 commit = commit.get_object()
819 commit = commit.get_object()
820 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
820 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
821
821
822 # TODO(marcink): check dulwich difference of offset vs timezone
822 # TODO(marcink): check dulwich difference of offset vs timezone
823 return [commit_time, commit_time_offset]
823 return [commit_time, commit_time_offset]
824 return _date(repo_id, commit_id)
824 return _date(repo_id, commit_id)
825
825
    @reraise_safe_exceptions
    def author(self, wire, commit_id):
        """Return the commit author as ``u"Name <email>"``, or just the name
        when no email is recorded."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _author(_repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[commit_id]

                # objects without an author attribute (presumably tags) are
                # resolved to their target object first
                if hasattr(commit, 'author'):
                    author = commit.author
                else:
                    author = commit.get_object().author

                if author.email:
                    return u"{} <{}>".format(author.name, author.email)

                # NOTE(review): the fallback presumably guards against a
                # decode error on non-ascii names — confirm; raw_name is then
                # decoded via safe_unicode
                try:
                    return u"{}".format(author.name)
                except Exception:
                    return u"{}".format(safe_unicode(author.raw_name))

        return _author(repo_id, commit_id)
845
849
846 @reraise_safe_exceptions
850 @reraise_safe_exceptions
847 def message(self, wire, commit_id):
851 def message(self, wire, commit_id):
848 cache_on, context_uid, repo_id = self._cache_on(wire)
852 cache_on, context_uid, repo_id = self._cache_on(wire)
849 @self.region.conditional_cache_on_arguments(condition=cache_on)
853 @self.region.conditional_cache_on_arguments(condition=cache_on)
850 def _message(_repo_id, _commit_id):
854 def _message(_repo_id, _commit_id):
851 repo_init = self._factory.repo_libgit2(wire)
855 repo_init = self._factory.repo_libgit2(wire)
852 with repo_init as repo:
856 with repo_init as repo:
853 commit = repo[commit_id]
857 commit = repo[commit_id]
854 return commit.message
858 return commit.message
855 return _message(repo_id, commit_id)
859 return _message(repo_id, commit_id)
856
860
857 @reraise_safe_exceptions
861 @reraise_safe_exceptions
858 def parents(self, wire, commit_id):
862 def parents(self, wire, commit_id):
859 cache_on, context_uid, repo_id = self._cache_on(wire)
863 cache_on, context_uid, repo_id = self._cache_on(wire)
860 @self.region.conditional_cache_on_arguments(condition=cache_on)
864 @self.region.conditional_cache_on_arguments(condition=cache_on)
861 def _parents(_repo_id, _commit_id):
865 def _parents(_repo_id, _commit_id):
862 repo_init = self._factory.repo_libgit2(wire)
866 repo_init = self._factory.repo_libgit2(wire)
863 with repo_init as repo:
867 with repo_init as repo:
864 commit = repo[commit_id]
868 commit = repo[commit_id]
865 if hasattr(commit, 'parent_ids'):
869 if hasattr(commit, 'parent_ids'):
866 parent_ids = commit.parent_ids
870 parent_ids = commit.parent_ids
867 else:
871 else:
868 parent_ids = commit.get_object().parent_ids
872 parent_ids = commit.get_object().parent_ids
869
873
870 return [x.hex for x in parent_ids]
874 return [x.hex for x in parent_ids]
871 return _parents(repo_id, commit_id)
875 return _parents(repo_id, commit_id)
872
876
873 @reraise_safe_exceptions
877 @reraise_safe_exceptions
874 def children(self, wire, commit_id):
878 def children(self, wire, commit_id):
875 cache_on, context_uid, repo_id = self._cache_on(wire)
879 cache_on, context_uid, repo_id = self._cache_on(wire)
876 @self.region.conditional_cache_on_arguments(condition=cache_on)
880 @self.region.conditional_cache_on_arguments(condition=cache_on)
877 def _children(_repo_id, _commit_id):
881 def _children(_repo_id, _commit_id):
878 output, __ = self.run_git_command(
882 output, __ = self.run_git_command(
879 wire, ['rev-list', '--all', '--children'])
883 wire, ['rev-list', '--all', '--children'])
880
884
881 child_ids = []
885 child_ids = []
882 pat = re.compile(r'^%s' % commit_id)
886 pat = re.compile(r'^%s' % commit_id)
883 for l in output.splitlines():
887 for l in output.splitlines():
884 if pat.match(l):
888 if pat.match(l):
885 found_ids = l.split(' ')[1:]
889 found_ids = l.split(' ')[1:]
886 child_ids.extend(found_ids)
890 child_ids.extend(found_ids)
887
891
888 return child_ids
892 return child_ids
889 return _children(repo_id, commit_id)
893 return _children(repo_id, commit_id)
890
894
891 @reraise_safe_exceptions
895 @reraise_safe_exceptions
892 def set_refs(self, wire, key, value):
896 def set_refs(self, wire, key, value):
893 repo_init = self._factory.repo_libgit2(wire)
897 repo_init = self._factory.repo_libgit2(wire)
894 with repo_init as repo:
898 with repo_init as repo:
895 repo.references.create(key, value, force=True)
899 repo.references.create(key, value, force=True)
896
900
897 @reraise_safe_exceptions
901 @reraise_safe_exceptions
898 def create_branch(self, wire, branch_name, commit_id, force=False):
902 def create_branch(self, wire, branch_name, commit_id, force=False):
899 repo_init = self._factory.repo_libgit2(wire)
903 repo_init = self._factory.repo_libgit2(wire)
900 with repo_init as repo:
904 with repo_init as repo:
901 commit = repo[commit_id]
905 commit = repo[commit_id]
902
906
903 if force:
907 if force:
904 repo.branches.local.create(branch_name, commit, force=force)
908 repo.branches.local.create(branch_name, commit, force=force)
905 elif not repo.branches.get(branch_name):
909 elif not repo.branches.get(branch_name):
906 # create only if that branch isn't existing
910 # create only if that branch isn't existing
907 repo.branches.local.create(branch_name, commit, force=force)
911 repo.branches.local.create(branch_name, commit, force=force)
908
912
909 @reraise_safe_exceptions
913 @reraise_safe_exceptions
910 def remove_ref(self, wire, key):
914 def remove_ref(self, wire, key):
911 repo_init = self._factory.repo_libgit2(wire)
915 repo_init = self._factory.repo_libgit2(wire)
912 with repo_init as repo:
916 with repo_init as repo:
913 repo.references.delete(key)
917 repo.references.delete(key)
914
918
915 @reraise_safe_exceptions
919 @reraise_safe_exceptions
916 def tag_remove(self, wire, tag_name):
920 def tag_remove(self, wire, tag_name):
917 repo_init = self._factory.repo_libgit2(wire)
921 repo_init = self._factory.repo_libgit2(wire)
918 with repo_init as repo:
922 with repo_init as repo:
919 key = 'refs/tags/{}'.format(tag_name)
923 key = 'refs/tags/{}'.format(tag_name)
920 repo.references.delete(key)
924 repo.references.delete(key)
921
925
922 @reraise_safe_exceptions
926 @reraise_safe_exceptions
923 def tree_changes(self, wire, source_id, target_id):
927 def tree_changes(self, wire, source_id, target_id):
924 # TODO(marcink): remove this seems it's only used by tests
928 # TODO(marcink): remove this seems it's only used by tests
925 repo = self._factory.repo(wire)
929 repo = self._factory.repo(wire)
926 source = repo[source_id].tree if source_id else None
930 source = repo[source_id].tree if source_id else None
927 target = repo[target_id].tree
931 target = repo[target_id].tree
928 result = repo.object_store.tree_changes(source, target)
932 result = repo.object_store.tree_changes(source, target)
929 return list(result)
933 return list(result)
930
934
931 @reraise_safe_exceptions
935 @reraise_safe_exceptions
932 def tree_and_type_for_path(self, wire, commit_id, path):
936 def tree_and_type_for_path(self, wire, commit_id, path):
933
937
934 cache_on, context_uid, repo_id = self._cache_on(wire)
938 cache_on, context_uid, repo_id = self._cache_on(wire)
935 @self.region.conditional_cache_on_arguments(condition=cache_on)
939 @self.region.conditional_cache_on_arguments(condition=cache_on)
936 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
940 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
937 repo_init = self._factory.repo_libgit2(wire)
941 repo_init = self._factory.repo_libgit2(wire)
938
942
939 with repo_init as repo:
943 with repo_init as repo:
940 commit = repo[commit_id]
944 commit = repo[commit_id]
941 try:
945 try:
942 tree = commit.tree[path]
946 tree = commit.tree[path]
943 except KeyError:
947 except KeyError:
944 return None, None, None
948 return None, None, None
945
949
946 return tree.id.hex, tree.type, tree.filemode
950 return tree.id.hex, tree.type, tree.filemode
947 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
951 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
948
952
949 @reraise_safe_exceptions
953 @reraise_safe_exceptions
950 def tree_items(self, wire, tree_id):
954 def tree_items(self, wire, tree_id):
951 cache_on, context_uid, repo_id = self._cache_on(wire)
955 cache_on, context_uid, repo_id = self._cache_on(wire)
952 @self.region.conditional_cache_on_arguments(condition=cache_on)
956 @self.region.conditional_cache_on_arguments(condition=cache_on)
953 def _tree_items(_repo_id, _tree_id):
957 def _tree_items(_repo_id, _tree_id):
954
958
955 repo_init = self._factory.repo_libgit2(wire)
959 repo_init = self._factory.repo_libgit2(wire)
956 with repo_init as repo:
960 with repo_init as repo:
957 try:
961 try:
958 tree = repo[tree_id]
962 tree = repo[tree_id]
959 except KeyError:
963 except KeyError:
960 raise ObjectMissing('No tree with id: {}'.format(tree_id))
964 raise ObjectMissing('No tree with id: {}'.format(tree_id))
961
965
962 result = []
966 result = []
963 for item in tree:
967 for item in tree:
964 item_sha = item.hex
968 item_sha = item.hex
965 item_mode = item.filemode
969 item_mode = item.filemode
966 item_type = item.type
970 item_type = item.type
967
971
968 if item_type == 'commit':
972 if item_type == 'commit':
969 # NOTE(marcink): submodules we translate to 'link' for backward compat
973 # NOTE(marcink): submodules we translate to 'link' for backward compat
970 item_type = 'link'
974 item_type = 'link'
971
975
972 result.append((item.name, item_mode, item_sha, item_type))
976 result.append((item.name, item_mode, item_sha, item_type))
973 return result
977 return result
974 return _tree_items(repo_id, tree_id)
978 return _tree_items(repo_id, tree_id)
975
979
976 @reraise_safe_exceptions
980 @reraise_safe_exceptions
977 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
981 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
978 """
982 """
979 Old version that uses subprocess to call diff
983 Old version that uses subprocess to call diff
980 """
984 """
981
985
982 flags = [
986 flags = [
983 '-U%s' % context, '--patch',
987 '-U%s' % context, '--patch',
984 '--binary',
988 '--binary',
985 '--find-renames',
989 '--find-renames',
986 '--no-indent-heuristic',
990 '--no-indent-heuristic',
987 # '--indent-heuristic',
991 # '--indent-heuristic',
988 #'--full-index',
992 #'--full-index',
989 #'--abbrev=40'
993 #'--abbrev=40'
990 ]
994 ]
991
995
992 if opt_ignorews:
996 if opt_ignorews:
993 flags.append('--ignore-all-space')
997 flags.append('--ignore-all-space')
994
998
995 if commit_id_1 == self.EMPTY_COMMIT:
999 if commit_id_1 == self.EMPTY_COMMIT:
996 cmd = ['show'] + flags + [commit_id_2]
1000 cmd = ['show'] + flags + [commit_id_2]
997 else:
1001 else:
998 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1002 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
999
1003
1000 if file_filter:
1004 if file_filter:
1001 cmd.extend(['--', file_filter])
1005 cmd.extend(['--', file_filter])
1002
1006
1003 diff, __ = self.run_git_command(wire, cmd)
1007 diff, __ = self.run_git_command(wire, cmd)
1004 # If we used 'show' command, strip first few lines (until actual diff
1008 # If we used 'show' command, strip first few lines (until actual diff
1005 # starts)
1009 # starts)
1006 if commit_id_1 == self.EMPTY_COMMIT:
1010 if commit_id_1 == self.EMPTY_COMMIT:
1007 lines = diff.splitlines()
1011 lines = diff.splitlines()
1008 x = 0
1012 x = 0
1009 for line in lines:
1013 for line in lines:
1010 if line.startswith('diff'):
1014 if line.startswith('diff'):
1011 break
1015 break
1012 x += 1
1016 x += 1
1013 # Append new line just like 'diff' command do
1017 # Append new line just like 'diff' command do
1014 diff = '\n'.join(lines[x:]) + '\n'
1018 diff = '\n'.join(lines[x:]) + '\n'
1015 return diff
1019 return diff
1016
1020
1017 @reraise_safe_exceptions
1021 @reraise_safe_exceptions
1018 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1022 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1019 repo_init = self._factory.repo_libgit2(wire)
1023 repo_init = self._factory.repo_libgit2(wire)
1020 with repo_init as repo:
1024 with repo_init as repo:
1021 swap = True
1025 swap = True
1022 flags = 0
1026 flags = 0
1023 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1027 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1024
1028
1025 if opt_ignorews:
1029 if opt_ignorews:
1026 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1030 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1027
1031
1028 if commit_id_1 == self.EMPTY_COMMIT:
1032 if commit_id_1 == self.EMPTY_COMMIT:
1029 comm1 = repo[commit_id_2]
1033 comm1 = repo[commit_id_2]
1030 diff_obj = comm1.tree.diff_to_tree(
1034 diff_obj = comm1.tree.diff_to_tree(
1031 flags=flags, context_lines=context, swap=swap)
1035 flags=flags, context_lines=context, swap=swap)
1032
1036
1033 else:
1037 else:
1034 comm1 = repo[commit_id_2]
1038 comm1 = repo[commit_id_2]
1035 comm2 = repo[commit_id_1]
1039 comm2 = repo[commit_id_1]
1036 diff_obj = comm1.tree.diff_to_tree(
1040 diff_obj = comm1.tree.diff_to_tree(
1037 comm2.tree, flags=flags, context_lines=context, swap=swap)
1041 comm2.tree, flags=flags, context_lines=context, swap=swap)
1038 similar_flags = 0
1042 similar_flags = 0
1039 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1043 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1040 diff_obj.find_similar(flags=similar_flags)
1044 diff_obj.find_similar(flags=similar_flags)
1041
1045
1042 if file_filter:
1046 if file_filter:
1043 for p in diff_obj:
1047 for p in diff_obj:
1044 if p.delta.old_file.path == file_filter:
1048 if p.delta.old_file.path == file_filter:
1045 return p.patch or ''
1049 return p.patch or ''
1046 # fo matching path == no diff
1050 # fo matching path == no diff
1047 return ''
1051 return ''
1048 return diff_obj.patch or ''
1052 return diff_obj.patch or ''
1049
1053
1050 @reraise_safe_exceptions
1054 @reraise_safe_exceptions
1051 def node_history(self, wire, commit_id, path, limit):
1055 def node_history(self, wire, commit_id, path, limit):
1052 cache_on, context_uid, repo_id = self._cache_on(wire)
1056 cache_on, context_uid, repo_id = self._cache_on(wire)
1053 @self.region.conditional_cache_on_arguments(condition=cache_on)
1057 @self.region.conditional_cache_on_arguments(condition=cache_on)
1054 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1058 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1055 # optimize for n==1, rev-list is much faster for that use-case
1059 # optimize for n==1, rev-list is much faster for that use-case
1056 if limit == 1:
1060 if limit == 1:
1057 cmd = ['rev-list', '-1', commit_id, '--', path]
1061 cmd = ['rev-list', '-1', commit_id, '--', path]
1058 else:
1062 else:
1059 cmd = ['log']
1063 cmd = ['log']
1060 if limit:
1064 if limit:
1061 cmd.extend(['-n', str(safe_int(limit, 0))])
1065 cmd.extend(['-n', str(safe_int(limit, 0))])
1062 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1066 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1063
1067
1064 output, __ = self.run_git_command(wire, cmd)
1068 output, __ = self.run_git_command(wire, cmd)
1065 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1069 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1066
1070
1067 return [x for x in commit_ids]
1071 return [x for x in commit_ids]
1068 return _node_history(context_uid, repo_id, commit_id, path, limit)
1072 return _node_history(context_uid, repo_id, commit_id, path, limit)
1069
1073
1070 @reraise_safe_exceptions
1074 @reraise_safe_exceptions
1071 def node_annotate(self, wire, commit_id, path):
1075 def node_annotate(self, wire, commit_id, path):
1072
1076
1073 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1077 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1074 # -l ==> outputs long shas (and we need all 40 characters)
1078 # -l ==> outputs long shas (and we need all 40 characters)
1075 # --root ==> doesn't put '^' character for boundaries
1079 # --root ==> doesn't put '^' character for boundaries
1076 # -r commit_id ==> blames for the given commit
1080 # -r commit_id ==> blames for the given commit
1077 output, __ = self.run_git_command(wire, cmd)
1081 output, __ = self.run_git_command(wire, cmd)
1078
1082
1079 result = []
1083 result = []
1080 for i, blame_line in enumerate(output.split('\n')[:-1]):
1084 for i, blame_line in enumerate(output.split('\n')[:-1]):
1081 line_no = i + 1
1085 line_no = i + 1
1082 commit_id, line = re.split(r' ', blame_line, 1)
1086 commit_id, line = re.split(r' ', blame_line, 1)
1083 result.append((line_no, commit_id, line))
1087 result.append((line_no, commit_id, line))
1084 return result
1088 return result
1085
1089
1086 @reraise_safe_exceptions
1090 @reraise_safe_exceptions
1087 def update_server_info(self, wire):
1091 def update_server_info(self, wire):
1088 repo = self._factory.repo(wire)
1092 repo = self._factory.repo(wire)
1089 update_server_info(repo)
1093 update_server_info(repo)
1090
1094
1091 @reraise_safe_exceptions
1095 @reraise_safe_exceptions
1092 def get_all_commit_ids(self, wire):
1096 def get_all_commit_ids(self, wire):
1093
1097
1094 cache_on, context_uid, repo_id = self._cache_on(wire)
1098 cache_on, context_uid, repo_id = self._cache_on(wire)
1095 @self.region.conditional_cache_on_arguments(condition=cache_on)
1099 @self.region.conditional_cache_on_arguments(condition=cache_on)
1096 def _get_all_commit_ids(_context_uid, _repo_id):
1100 def _get_all_commit_ids(_context_uid, _repo_id):
1097
1101
1098 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1102 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1099 try:
1103 try:
1100 output, __ = self.run_git_command(wire, cmd)
1104 output, __ = self.run_git_command(wire, cmd)
1101 return output.splitlines()
1105 return output.splitlines()
1102 except Exception:
1106 except Exception:
1103 # Can be raised for empty repositories
1107 # Can be raised for empty repositories
1104 return []
1108 return []
1105 return _get_all_commit_ids(context_uid, repo_id)
1109 return _get_all_commit_ids(context_uid, repo_id)
1106
1110
1107 @reraise_safe_exceptions
1111 @reraise_safe_exceptions
1108 def run_git_command(self, wire, cmd, **opts):
1112 def run_git_command(self, wire, cmd, **opts):
1109 path = wire.get('path', None)
1113 path = wire.get('path', None)
1110
1114
1111 if path and os.path.isdir(path):
1115 if path and os.path.isdir(path):
1112 opts['cwd'] = path
1116 opts['cwd'] = path
1113
1117
1114 if '_bare' in opts:
1118 if '_bare' in opts:
1115 _copts = []
1119 _copts = []
1116 del opts['_bare']
1120 del opts['_bare']
1117 else:
1121 else:
1118 _copts = ['-c', 'core.quotepath=false', ]
1122 _copts = ['-c', 'core.quotepath=false', ]
1119 safe_call = False
1123 safe_call = False
1120 if '_safe' in opts:
1124 if '_safe' in opts:
1121 # no exc on failure
1125 # no exc on failure
1122 del opts['_safe']
1126 del opts['_safe']
1123 safe_call = True
1127 safe_call = True
1124
1128
1125 if '_copts' in opts:
1129 if '_copts' in opts:
1126 _copts.extend(opts['_copts'] or [])
1130 _copts.extend(opts['_copts'] or [])
1127 del opts['_copts']
1131 del opts['_copts']
1128
1132
1129 gitenv = os.environ.copy()
1133 gitenv = os.environ.copy()
1130 gitenv.update(opts.pop('extra_env', {}))
1134 gitenv.update(opts.pop('extra_env', {}))
1131 # need to clean fix GIT_DIR !
1135 # need to clean fix GIT_DIR !
1132 if 'GIT_DIR' in gitenv:
1136 if 'GIT_DIR' in gitenv:
1133 del gitenv['GIT_DIR']
1137 del gitenv['GIT_DIR']
1134 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1138 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1135 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1139 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1136
1140
1137 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1141 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1138 _opts = {'env': gitenv, 'shell': False}
1142 _opts = {'env': gitenv, 'shell': False}
1139
1143
1140 proc = None
1144 proc = None
1141 try:
1145 try:
1142 _opts.update(opts)
1146 _opts.update(opts)
1143 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1147 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1144
1148
1145 return ''.join(proc), ''.join(proc.error)
1149 return ''.join(proc), ''.join(proc.error)
1146 except (EnvironmentError, OSError) as err:
1150 except (EnvironmentError, OSError) as err:
1147 cmd = ' '.join(cmd) # human friendly CMD
1151 cmd = ' '.join(cmd) # human friendly CMD
1148 tb_err = ("Couldn't run git command (%s).\n"
1152 tb_err = ("Couldn't run git command (%s).\n"
1149 "Original error was:%s\n"
1153 "Original error was:%s\n"
1150 "Call options:%s\n"
1154 "Call options:%s\n"
1151 % (cmd, err, _opts))
1155 % (cmd, err, _opts))
1152 log.exception(tb_err)
1156 log.exception(tb_err)
1153 if safe_call:
1157 if safe_call:
1154 return '', err
1158 return '', err
1155 else:
1159 else:
1156 raise exceptions.VcsException()(tb_err)
1160 raise exceptions.VcsException()(tb_err)
1157 finally:
1161 finally:
1158 if proc:
1162 if proc:
1159 proc.close()
1163 proc.close()
1160
1164
1161 @reraise_safe_exceptions
1165 @reraise_safe_exceptions
1162 def install_hooks(self, wire, force=False):
1166 def install_hooks(self, wire, force=False):
1163 from vcsserver.hook_utils import install_git_hooks
1167 from vcsserver.hook_utils import install_git_hooks
1164 bare = self.bare(wire)
1168 bare = self.bare(wire)
1165 path = wire['path']
1169 path = wire['path']
1166 return install_git_hooks(path, bare, force_create=force)
1170 return install_git_hooks(path, bare, force_create=force)
1167
1171
1168 @reraise_safe_exceptions
1172 @reraise_safe_exceptions
1169 def get_hooks_info(self, wire):
1173 def get_hooks_info(self, wire):
1170 from vcsserver.hook_utils import (
1174 from vcsserver.hook_utils import (
1171 get_git_pre_hook_version, get_git_post_hook_version)
1175 get_git_pre_hook_version, get_git_post_hook_version)
1172 bare = self.bare(wire)
1176 bare = self.bare(wire)
1173 path = wire['path']
1177 path = wire['path']
1174 return {
1178 return {
1175 'pre_version': get_git_pre_hook_version(path, bare),
1179 'pre_version': get_git_pre_hook_version(path, bare),
1176 'post_version': get_git_post_hook_version(path, bare),
1180 'post_version': get_git_post_hook_version(path, bare),
1177 }
1181 }
@@ -1,990 +1,1009 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23 import traceback
23 import traceback
24
24
25 from hgext import largefiles, rebase, purge
25 from hgext import largefiles, rebase, purge
26 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29 from mercurial import verify
29 from mercurial import verify
30 from mercurial import repair
30
31
31 import vcsserver
32 import vcsserver
32 from vcsserver import exceptions
33 from vcsserver import exceptions
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 from vcsserver.hgcompat import (
35 from vcsserver.hgcompat import (
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 RepoLookupError, InterventionRequired, RequirementError)
40 RepoLookupError, InterventionRequired, RequirementError)
40 from vcsserver.vcs_base import RemoteBase
41 from vcsserver.vcs_base import RemoteBase
41
42
42 log = logging.getLogger(__name__)
43 log = logging.getLogger(__name__)
43
44
44
45
45 def make_ui_from_config(repo_config):
46 def make_ui_from_config(repo_config):
46
47
47 class LoggingUI(ui.ui):
48 class LoggingUI(ui.ui):
48 def status(self, *msg, **opts):
49 def status(self, *msg, **opts):
49 log.info(' '.join(msg).rstrip('\n'))
50 log.info(' '.join(msg).rstrip('\n'))
50 super(LoggingUI, self).status(*msg, **opts)
51 super(LoggingUI, self).status(*msg, **opts)
51
52
52 def warn(self, *msg, **opts):
53 def warn(self, *msg, **opts):
53 log.warn(' '.join(msg).rstrip('\n'))
54 log.warn(' '.join(msg).rstrip('\n'))
54 super(LoggingUI, self).warn(*msg, **opts)
55 super(LoggingUI, self).warn(*msg, **opts)
55
56
56 def error(self, *msg, **opts):
57 def error(self, *msg, **opts):
57 log.error(' '.join(msg).rstrip('\n'))
58 log.error(' '.join(msg).rstrip('\n'))
58 super(LoggingUI, self).error(*msg, **opts)
59 super(LoggingUI, self).error(*msg, **opts)
59
60
60 def note(self, *msg, **opts):
61 def note(self, *msg, **opts):
61 log.info(' '.join(msg).rstrip('\n'))
62 log.info(' '.join(msg).rstrip('\n'))
62 super(LoggingUI, self).note(*msg, **opts)
63 super(LoggingUI, self).note(*msg, **opts)
63
64
64 def debug(self, *msg, **opts):
65 def debug(self, *msg, **opts):
65 log.debug(' '.join(msg).rstrip('\n'))
66 log.debug(' '.join(msg).rstrip('\n'))
66 super(LoggingUI, self).debug(*msg, **opts)
67 super(LoggingUI, self).debug(*msg, **opts)
67
68
68 baseui = LoggingUI()
69 baseui = LoggingUI()
69
70
70 # clean the baseui object
71 # clean the baseui object
71 baseui._ocfg = hgconfig.config()
72 baseui._ocfg = hgconfig.config()
72 baseui._ucfg = hgconfig.config()
73 baseui._ucfg = hgconfig.config()
73 baseui._tcfg = hgconfig.config()
74 baseui._tcfg = hgconfig.config()
74
75
75 for section, option, value in repo_config:
76 for section, option, value in repo_config:
76 baseui.setconfig(section, option, value)
77 baseui.setconfig(section, option, value)
77
78
78 # make our hgweb quiet so it doesn't print output
79 # make our hgweb quiet so it doesn't print output
79 baseui.setconfig('ui', 'quiet', 'true')
80 baseui.setconfig('ui', 'quiet', 'true')
80
81
81 baseui.setconfig('ui', 'paginate', 'never')
82 baseui.setconfig('ui', 'paginate', 'never')
82 # for better Error reporting of Mercurial
83 # for better Error reporting of Mercurial
83 baseui.setconfig('ui', 'message-output', 'stderr')
84 baseui.setconfig('ui', 'message-output', 'stderr')
84
85
85 # force mercurial to only use 1 thread, otherwise it may try to set a
86 # force mercurial to only use 1 thread, otherwise it may try to set a
86 # signal in a non-main thread, thus generating a ValueError.
87 # signal in a non-main thread, thus generating a ValueError.
87 baseui.setconfig('worker', 'numcpus', 1)
88 baseui.setconfig('worker', 'numcpus', 1)
88
89
89 # If there is no config for the largefiles extension, we explicitly disable
90 # If there is no config for the largefiles extension, we explicitly disable
90 # it here. This overrides settings from repositories hgrc file. Recent
91 # it here. This overrides settings from repositories hgrc file. Recent
91 # mercurial versions enable largefiles in hgrc on clone from largefile
92 # mercurial versions enable largefiles in hgrc on clone from largefile
92 # repo.
93 # repo.
93 if not baseui.hasconfig('extensions', 'largefiles'):
94 if not baseui.hasconfig('extensions', 'largefiles'):
94 log.debug('Explicitly disable largefiles extension for repo.')
95 log.debug('Explicitly disable largefiles extension for repo.')
95 baseui.setconfig('extensions', 'largefiles', '!')
96 baseui.setconfig('extensions', 'largefiles', '!')
96
97
97 return baseui
98 return baseui
98
99
99
100
100 def reraise_safe_exceptions(func):
101 def reraise_safe_exceptions(func):
101 """Decorator for converting mercurial exceptions to something neutral."""
102 """Decorator for converting mercurial exceptions to something neutral."""
102
103
103 def wrapper(*args, **kwargs):
104 def wrapper(*args, **kwargs):
104 try:
105 try:
105 return func(*args, **kwargs)
106 return func(*args, **kwargs)
106 except (Abort, InterventionRequired) as e:
107 except (Abort, InterventionRequired) as e:
107 raise_from_original(exceptions.AbortException(e))
108 raise_from_original(exceptions.AbortException(e))
108 except RepoLookupError as e:
109 except RepoLookupError as e:
109 raise_from_original(exceptions.LookupException(e))
110 raise_from_original(exceptions.LookupException(e))
110 except RequirementError as e:
111 except RequirementError as e:
111 raise_from_original(exceptions.RequirementException(e))
112 raise_from_original(exceptions.RequirementException(e))
112 except RepoError as e:
113 except RepoError as e:
113 raise_from_original(exceptions.VcsException(e))
114 raise_from_original(exceptions.VcsException(e))
114 except LookupError as e:
115 except LookupError as e:
115 raise_from_original(exceptions.LookupException(e))
116 raise_from_original(exceptions.LookupException(e))
116 except Exception as e:
117 except Exception as e:
117 if not hasattr(e, '_vcs_kind'):
118 if not hasattr(e, '_vcs_kind'):
118 log.exception("Unhandled exception in hg remote call")
119 log.exception("Unhandled exception in hg remote call")
119 raise_from_original(exceptions.UnhandledException(e))
120 raise_from_original(exceptions.UnhandledException(e))
120
121
121 raise
122 raise
122 return wrapper
123 return wrapper
123
124
124
125
125 class MercurialFactory(RepoFactory):
126 class MercurialFactory(RepoFactory):
126 repo_type = 'hg'
127 repo_type = 'hg'
127
128
128 def _create_config(self, config, hooks=True):
129 def _create_config(self, config, hooks=True):
129 if not hooks:
130 if not hooks:
130 hooks_to_clean = frozenset((
131 hooks_to_clean = frozenset((
131 'changegroup.repo_size', 'preoutgoing.pre_pull',
132 'changegroup.repo_size', 'preoutgoing.pre_pull',
132 'outgoing.pull_logger', 'prechangegroup.pre_push'))
133 'outgoing.pull_logger', 'prechangegroup.pre_push'))
133 new_config = []
134 new_config = []
134 for section, option, value in config:
135 for section, option, value in config:
135 if section == 'hooks' and option in hooks_to_clean:
136 if section == 'hooks' and option in hooks_to_clean:
136 continue
137 continue
137 new_config.append((section, option, value))
138 new_config.append((section, option, value))
138 config = new_config
139 config = new_config
139
140
140 baseui = make_ui_from_config(config)
141 baseui = make_ui_from_config(config)
141 return baseui
142 return baseui
142
143
143 def _create_repo(self, wire, create):
144 def _create_repo(self, wire, create):
144 baseui = self._create_config(wire["config"])
145 baseui = self._create_config(wire["config"])
145 return instance(baseui, wire["path"], create)
146 return instance(baseui, wire["path"], create)
146
147
147 def repo(self, wire, create=False):
148 def repo(self, wire, create=False):
148 """
149 """
149 Get a repository instance for the given path.
150 Get a repository instance for the given path.
150 """
151 """
151 return self._create_repo(wire, create)
152 return self._create_repo(wire, create)
152
153
153
154
155 def patch_ui_message_output(baseui):
156 baseui.setconfig('ui', 'quiet', 'false')
157 output = io.BytesIO()
158
159 def write(data, **unused_kwargs):
160 output.write(data)
161
162 baseui.status = write
163 baseui.write = write
164 baseui.warn = write
165 baseui.debug = write
166
167 return baseui, output
168
169
154 class HgRemote(RemoteBase):
170 class HgRemote(RemoteBase):
155
171
156 def __init__(self, factory):
172 def __init__(self, factory):
157 self._factory = factory
173 self._factory = factory
158 self._bulk_methods = {
174 self._bulk_methods = {
159 "affected_files": self.ctx_files,
175 "affected_files": self.ctx_files,
160 "author": self.ctx_user,
176 "author": self.ctx_user,
161 "branch": self.ctx_branch,
177 "branch": self.ctx_branch,
162 "children": self.ctx_children,
178 "children": self.ctx_children,
163 "date": self.ctx_date,
179 "date": self.ctx_date,
164 "message": self.ctx_description,
180 "message": self.ctx_description,
165 "parents": self.ctx_parents,
181 "parents": self.ctx_parents,
166 "status": self.ctx_status,
182 "status": self.ctx_status,
167 "obsolete": self.ctx_obsolete,
183 "obsolete": self.ctx_obsolete,
168 "phase": self.ctx_phase,
184 "phase": self.ctx_phase,
169 "hidden": self.ctx_hidden,
185 "hidden": self.ctx_hidden,
170 "_file_paths": self.ctx_list,
186 "_file_paths": self.ctx_list,
171 }
187 }
172
188
173 def _get_ctx(self, repo, ref):
189 def _get_ctx(self, repo, ref):
174 return get_ctx(repo, ref)
190 return get_ctx(repo, ref)
175
191
176 @reraise_safe_exceptions
192 @reraise_safe_exceptions
177 def discover_hg_version(self):
193 def discover_hg_version(self):
178 from mercurial import util
194 from mercurial import util
179 return util.version()
195 return util.version()
180
196
181 @reraise_safe_exceptions
197 @reraise_safe_exceptions
182 def is_empty(self, wire):
198 def is_empty(self, wire):
183 repo = self._factory.repo(wire)
199 repo = self._factory.repo(wire)
184
200
185 try:
201 try:
186 return len(repo) == 0
202 return len(repo) == 0
187 except Exception:
203 except Exception:
188 log.exception("failed to read object_store")
204 log.exception("failed to read object_store")
189 return False
205 return False
190
206
191 @reraise_safe_exceptions
207 @reraise_safe_exceptions
192 def archive_repo(self, archive_path, mtime, file_info, kind):
208 def archive_repo(self, archive_path, mtime, file_info, kind):
193 if kind == "tgz":
209 if kind == "tgz":
194 archiver = archival.tarit(archive_path, mtime, "gz")
210 archiver = archival.tarit(archive_path, mtime, "gz")
195 elif kind == "tbz2":
211 elif kind == "tbz2":
196 archiver = archival.tarit(archive_path, mtime, "bz2")
212 archiver = archival.tarit(archive_path, mtime, "bz2")
197 elif kind == 'zip':
213 elif kind == 'zip':
198 archiver = archival.zipit(archive_path, mtime)
214 archiver = archival.zipit(archive_path, mtime)
199 else:
215 else:
200 raise exceptions.ArchiveException()(
216 raise exceptions.ArchiveException()(
201 'Remote does not support: "%s".' % kind)
217 'Remote does not support: "%s".' % kind)
202
218
203 for f_path, f_mode, f_is_link, f_content in file_info:
219 for f_path, f_mode, f_is_link, f_content in file_info:
204 archiver.addfile(f_path, f_mode, f_is_link, f_content)
220 archiver.addfile(f_path, f_mode, f_is_link, f_content)
205 archiver.done()
221 archiver.done()
206
222
    @reraise_safe_exceptions
    def bookmarks(self, wire):
        """Return a plain-dict copy of the repository's bookmarks.

        Cached per repository context when caching is enabled for ``wire``.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _bookmarks(_context_uid, _repo_id):
            # underscore args exist only to form the cache key; real
            # values come from the enclosing closure
            repo = self._factory.repo(wire)
            return dict(repo._bookmarks)

        return _bookmarks(context_uid, repo_id)
216
232
    @reraise_safe_exceptions
    def branches(self, wire, normal, closed):
        """Return a {branch_name: tip_node} mapping.

        ``normal`` includes open branches, ``closed`` includes closed
        ones; both may be set. Cached when enabled for ``wire``.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _branches(_context_uid, _repo_id, _normal, _closed):
            # underscore args exist only to form the cache key
            repo = self._factory.repo(wire)
            iter_branches = repo.branchmap().iterbranches()
            bt = {}
            for branch_name, _heads, tip, is_closed in iter_branches:
                if normal and not is_closed:
                    bt[branch_name] = tip
                if closed and is_closed:
                    bt[branch_name] = tip

            return bt

        return _branches(context_uid, repo_id, normal, closed)
234
250
    @reraise_safe_exceptions
    def bulk_request(self, wire, commit_id, pre_load):
        """Fetch several commit attributes for ``commit_id`` in one call.

        ``pre_load`` lists attribute names resolved through
        ``self._bulk_methods``; an unknown name raises
        ``exceptions.VcsException``. Returns {attr: value}.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _commit_id, _pre_load):
            # NOTE(review): unlike sibling helpers the cache key omits
            # context_uid — confirm this is intentional
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_methods[attr]
                    result[attr] = method(wire, commit_id)
                except KeyError as e:
                    raise exceptions.VcsException(e)(
                        'Unknown bulk attribute: "%s"' % attr)
            return result

        # pre_load is sorted so equivalent requests share one cache key
        return _bulk_request(repo_id, commit_id, sorted(pre_load))
251
267
    @reraise_safe_exceptions
    def ctx_branch(self, wire, commit_id):
        """Return the branch name of ``commit_id`` (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_branch(_repo_id, _commit_id):
            # underscore args exist only to form the cache key
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.branch()
        return _ctx_branch(repo_id, commit_id)
261
277
    @reraise_safe_exceptions
    def ctx_date(self, wire, commit_id):
        """Return the date of ``commit_id`` as reported by the changeset
        (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_date(_repo_id, _commit_id):
            # underscore args exist only to form the cache key
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.date()
        return _ctx_date(repo_id, commit_id)
271
287
272 @reraise_safe_exceptions
288 @reraise_safe_exceptions
273 def ctx_description(self, wire, revision):
289 def ctx_description(self, wire, revision):
274 repo = self._factory.repo(wire)
290 repo = self._factory.repo(wire)
275 ctx = self._get_ctx(repo, revision)
291 ctx = self._get_ctx(repo, revision)
276 return ctx.description()
292 return ctx.description()
277
293
    @reraise_safe_exceptions
    def ctx_files(self, wire, commit_id):
        """Return the list of files touched by ``commit_id``
        (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_files(_repo_id, _commit_id):
            # underscore args exist only to form the cache key
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.files()

        return _ctx_files(repo_id, commit_id)
288
304
289 @reraise_safe_exceptions
305 @reraise_safe_exceptions
290 def ctx_list(self, path, revision):
306 def ctx_list(self, path, revision):
291 repo = self._factory.repo(path)
307 repo = self._factory.repo(path)
292 ctx = self._get_ctx(repo, revision)
308 ctx = self._get_ctx(repo, revision)
293 return list(ctx)
309 return list(ctx)
294
310
    @reraise_safe_exceptions
    def ctx_parents(self, wire, commit_id):
        """Return hex hashes of the visible parents of ``commit_id``,
        skipping hidden/obsolete parents (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_parents(_repo_id, _commit_id):
            # underscore args exist only to form the cache key
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [parent.hex() for parent in ctx.parents()
                    if not (parent.hidden() or parent.obsolete())]

        return _ctx_parents(repo_id, commit_id)
306
322
    @reraise_safe_exceptions
    def ctx_children(self, wire, commit_id):
        """Return hex hashes of the visible children of ``commit_id``,
        skipping hidden/obsolete children (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_children(_repo_id, _commit_id):
            # underscore args exist only to form the cache key
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return [child.hex() for child in ctx.children()
                    if not (child.hidden() or child.obsolete())]

        return _ctx_children(repo_id, commit_id)
318
334
    @reraise_safe_exceptions
    def ctx_phase(self, wire, commit_id):
        """Return the phase number of ``commit_id`` (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_phase(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            # public=0, draft=1, secret=2 (mercurial phases; the previous
            # comment said secret=3, which does not match hg's phase table)
            return ctx.phase()
        return _ctx_phase(context_uid, repo_id, commit_id)
329
345
    @reraise_safe_exceptions
    def ctx_obsolete(self, wire, commit_id):
        """Check whether ``commit_id`` is obsolete (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.obsolete()
        return _ctx_obsolete(context_uid, repo_id, commit_id)
339
355
    @reraise_safe_exceptions
    def ctx_hidden(self, wire, commit_id):
        """Check whether ``commit_id`` is hidden (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _ctx_hidden(_context_uid, _repo_id, _commit_id):
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            return ctx.hidden()
        return _ctx_hidden(context_uid, repo_id, commit_id)
349
365
350 @reraise_safe_exceptions
366 @reraise_safe_exceptions
351 def ctx_substate(self, wire, revision):
367 def ctx_substate(self, wire, revision):
352 repo = self._factory.repo(wire)
368 repo = self._factory.repo(wire)
353 ctx = self._get_ctx(repo, revision)
369 ctx = self._get_ctx(repo, revision)
354 return ctx.substate
370 return ctx.substate
355
371
356 @reraise_safe_exceptions
372 @reraise_safe_exceptions
357 def ctx_status(self, wire, revision):
373 def ctx_status(self, wire, revision):
358 repo = self._factory.repo(wire)
374 repo = self._factory.repo(wire)
359 ctx = self._get_ctx(repo, revision)
375 ctx = self._get_ctx(repo, revision)
360 status = repo[ctx.p1().node()].status(other=ctx.node())
376 status = repo[ctx.p1().node()].status(other=ctx.node())
361 # object of status (odd, custom named tuple in mercurial) is not
377 # object of status (odd, custom named tuple in mercurial) is not
362 # correctly serializable, we make it a list, as the underling
378 # correctly serializable, we make it a list, as the underling
363 # API expects this to be a list
379 # API expects this to be a list
364 return list(status)
380 return list(status)
365
381
366 @reraise_safe_exceptions
382 @reraise_safe_exceptions
367 def ctx_user(self, wire, revision):
383 def ctx_user(self, wire, revision):
368 repo = self._factory.repo(wire)
384 repo = self._factory.repo(wire)
369 ctx = self._get_ctx(repo, revision)
385 ctx = self._get_ctx(repo, revision)
370 return ctx.user()
386 return ctx.user()
371
387
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """Verify that ``url`` points at a reachable Mercurial repository.

        Strips an optional "<proto>+" composite-scheme prefix (e.g.
        "svn+http://"), probes the URL over HTTP (with basic/digest auth
        when credentials are embedded), then — unless the prefix was
        "svn" — asks the remote peer to look up 'tip'. Raises
        ``exceptions.URLError`` on any failure; returns True on success.
        """
        _proto = None
        # A "+" before "://" marks a composite scheme; remember the
        # prefix and strip it from the URL that gets probed.
        if '+' in url[:url.find('://')]:
            _proto = url[0:url.find('+')]
            url = url[url.find('+') + 1:]
        handlers = []
        url_obj = url_parser(url)
        test_uri, authinfo = url_obj.authinfo()
        # Mask credentials before the URL is logged anywhere.
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)

        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if authinfo:
            # create a password manager
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        o = urllib2.build_opener(*handlers)
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                        ('Accept', 'application/mercurial-0.1')]

        # Issue a cheap hg wire-protocol "between" command with null
        # nodes to test reachability without transferring data.
        q = {"cmd": 'between'}
        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now check if it's a proper hg repo, but don't do it for svn
        try:
            if _proto == 'svn':
                pass
            else:
                # check for pure hg repos
                log.debug(
                    "Verifying if URL is a Mercurial repository: %s",
                    cleaned_uri)
                ui = make_ui_from_config(config)
                peer_checker = makepeer(ui, url)
                # 'tip' lookup succeeds only against a real hg repo
                peer_checker.lookup('tip')
        except Exception as e:
            log.warning("URL is not a valid Mercurial repository: %s",
                        cleaned_uri)
            raise exceptions.URLError(e)(
                "url [%s] does not look like an hg repo org_exc: %s"
                % (cleaned_uri, e))

        log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
        return True
436
452
437 @reraise_safe_exceptions
453 @reraise_safe_exceptions
438 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
454 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
439 repo = self._factory.repo(wire)
455 repo = self._factory.repo(wire)
440
456
441 if file_filter:
457 if file_filter:
442 match_filter = match(file_filter[0], '', [file_filter[1]])
458 match_filter = match(file_filter[0], '', [file_filter[1]])
443 else:
459 else:
444 match_filter = file_filter
460 match_filter = file_filter
445 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
461 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
446
462
447 try:
463 try:
448 return "".join(patch.diff(
464 return "".join(patch.diff(
449 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
465 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
450 except RepoLookupError as e:
466 except RepoLookupError as e:
451 raise exceptions.LookupException(e)()
467 raise exceptions.LookupException(e)()
452
468
    @reraise_safe_exceptions
    def node_history(self, wire, revision, path, limit):
        """Return up to ``limit`` hex hashes of commits touching ``path``,
        walking the filelog newest-first from ``revision``.

        Hidden and obsolete changesets are skipped. Cached when enabled.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
            # underscore args exist only to form the cache key
            repo = self._factory.repo(wire)

            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(path)

            def history_iter():
                # Yield file contexts at or before the starting file
                # revision, skipping hidden/obsolete changesets.
                limit_rev = fctx.rev()
                for obj in reversed(list(fctx.filelog())):
                    obj = fctx.filectx(obj)
                    ctx = obj.changectx()
                    if ctx.hidden() or ctx.obsolete():
                        continue

                    if limit_rev >= obj.rev():
                        yield obj

            history = []
            for cnt, obj in enumerate(history_iter()):
                if limit and cnt >= limit:
                    break
                history.append(hex(obj.node()))

            return [x for x in history]
        return _node_history(context_uid, repo_id, revision, path, limit)
482
498
483 @reraise_safe_exceptions
499 @reraise_safe_exceptions
484 def node_history_untill(self, wire, revision, path, limit):
500 def node_history_untill(self, wire, revision, path, limit):
485 cache_on, context_uid, repo_id = self._cache_on(wire)
501 cache_on, context_uid, repo_id = self._cache_on(wire)
486 @self.region.conditional_cache_on_arguments(condition=cache_on)
502 @self.region.conditional_cache_on_arguments(condition=cache_on)
487 def _node_history_until(_context_uid, _repo_id):
503 def _node_history_until(_context_uid, _repo_id):
488 repo = self._factory.repo(wire)
504 repo = self._factory.repo(wire)
489 ctx = self._get_ctx(repo, revision)
505 ctx = self._get_ctx(repo, revision)
490 fctx = ctx.filectx(path)
506 fctx = ctx.filectx(path)
491
507
492 file_log = list(fctx.filelog())
508 file_log = list(fctx.filelog())
493 if limit:
509 if limit:
494 # Limit to the last n items
510 # Limit to the last n items
495 file_log = file_log[-limit:]
511 file_log = file_log[-limit:]
496
512
497 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
513 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
498 return _node_history_until(context_uid, repo_id, revision, path, limit)
514 return _node_history_until(context_uid, repo_id, revision, path, limit)
499
515
500 @reraise_safe_exceptions
516 @reraise_safe_exceptions
501 def fctx_annotate(self, wire, revision, path):
517 def fctx_annotate(self, wire, revision, path):
502 repo = self._factory.repo(wire)
518 repo = self._factory.repo(wire)
503 ctx = self._get_ctx(repo, revision)
519 ctx = self._get_ctx(repo, revision)
504 fctx = ctx.filectx(path)
520 fctx = ctx.filectx(path)
505
521
506 result = []
522 result = []
507 for i, annotate_obj in enumerate(fctx.annotate(), 1):
523 for i, annotate_obj in enumerate(fctx.annotate(), 1):
508 ln_no = i
524 ln_no = i
509 sha = hex(annotate_obj.fctx.node())
525 sha = hex(annotate_obj.fctx.node())
510 content = annotate_obj.text
526 content = annotate_obj.text
511 result.append((ln_no, sha, content))
527 result.append((ln_no, sha, content))
512 return result
528 return result
513
529
514 @reraise_safe_exceptions
530 @reraise_safe_exceptions
515 def fctx_node_data(self, wire, revision, path):
531 def fctx_node_data(self, wire, revision, path):
516 repo = self._factory.repo(wire)
532 repo = self._factory.repo(wire)
517 ctx = self._get_ctx(repo, revision)
533 ctx = self._get_ctx(repo, revision)
518 fctx = ctx.filectx(path)
534 fctx = ctx.filectx(path)
519 return fctx.data()
535 return fctx.data()
520
536
    @reraise_safe_exceptions
    def fctx_flags(self, wire, commit_id, path):
        """Return the file flags of ``path`` at ``commit_id``
        (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _fctx_flags(_repo_id, _commit_id, _path):
            # underscore args exist only to form the cache key
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            fctx = ctx.filectx(path)
            return fctx.flags()

        return _fctx_flags(repo_id, commit_id, path)
532
548
    @reraise_safe_exceptions
    def fctx_size(self, wire, commit_id, path):
        """Return the size of ``path`` at ``commit_id``
        (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _fctx_size(_repo_id, _revision, _path):
            # NOTE: the _revision parameter actually receives commit_id;
            # it is only a cache-key component so this is harmless
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, commit_id)
            fctx = ctx.filectx(path)
            return fctx.size()
        return _fctx_size(repo_id, commit_id, path)
543
559
    @reraise_safe_exceptions
    def get_all_commit_ids(self, wire, name):
        """Return hex hashes of every commit visible under the repository
        filter ``name`` (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_all_commit_ids(_context_uid, _repo_id, _name):
            repo = self._factory.repo(wire)
            repo = repo.filtered(name)
            # x[7] is assumed to be the binary node id within mercurial's
            # changelog index entry — TODO confirm the offset is stable
            # across the hg versions this server supports
            revs = map(lambda x: hex(x[7]), repo.changelog.index)
            return revs
        return _get_all_commit_ids(context_uid, repo_id, name)
554
570
555 @reraise_safe_exceptions
571 @reraise_safe_exceptions
556 def get_config_value(self, wire, section, name, untrusted=False):
572 def get_config_value(self, wire, section, name, untrusted=False):
557 repo = self._factory.repo(wire)
573 repo = self._factory.repo(wire)
558 return repo.ui.config(section, name, untrusted=untrusted)
574 return repo.ui.config(section, name, untrusted=untrusted)
559
575
    @reraise_safe_exceptions
    def is_large_file(self, wire, commit_id, path):
        """Check whether ``path`` is a largefiles standin
        (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
            # decision is made from the path alone; repo/commit args
            # only participate in the cache key
            return largefiles.lfutil.isstandin(path)

        return _is_large_file(context_uid, repo_id, commit_id, path)
568
584
    @reraise_safe_exceptions
    def is_binary(self, wire, revision, path):
        """Check whether ``path`` at ``revision`` is binary
        (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)

        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _is_binary(_repo_id, _sha, _path):
            # underscore args exist only to form the cache key
            repo = self._factory.repo(wire)
            ctx = self._get_ctx(repo, revision)
            fctx = ctx.filectx(path)
            return fctx.isbinary()

        return _is_binary(repo_id, revision, path)
581
597
582 @reraise_safe_exceptions
598 @reraise_safe_exceptions
583 def in_largefiles_store(self, wire, sha):
599 def in_largefiles_store(self, wire, sha):
584 repo = self._factory.repo(wire)
600 repo = self._factory.repo(wire)
585 return largefiles.lfutil.instore(repo, sha)
601 return largefiles.lfutil.instore(repo, sha)
586
602
587 @reraise_safe_exceptions
603 @reraise_safe_exceptions
588 def in_user_cache(self, wire, sha):
604 def in_user_cache(self, wire, sha):
589 repo = self._factory.repo(wire)
605 repo = self._factory.repo(wire)
590 return largefiles.lfutil.inusercache(repo.ui, sha)
606 return largefiles.lfutil.inusercache(repo.ui, sha)
591
607
592 @reraise_safe_exceptions
608 @reraise_safe_exceptions
593 def store_path(self, wire, sha):
609 def store_path(self, wire, sha):
594 repo = self._factory.repo(wire)
610 repo = self._factory.repo(wire)
595 return largefiles.lfutil.storepath(repo, sha)
611 return largefiles.lfutil.storepath(repo, sha)
596
612
597 @reraise_safe_exceptions
613 @reraise_safe_exceptions
598 def link(self, wire, sha, path):
614 def link(self, wire, sha, path):
599 repo = self._factory.repo(wire)
615 repo = self._factory.repo(wire)
600 largefiles.lfutil.link(
616 largefiles.lfutil.link(
601 largefiles.lfutil.usercachepath(repo.ui, sha), path)
617 largefiles.lfutil.usercachepath(repo.ui, sha), path)
602
618
    @reraise_safe_exceptions
    def localrepository(self, wire, create=False):
        """Instantiate (and with ``create=True``, create) the local
        repository for ``wire``; the instance itself is discarded."""
        self._factory.repo(wire, create=create)
606
622
    @reraise_safe_exceptions
    def lookup(self, wire, revision, both):
        """Resolve ``revision`` (hash, name, or integer index) to a commit
        hash; with ``both`` set, return ``(hex, rev_number)`` instead.

        Raises ``exceptions.LookupException`` when the revision cannot be
        resolved. Cached when enabled for ``wire``.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _lookup(_context_uid, _repo_id, _revision, _both):

            repo = self._factory.repo(wire)
            rev = _revision
            if isinstance(rev, int):
                # NOTE(marcink):
                # since Mercurial doesn't support negative indexes properly
                # we need to shift accordingly by one to get proper index, e.g
                # repo[-1] => repo[-2]
                # repo[0] => repo[-1]
                if rev <= 0:
                    rev = rev + -1
            try:
                ctx = self._get_ctx(repo, rev)
            except (TypeError, RepoLookupError) as e:
                # attach the original traceback so the client side can
                # surface it
                e._org_exc_tb = traceback.format_exc()
                raise exceptions.LookupException(e)(rev)
            except LookupError as e:
                e._org_exc_tb = traceback.format_exc()
                raise exceptions.LookupException(e)(e.name)

            if not both:
                return ctx.hex()

            # re-resolve by hash to obtain the canonical rev number
            ctx = repo[ctx.hex()]
            return ctx.hex(), ctx.rev()

        return _lookup(context_uid, repo_id, revision, both)
639
655
    @reraise_safe_exceptions
    def sync_push(self, wire, url):
        """Push the local repository (new branches and all bookmarks)
        to ``url``.

        Returns the changegroup push result, or None when ``url`` fails
        the ``check_url`` probe.
        """
        if not self.check_url(url, wire['config']):
            return

        repo = self._factory.repo(wire)

        # Disable any prompts for this repo
        repo.ui.setconfig('ui', 'interactive', 'off', '-y')

        bookmarks = dict(repo._bookmarks).keys()
        remote = peer(repo, {}, url)
        # Disable any prompts for this remote
        remote.ui.setconfig('ui', 'interactive', 'off', '-y')

        return exchange.push(
            repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
657
673
658 @reraise_safe_exceptions
674 @reraise_safe_exceptions
659 def revision(self, wire, rev):
675 def revision(self, wire, rev):
660 repo = self._factory.repo(wire)
676 repo = self._factory.repo(wire)
661 ctx = self._get_ctx(repo, rev)
677 ctx = self._get_ctx(repo, rev)
662 return ctx.rev()
678 return ctx.rev()
663
679
    @reraise_safe_exceptions
    def rev_range(self, wire, commit_filter):
        """Resolve the revset ``commit_filter`` to a list of revision
        numbers (cached when enabled)."""
        cache_on, context_uid, repo_id = self._cache_on(wire)

        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _rev_range(_context_uid, _repo_id, _filter):
            # underscore args exist only to form the cache key
            repo = self._factory.repo(wire)
            revisions = [rev for rev in revrange(repo, commit_filter)]
            return revisions

        # the filter is sorted so equivalent queries share one cache key
        return _rev_range(context_uid, repo_id, sorted(commit_filter))
675
691
676 @reraise_safe_exceptions
692 @reraise_safe_exceptions
677 def rev_range_hash(self, wire, node):
693 def rev_range_hash(self, wire, node):
678 repo = self._factory.repo(wire)
694 repo = self._factory.repo(wire)
679
695
680 def get_revs(repo, rev_opt):
696 def get_revs(repo, rev_opt):
681 if rev_opt:
697 if rev_opt:
682 revs = revrange(repo, rev_opt)
698 revs = revrange(repo, rev_opt)
683 if len(revs) == 0:
699 if len(revs) == 0:
684 return (nullrev, nullrev)
700 return (nullrev, nullrev)
685 return max(revs), min(revs)
701 return max(revs), min(revs)
686 else:
702 else:
687 return len(repo) - 1, 0
703 return len(repo) - 1, 0
688
704
689 stop, start = get_revs(repo, [node + ':'])
705 stop, start = get_revs(repo, [node + ':'])
690 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
706 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
691 return revs
707 return revs
692
708
693 @reraise_safe_exceptions
709 @reraise_safe_exceptions
694 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
710 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
695 other_path = kwargs.pop('other_path', None)
711 other_path = kwargs.pop('other_path', None)
696
712
697 # case when we want to compare two independent repositories
713 # case when we want to compare two independent repositories
698 if other_path and other_path != wire["path"]:
714 if other_path and other_path != wire["path"]:
699 baseui = self._factory._create_config(wire["config"])
715 baseui = self._factory._create_config(wire["config"])
700 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
716 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
701 else:
717 else:
702 repo = self._factory.repo(wire)
718 repo = self._factory.repo(wire)
703 return list(repo.revs(rev_spec, *args))
719 return list(repo.revs(rev_spec, *args))
704
720
705 @reraise_safe_exceptions
721 @reraise_safe_exceptions
706 def verify(self, wire,):
722 def verify(self, wire,):
707 repo = self._factory.repo(wire)
723 repo = self._factory.repo(wire)
708 baseui = self._factory._create_config(wire['config'])
724 baseui = self._factory._create_config(wire['config'])
709 baseui.setconfig('ui', 'quiet', 'false')
710 output = io.BytesIO()
711
725
712 def write(data, **unused_kwargs):
726 baseui, output = patch_ui_message_output(baseui)
713 output.write(data)
714 baseui.write = write
715
727
716 repo.ui = baseui
728 repo.ui = baseui
717 verify.verify(repo)
729 verify.verify(repo)
718 return output.getvalue()
730 return output.getvalue()
719
731
720 @reraise_safe_exceptions
732 @reraise_safe_exceptions
721 def hg_update_cache(self, wire,):
733 def hg_update_cache(self, wire,):
722 repo = self._factory.repo(wire)
734 repo = self._factory.repo(wire)
723 baseui = self._factory._create_config(wire['config'])
735 baseui = self._factory._create_config(wire['config'])
724 baseui.setconfig('ui', 'quiet', 'false')
736 baseui, output = patch_ui_message_output(baseui)
725 output = io.BytesIO()
726
727 def write(data, **unused_kwargs):
728 output.write(data)
729 baseui.write = write
730
737
731 repo.ui = baseui
738 repo.ui = baseui
732 with repo.wlock(), repo.lock():
739 with repo.wlock(), repo.lock():
733 repo.updatecaches(full=True)
740 repo.updatecaches(full=True)
734
741
735 return output.getvalue()
742 return output.getvalue()
736
743
737 @reraise_safe_exceptions
744 @reraise_safe_exceptions
745 def hg_rebuild_fn_cache(self, wire,):
746 repo = self._factory.repo(wire)
747 baseui = self._factory._create_config(wire['config'])
748 baseui, output = patch_ui_message_output(baseui)
749
750 repo.ui = baseui
751
752 repair.rebuildfncache(baseui, repo)
753
754 return output.getvalue()
755
756 @reraise_safe_exceptions
738 def tags(self, wire):
757 def tags(self, wire):
739 cache_on, context_uid, repo_id = self._cache_on(wire)
758 cache_on, context_uid, repo_id = self._cache_on(wire)
740 @self.region.conditional_cache_on_arguments(condition=cache_on)
759 @self.region.conditional_cache_on_arguments(condition=cache_on)
741 def _tags(_context_uid, _repo_id):
760 def _tags(_context_uid, _repo_id):
742 repo = self._factory.repo(wire)
761 repo = self._factory.repo(wire)
743 return repo.tags()
762 return repo.tags()
744
763
745 return _tags(context_uid, repo_id)
764 return _tags(context_uid, repo_id)
746
765
747 @reraise_safe_exceptions
766 @reraise_safe_exceptions
748 def update(self, wire, node=None, clean=False):
767 def update(self, wire, node=None, clean=False):
749 repo = self._factory.repo(wire)
768 repo = self._factory.repo(wire)
750 baseui = self._factory._create_config(wire['config'])
769 baseui = self._factory._create_config(wire['config'])
751 commands.update(baseui, repo, node=node, clean=clean)
770 commands.update(baseui, repo, node=node, clean=clean)
752
771
753 @reraise_safe_exceptions
772 @reraise_safe_exceptions
754 def identify(self, wire):
773 def identify(self, wire):
755 repo = self._factory.repo(wire)
774 repo = self._factory.repo(wire)
756 baseui = self._factory._create_config(wire['config'])
775 baseui = self._factory._create_config(wire['config'])
757 output = io.BytesIO()
776 output = io.BytesIO()
758 baseui.write = output.write
777 baseui.write = output.write
759 # This is required to get a full node id
778 # This is required to get a full node id
760 baseui.debugflag = True
779 baseui.debugflag = True
761 commands.identify(baseui, repo, id=True)
780 commands.identify(baseui, repo, id=True)
762
781
763 return output.getvalue()
782 return output.getvalue()
764
783
765 @reraise_safe_exceptions
784 @reraise_safe_exceptions
766 def heads(self, wire, branch=None):
785 def heads(self, wire, branch=None):
767 repo = self._factory.repo(wire)
786 repo = self._factory.repo(wire)
768 baseui = self._factory._create_config(wire['config'])
787 baseui = self._factory._create_config(wire['config'])
769 output = io.BytesIO()
788 output = io.BytesIO()
770
789
771 def write(data, **unused_kwargs):
790 def write(data, **unused_kwargs):
772 output.write(data)
791 output.write(data)
773
792
774 baseui.write = write
793 baseui.write = write
775 if branch:
794 if branch:
776 args = [branch]
795 args = [branch]
777 else:
796 else:
778 args = []
797 args = []
779 commands.heads(baseui, repo, template='{node} ', *args)
798 commands.heads(baseui, repo, template='{node} ', *args)
780
799
781 return output.getvalue()
800 return output.getvalue()
782
801
783 @reraise_safe_exceptions
802 @reraise_safe_exceptions
784 def ancestor(self, wire, revision1, revision2):
803 def ancestor(self, wire, revision1, revision2):
785 repo = self._factory.repo(wire)
804 repo = self._factory.repo(wire)
786 changelog = repo.changelog
805 changelog = repo.changelog
787 lookup = repo.lookup
806 lookup = repo.lookup
788 a = changelog.ancestor(lookup(revision1), lookup(revision2))
807 a = changelog.ancestor(lookup(revision1), lookup(revision2))
789 return hex(a)
808 return hex(a)
790
809
791 @reraise_safe_exceptions
810 @reraise_safe_exceptions
792 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
811 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
793 baseui = self._factory._create_config(wire["config"], hooks=hooks)
812 baseui = self._factory._create_config(wire["config"], hooks=hooks)
794 clone(baseui, source, dest, noupdate=not update_after_clone)
813 clone(baseui, source, dest, noupdate=not update_after_clone)
795
814
796 @reraise_safe_exceptions
815 @reraise_safe_exceptions
797 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
816 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
798
817
799 repo = self._factory.repo(wire)
818 repo = self._factory.repo(wire)
800 baseui = self._factory._create_config(wire['config'])
819 baseui = self._factory._create_config(wire['config'])
801 publishing = baseui.configbool('phases', 'publish')
820 publishing = baseui.configbool('phases', 'publish')
802 if publishing:
821 if publishing:
803 new_commit = 'public'
822 new_commit = 'public'
804 else:
823 else:
805 new_commit = 'draft'
824 new_commit = 'draft'
806
825
807 def _filectxfn(_repo, ctx, path):
826 def _filectxfn(_repo, ctx, path):
808 """
827 """
809 Marks given path as added/changed/removed in a given _repo. This is
828 Marks given path as added/changed/removed in a given _repo. This is
810 for internal mercurial commit function.
829 for internal mercurial commit function.
811 """
830 """
812
831
813 # check if this path is removed
832 # check if this path is removed
814 if path in removed:
833 if path in removed:
815 # returning None is a way to mark node for removal
834 # returning None is a way to mark node for removal
816 return None
835 return None
817
836
818 # check if this path is added
837 # check if this path is added
819 for node in updated:
838 for node in updated:
820 if node['path'] == path:
839 if node['path'] == path:
821 return memfilectx(
840 return memfilectx(
822 _repo,
841 _repo,
823 changectx=ctx,
842 changectx=ctx,
824 path=node['path'],
843 path=node['path'],
825 data=node['content'],
844 data=node['content'],
826 islink=False,
845 islink=False,
827 isexec=bool(node['mode'] & stat.S_IXUSR),
846 isexec=bool(node['mode'] & stat.S_IXUSR),
828 copysource=False)
847 copysource=False)
829
848
830 raise exceptions.AbortException()(
849 raise exceptions.AbortException()(
831 "Given path haven't been marked as added, "
850 "Given path haven't been marked as added, "
832 "changed or removed (%s)" % path)
851 "changed or removed (%s)" % path)
833
852
834 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
853 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
835
854
836 commit_ctx = memctx(
855 commit_ctx = memctx(
837 repo=repo,
856 repo=repo,
838 parents=parents,
857 parents=parents,
839 text=message,
858 text=message,
840 files=files,
859 files=files,
841 filectxfn=_filectxfn,
860 filectxfn=_filectxfn,
842 user=user,
861 user=user,
843 date=(commit_time, commit_timezone),
862 date=(commit_time, commit_timezone),
844 extra=extra)
863 extra=extra)
845
864
846 n = repo.commitctx(commit_ctx)
865 n = repo.commitctx(commit_ctx)
847 new_id = hex(n)
866 new_id = hex(n)
848
867
849 return new_id
868 return new_id
850
869
851 @reraise_safe_exceptions
870 @reraise_safe_exceptions
852 def pull(self, wire, url, commit_ids=None):
871 def pull(self, wire, url, commit_ids=None):
853 repo = self._factory.repo(wire)
872 repo = self._factory.repo(wire)
854 # Disable any prompts for this repo
873 # Disable any prompts for this repo
855 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
874 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
856
875
857 remote = peer(repo, {}, url)
876 remote = peer(repo, {}, url)
858 # Disable any prompts for this remote
877 # Disable any prompts for this remote
859 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
878 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
860
879
861 if commit_ids:
880 if commit_ids:
862 commit_ids = [bin(commit_id) for commit_id in commit_ids]
881 commit_ids = [bin(commit_id) for commit_id in commit_ids]
863
882
864 return exchange.pull(
883 return exchange.pull(
865 repo, remote, heads=commit_ids, force=None).cgresult
884 repo, remote, heads=commit_ids, force=None).cgresult
866
885
867 @reraise_safe_exceptions
886 @reraise_safe_exceptions
868 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
887 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
869 repo = self._factory.repo(wire)
888 repo = self._factory.repo(wire)
870 baseui = self._factory._create_config(wire['config'], hooks=hooks)
889 baseui = self._factory._create_config(wire['config'], hooks=hooks)
871
890
872 # Mercurial internally has a lot of logic that checks ONLY if
891 # Mercurial internally has a lot of logic that checks ONLY if
873 # option is defined, we just pass those if they are defined then
892 # option is defined, we just pass those if they are defined then
874 opts = {}
893 opts = {}
875 if bookmark:
894 if bookmark:
876 opts['bookmark'] = bookmark
895 opts['bookmark'] = bookmark
877 if branch:
896 if branch:
878 opts['branch'] = branch
897 opts['branch'] = branch
879 if revision:
898 if revision:
880 opts['rev'] = revision
899 opts['rev'] = revision
881
900
882 commands.pull(baseui, repo, source, **opts)
901 commands.pull(baseui, repo, source, **opts)
883
902
884 @reraise_safe_exceptions
903 @reraise_safe_exceptions
885 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
904 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
886 repo = self._factory.repo(wire)
905 repo = self._factory.repo(wire)
887 baseui = self._factory._create_config(wire['config'], hooks=hooks)
906 baseui = self._factory._create_config(wire['config'], hooks=hooks)
888 commands.push(baseui, repo, dest=dest_path, rev=revisions,
907 commands.push(baseui, repo, dest=dest_path, rev=revisions,
889 new_branch=push_branches)
908 new_branch=push_branches)
890
909
891 @reraise_safe_exceptions
910 @reraise_safe_exceptions
892 def strip(self, wire, revision, update, backup):
911 def strip(self, wire, revision, update, backup):
893 repo = self._factory.repo(wire)
912 repo = self._factory.repo(wire)
894 ctx = self._get_ctx(repo, revision)
913 ctx = self._get_ctx(repo, revision)
895 hgext_strip(
914 hgext_strip(
896 repo.baseui, repo, ctx.node(), update=update, backup=backup)
915 repo.baseui, repo, ctx.node(), update=update, backup=backup)
897
916
898 @reraise_safe_exceptions
917 @reraise_safe_exceptions
899 def get_unresolved_files(self, wire):
918 def get_unresolved_files(self, wire):
900 repo = self._factory.repo(wire)
919 repo = self._factory.repo(wire)
901
920
902 log.debug('Calculating unresolved files for repo: %s', repo)
921 log.debug('Calculating unresolved files for repo: %s', repo)
903 output = io.BytesIO()
922 output = io.BytesIO()
904
923
905 def write(data, **unused_kwargs):
924 def write(data, **unused_kwargs):
906 output.write(data)
925 output.write(data)
907
926
908 baseui = self._factory._create_config(wire['config'])
927 baseui = self._factory._create_config(wire['config'])
909 baseui.write = write
928 baseui.write = write
910
929
911 commands.resolve(baseui, repo, list=True)
930 commands.resolve(baseui, repo, list=True)
912 unresolved = output.getvalue().splitlines(0)
931 unresolved = output.getvalue().splitlines(0)
913 return unresolved
932 return unresolved
914
933
915 @reraise_safe_exceptions
934 @reraise_safe_exceptions
916 def merge(self, wire, revision):
935 def merge(self, wire, revision):
917 repo = self._factory.repo(wire)
936 repo = self._factory.repo(wire)
918 baseui = self._factory._create_config(wire['config'])
937 baseui = self._factory._create_config(wire['config'])
919 repo.ui.setconfig('ui', 'merge', 'internal:dump')
938 repo.ui.setconfig('ui', 'merge', 'internal:dump')
920
939
921 # In case of sub repositories are used mercurial prompts the user in
940 # In case of sub repositories are used mercurial prompts the user in
922 # case of merge conflicts or different sub repository sources. By
941 # case of merge conflicts or different sub repository sources. By
923 # setting the interactive flag to `False` mercurial doesn't prompt the
942 # setting the interactive flag to `False` mercurial doesn't prompt the
924 # used but instead uses a default value.
943 # used but instead uses a default value.
925 repo.ui.setconfig('ui', 'interactive', False)
944 repo.ui.setconfig('ui', 'interactive', False)
926 commands.merge(baseui, repo, rev=revision)
945 commands.merge(baseui, repo, rev=revision)
927
946
928 @reraise_safe_exceptions
947 @reraise_safe_exceptions
929 def merge_state(self, wire):
948 def merge_state(self, wire):
930 repo = self._factory.repo(wire)
949 repo = self._factory.repo(wire)
931 repo.ui.setconfig('ui', 'merge', 'internal:dump')
950 repo.ui.setconfig('ui', 'merge', 'internal:dump')
932
951
933 # In case of sub repositories are used mercurial prompts the user in
952 # In case of sub repositories are used mercurial prompts the user in
934 # case of merge conflicts or different sub repository sources. By
953 # case of merge conflicts or different sub repository sources. By
935 # setting the interactive flag to `False` mercurial doesn't prompt the
954 # setting the interactive flag to `False` mercurial doesn't prompt the
936 # used but instead uses a default value.
955 # used but instead uses a default value.
937 repo.ui.setconfig('ui', 'interactive', False)
956 repo.ui.setconfig('ui', 'interactive', False)
938 ms = hg_merge.mergestate(repo)
957 ms = hg_merge.mergestate(repo)
939 return [x for x in ms.unresolved()]
958 return [x for x in ms.unresolved()]
940
959
941 @reraise_safe_exceptions
960 @reraise_safe_exceptions
942 def commit(self, wire, message, username, close_branch=False):
961 def commit(self, wire, message, username, close_branch=False):
943 repo = self._factory.repo(wire)
962 repo = self._factory.repo(wire)
944 baseui = self._factory._create_config(wire['config'])
963 baseui = self._factory._create_config(wire['config'])
945 repo.ui.setconfig('ui', 'username', username)
964 repo.ui.setconfig('ui', 'username', username)
946 commands.commit(baseui, repo, message=message, close_branch=close_branch)
965 commands.commit(baseui, repo, message=message, close_branch=close_branch)
947
966
948 @reraise_safe_exceptions
967 @reraise_safe_exceptions
949 def rebase(self, wire, source=None, dest=None, abort=False):
968 def rebase(self, wire, source=None, dest=None, abort=False):
950 repo = self._factory.repo(wire)
969 repo = self._factory.repo(wire)
951 baseui = self._factory._create_config(wire['config'])
970 baseui = self._factory._create_config(wire['config'])
952 repo.ui.setconfig('ui', 'merge', 'internal:dump')
971 repo.ui.setconfig('ui', 'merge', 'internal:dump')
953 # In case of sub repositories are used mercurial prompts the user in
972 # In case of sub repositories are used mercurial prompts the user in
954 # case of merge conflicts or different sub repository sources. By
973 # case of merge conflicts or different sub repository sources. By
955 # setting the interactive flag to `False` mercurial doesn't prompt the
974 # setting the interactive flag to `False` mercurial doesn't prompt the
956 # used but instead uses a default value.
975 # used but instead uses a default value.
957 repo.ui.setconfig('ui', 'interactive', False)
976 repo.ui.setconfig('ui', 'interactive', False)
958 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
977 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
959
978
960 @reraise_safe_exceptions
979 @reraise_safe_exceptions
961 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
980 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
962 repo = self._factory.repo(wire)
981 repo = self._factory.repo(wire)
963 ctx = self._get_ctx(repo, revision)
982 ctx = self._get_ctx(repo, revision)
964 node = ctx.node()
983 node = ctx.node()
965
984
966 date = (tag_time, tag_timezone)
985 date = (tag_time, tag_timezone)
967 try:
986 try:
968 hg_tag.tag(repo, name, node, message, local, user, date)
987 hg_tag.tag(repo, name, node, message, local, user, date)
969 except Abort as e:
988 except Abort as e:
970 log.exception("Tag operation aborted")
989 log.exception("Tag operation aborted")
971 # Exception can contain unicode which we convert
990 # Exception can contain unicode which we convert
972 raise exceptions.AbortException(e)(repr(e))
991 raise exceptions.AbortException(e)(repr(e))
973
992
974 @reraise_safe_exceptions
993 @reraise_safe_exceptions
975 def bookmark(self, wire, bookmark, revision=None):
994 def bookmark(self, wire, bookmark, revision=None):
976 repo = self._factory.repo(wire)
995 repo = self._factory.repo(wire)
977 baseui = self._factory._create_config(wire['config'])
996 baseui = self._factory._create_config(wire['config'])
978 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
997 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
979
998
980 @reraise_safe_exceptions
999 @reraise_safe_exceptions
981 def install_hooks(self, wire, force=False):
1000 def install_hooks(self, wire, force=False):
982 # we don't need any special hooks for Mercurial
1001 # we don't need any special hooks for Mercurial
983 pass
1002 pass
984
1003
985 @reraise_safe_exceptions
1004 @reraise_safe_exceptions
986 def get_hooks_info(self, wire):
1005 def get_hooks_info(self, wire):
987 return {
1006 return {
988 'pre_version': vcsserver.__version__,
1007 'pre_version': vcsserver.__version__,
989 'post_version': vcsserver.__version__,
1008 'post_version': vcsserver.__version__,
990 }
1009 }
@@ -1,75 +1,79 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Mercurial libs compatibility
19 Mercurial libs compatibility
20 """
20 """
21
21
22 import mercurial
22 import mercurial
23 from mercurial import demandimport
23 from mercurial import demandimport
24 # patch demandimport, due to bug in mercurial when it always triggers
24 # patch demandimport, due to bug in mercurial when it always triggers
25 # demandimport.enable()
25 # demandimport.enable()
26 demandimport.enable = lambda *args, **kwargs: 1
26 demandimport.enable = lambda *args, **kwargs: 1
27
27
28 from mercurial import ui
28 from mercurial import ui
29 from mercurial import patch
29 from mercurial import patch
30 from mercurial import config
30 from mercurial import config
31 from mercurial import extensions
31 from mercurial import extensions
32 from mercurial import scmutil
32 from mercurial import scmutil
33 from mercurial import archival
33 from mercurial import archival
34 from mercurial import discovery
34 from mercurial import discovery
35 from mercurial import unionrepo
35 from mercurial import unionrepo
36 from mercurial import localrepo
36 from mercurial import localrepo
37 from mercurial import merge as hg_merge
37 from mercurial import merge as hg_merge
38 from mercurial import subrepo
38 from mercurial import subrepo
39 from mercurial import subrepoutil
39 from mercurial import subrepoutil
40 from mercurial import tags as hg_tag
40 from mercurial import tags as hg_tag
41
41
42 from mercurial.commands import clone, nullid, pull
42 from mercurial.commands import clone, nullid, pull
43 from mercurial.context import memctx, memfilectx
43 from mercurial.context import memctx, memfilectx
44 from mercurial.error import (
44 from mercurial.error import (
45 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
45 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
46 RequirementError, ProgrammingError)
46 RequirementError, ProgrammingError)
47 from mercurial.hgweb import hgweb_mod
47 from mercurial.hgweb import hgweb_mod
48 from mercurial.localrepo import instance
48 from mercurial.localrepo import instance
49 from mercurial.match import match
49 from mercurial.match import match
50 from mercurial.mdiff import diffopts
50 from mercurial.mdiff import diffopts
51 from mercurial.node import bin, hex
51 from mercurial.node import bin, hex
52 from mercurial.encoding import tolocal
52 from mercurial.encoding import tolocal
53 from mercurial.discovery import findcommonoutgoing
53 from mercurial.discovery import findcommonoutgoing
54 from mercurial.hg import peer
54 from mercurial.hg import peer
55 from mercurial.httppeer import makepeer
55 from mercurial.httppeer import makepeer
56 from mercurial.util import url as hg_url
56 from mercurial.util import url as hg_url
57 from mercurial.scmutil import revrange, revsymbol
57 from mercurial.scmutil import revrange, revsymbol
58 from mercurial.node import nullrev
58 from mercurial.node import nullrev
59 from mercurial import exchange
59 from mercurial import exchange
60 from hgext import largefiles
60 from hgext import largefiles
61
61
62 # those authnadlers are patched for python 2.6.5 bug an
62 # those authnadlers are patched for python 2.6.5 bug an
63 # infinit looping when given invalid resources
63 # infinit looping when given invalid resources
64 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
64 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
65
65
66
66
67 def get_ctx(repo, ref):
67 def get_ctx(repo, ref):
68 try:
68 try:
69 ctx = repo[ref]
69 ctx = repo[ref]
70 except ProgrammingError:
70 except ProgrammingError:
71 # we're unable to find the rev using a regular lookup, we fallback
71 # we're unable to find the rev using a regular lookup, we fallback
72 # to slower, but backward compat revsymbol usage
72 # to slower, but backward compat revsymbol usage
73 ctx = revsymbol(repo, ref)
73 ctx = revsymbol(repo, ref)
74
74 except (LookupError, RepoLookupError):
75 # Similar case as above but only for refs that are not numeric
76 if isinstance(ref, (int, long)):
77 raise
78 ctx = revsymbol(repo, ref)
75 return ctx
79 return ctx
@@ -1,722 +1,729 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2019 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import importlib
25 import importlib
26 import base64
26 import base64
27
27
28 from httplib import HTTPConnection
28 from httplib import HTTPConnection
29
29
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33 import simplejson as json
33 import simplejson as json
34
34
35 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver import exceptions, subprocessio, settings
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class HooksHttpClient(object):
40 class HooksHttpClient(object):
41 connection = None
41 connection = None
42
42
43 def __init__(self, hooks_uri):
43 def __init__(self, hooks_uri):
44 self.hooks_uri = hooks_uri
44 self.hooks_uri = hooks_uri
45
45
46 def __call__(self, method, extras):
46 def __call__(self, method, extras):
47 connection = HTTPConnection(self.hooks_uri)
47 connection = HTTPConnection(self.hooks_uri)
48 body = self._serialize(method, extras)
48 body = self._serialize(method, extras)
49 try:
49 try:
50 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
51 except Exception:
51 except Exception:
52 log.error('Connection failed on %s', connection)
52 log.error('Connection failed on %s', connection)
53 raise
53 raise
54 response = connection.getresponse()
54 response = connection.getresponse()
55
55
56 response_data = response.read()
56 response_data = response.read()
57
57
58 try:
58 try:
59 return json.loads(response_data)
59 return json.loads(response_data)
60 except Exception:
60 except Exception:
61 log.exception('Failed to decode hook response json data. '
61 log.exception('Failed to decode hook response json data. '
62 'response_code:%s, raw_data:%s',
62 'response_code:%s, raw_data:%s',
63 response.status, response_data)
63 response.status, response_data)
64 raise
64 raise
65
65
66 def _serialize(self, hook_name, extras):
66 def _serialize(self, hook_name, extras):
67 data = {
67 data = {
68 'method': hook_name,
68 'method': hook_name,
69 'extras': extras
69 'extras': extras
70 }
70 }
71 return json.dumps(data)
71 return json.dumps(data)
72
72
73
73
74 class HooksDummyClient(object):
74 class HooksDummyClient(object):
75 def __init__(self, hooks_module):
75 def __init__(self, hooks_module):
76 self._hooks_module = importlib.import_module(hooks_module)
76 self._hooks_module = importlib.import_module(hooks_module)
77
77
78 def __call__(self, hook_name, extras):
78 def __call__(self, hook_name, extras):
79 with self._hooks_module.Hooks() as hooks:
79 with self._hooks_module.Hooks() as hooks:
80 return getattr(hooks, hook_name)(extras)
80 return getattr(hooks, hook_name)(extras)
81
81
82
82
83 class HooksShadowRepoClient(object):
83 class HooksShadowRepoClient(object):
84
84
85 def __call__(self, hook_name, extras):
85 def __call__(self, hook_name, extras):
86 return {'output': '', 'status': 0}
86 return {'output': '', 'status': 0}
87
87
88
88
89 class RemoteMessageWriter(object):
89 class RemoteMessageWriter(object):
90 """Writer base class."""
90 """Writer base class."""
91 def write(self, message):
91 def write(self, message):
92 raise NotImplementedError()
92 raise NotImplementedError()
93
93
94
94
95 class HgMessageWriter(RemoteMessageWriter):
95 class HgMessageWriter(RemoteMessageWriter):
96 """Writer that knows how to send messages to mercurial clients."""
96 """Writer that knows how to send messages to mercurial clients."""
97
97
98 def __init__(self, ui):
98 def __init__(self, ui):
99 self.ui = ui
99 self.ui = ui
100
100
101 def write(self, message):
101 def write(self, message):
102 # TODO: Check why the quiet flag is set by default.
102 # TODO: Check why the quiet flag is set by default.
103 old = self.ui.quiet
103 old = self.ui.quiet
104 self.ui.quiet = False
104 self.ui.quiet = False
105 self.ui.status(message.encode('utf-8'))
105 self.ui.status(message.encode('utf-8'))
106 self.ui.quiet = old
106 self.ui.quiet = old
107
107
108
108
109 class GitMessageWriter(RemoteMessageWriter):
109 class GitMessageWriter(RemoteMessageWriter):
110 """Writer that knows how to send messages to git clients."""
110 """Writer that knows how to send messages to git clients."""
111
111
112 def __init__(self, stdout=None):
112 def __init__(self, stdout=None):
113 self.stdout = stdout or sys.stdout
113 self.stdout = stdout or sys.stdout
114
114
115 def write(self, message):
115 def write(self, message):
116 self.stdout.write(message.encode('utf-8'))
116 self.stdout.write(message.encode('utf-8'))
117
117
118
118
119 class SvnMessageWriter(RemoteMessageWriter):
119 class SvnMessageWriter(RemoteMessageWriter):
120 """Writer that knows how to send messages to svn clients."""
120 """Writer that knows how to send messages to svn clients."""
121
121
122 def __init__(self, stderr=None):
122 def __init__(self, stderr=None):
123 # SVN needs data sent to stderr for back-to-client messaging
123 # SVN needs data sent to stderr for back-to-client messaging
124 self.stderr = stderr or sys.stderr
124 self.stderr = stderr or sys.stderr
125
125
126 def write(self, message):
126 def write(self, message):
127 self.stderr.write(message.encode('utf-8'))
127 self.stderr.write(message.encode('utf-8'))
128
128
129
129
130 def _handle_exception(result):
130 def _handle_exception(result):
131 exception_class = result.get('exception')
131 exception_class = result.get('exception')
132 exception_traceback = result.get('exception_traceback')
132 exception_traceback = result.get('exception_traceback')
133
133
134 if exception_traceback:
134 if exception_traceback:
135 log.error('Got traceback from remote call:%s', exception_traceback)
135 log.error('Got traceback from remote call:%s', exception_traceback)
136
136
137 if exception_class == 'HTTPLockedRC':
137 if exception_class == 'HTTPLockedRC':
138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
139 elif exception_class == 'HTTPBranchProtected':
139 elif exception_class == 'HTTPBranchProtected':
140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
141 elif exception_class == 'RepositoryError':
141 elif exception_class == 'RepositoryError':
142 raise exceptions.VcsException()(*result['exception_args'])
142 raise exceptions.VcsException()(*result['exception_args'])
143 elif exception_class:
143 elif exception_class:
144 raise Exception('Got remote exception "%s" with args "%s"' %
144 raise Exception('Got remote exception "%s" with args "%s"' %
145 (exception_class, result['exception_args']))
145 (exception_class, result['exception_args']))
146
146
147
147
148 def _get_hooks_client(extras):
148 def _get_hooks_client(extras):
149 hooks_uri = extras.get('hooks_uri')
149 hooks_uri = extras.get('hooks_uri')
150 is_shadow_repo = extras.get('is_shadow_repo')
150 is_shadow_repo = extras.get('is_shadow_repo')
151 if hooks_uri:
151 if hooks_uri:
152 return HooksHttpClient(extras['hooks_uri'])
152 return HooksHttpClient(extras['hooks_uri'])
153 elif is_shadow_repo:
153 elif is_shadow_repo:
154 return HooksShadowRepoClient()
154 return HooksShadowRepoClient()
155 else:
155 else:
156 return HooksDummyClient(extras['hooks_module'])
156 return HooksDummyClient(extras['hooks_module'])
157
157
158
158
159 def _call_hook(hook_name, extras, writer):
159 def _call_hook(hook_name, extras, writer):
160 hooks_client = _get_hooks_client(extras)
160 hooks_client = _get_hooks_client(extras)
161 log.debug('Hooks, using client:%s', hooks_client)
161 log.debug('Hooks, using client:%s', hooks_client)
162 result = hooks_client(hook_name, extras)
162 result = hooks_client(hook_name, extras)
163 log.debug('Hooks got result: %s', result)
163 log.debug('Hooks got result: %s', result)
164
164
165 _handle_exception(result)
165 _handle_exception(result)
166 writer.write(result['output'])
166 writer.write(result['output'])
167
167
168 return result['status']
168 return result['status']
169
169
170
170
171 def _extras_from_ui(ui):
171 def _extras_from_ui(ui):
172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
173 if not hook_data:
173 if not hook_data:
174 # maybe it's inside environ ?
174 # maybe it's inside environ ?
175 env_hook_data = os.environ.get('RC_SCM_DATA')
175 env_hook_data = os.environ.get('RC_SCM_DATA')
176 if env_hook_data:
176 if env_hook_data:
177 hook_data = env_hook_data
177 hook_data = env_hook_data
178
178
179 extras = {}
179 extras = {}
180 if hook_data:
180 if hook_data:
181 extras = json.loads(hook_data)
181 extras = json.loads(hook_data)
182 return extras
182 return extras
183
183
184
184
185 def _rev_range_hash(repo, node, check_heads=False):
185 def _rev_range_hash(repo, node, check_heads=False):
186 from vcsserver.hgcompat import get_ctx
186 from vcsserver.hgcompat import get_ctx
187
187
188 commits = []
188 commits = []
189 revs = []
189 revs = []
190 start = get_ctx(repo, node).rev()
190 start = get_ctx(repo, node).rev()
191 end = len(repo)
191 end = len(repo)
192 for rev in range(start, end):
192 for rev in range(start, end):
193 revs.append(rev)
193 revs.append(rev)
194 ctx = get_ctx(repo, rev)
194 ctx = get_ctx(repo, rev)
195 commit_id = mercurial.node.hex(ctx.node())
195 commit_id = mercurial.node.hex(ctx.node())
196 branch = ctx.branch()
196 branch = ctx.branch()
197 commits.append((commit_id, branch))
197 commits.append((commit_id, branch))
198
198
199 parent_heads = []
199 parent_heads = []
200 if check_heads:
200 if check_heads:
201 parent_heads = _check_heads(repo, start, end, revs)
201 parent_heads = _check_heads(repo, start, end, revs)
202 return commits, parent_heads
202 return commits, parent_heads
203
203
204
204
205 def _check_heads(repo, start, end, commits):
205 def _check_heads(repo, start, end, commits):
206 from vcsserver.hgcompat import get_ctx
206 from vcsserver.hgcompat import get_ctx
207 changelog = repo.changelog
207 changelog = repo.changelog
208 parents = set()
208 parents = set()
209
209
210 for new_rev in commits:
210 for new_rev in commits:
211 for p in changelog.parentrevs(new_rev):
211 for p in changelog.parentrevs(new_rev):
212 if p == mercurial.node.nullrev:
212 if p == mercurial.node.nullrev:
213 continue
213 continue
214 if p < start:
214 if p < start:
215 parents.add(p)
215 parents.add(p)
216
216
217 for p in parents:
217 for p in parents:
218 branch = get_ctx(repo, p).branch()
218 branch = get_ctx(repo, p).branch()
219 # The heads descending from that parent, on the same branch
219 # The heads descending from that parent, on the same branch
220 parent_heads = set([p])
220 parent_heads = set([p])
221 reachable = set([p])
221 reachable = set([p])
222 for x in xrange(p + 1, end):
222 for x in xrange(p + 1, end):
223 if get_ctx(repo, x).branch() != branch:
223 if get_ctx(repo, x).branch() != branch:
224 continue
224 continue
225 for pp in changelog.parentrevs(x):
225 for pp in changelog.parentrevs(x):
226 if pp in reachable:
226 if pp in reachable:
227 reachable.add(x)
227 reachable.add(x)
228 parent_heads.discard(pp)
228 parent_heads.discard(pp)
229 parent_heads.add(x)
229 parent_heads.add(x)
230 # More than one head? Suggest merging
230 # More than one head? Suggest merging
231 if len(parent_heads) > 1:
231 if len(parent_heads) > 1:
232 return list(parent_heads)
232 return list(parent_heads)
233
233
234 return []
234 return []
235
235
236
236
237 def _get_git_env():
237 def _get_git_env():
238 env = {}
238 env = {}
239 for k, v in os.environ.items():
239 for k, v in os.environ.items():
240 if k.startswith('GIT'):
240 if k.startswith('GIT'):
241 env[k] = v
241 env[k] = v
242
242
243 # serialized version
243 # serialized version
244 return [(k, v) for k, v in env.items()]
244 return [(k, v) for k, v in env.items()]
245
245
246
246
247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
248 env = {}
248 env = {}
249 for k, v in os.environ.items():
249 for k, v in os.environ.items():
250 if k.startswith('HG'):
250 if k.startswith('HG'):
251 env[k] = v
251 env[k] = v
252
252
253 env['HG_NODE'] = old_rev
253 env['HG_NODE'] = old_rev
254 env['HG_NODE_LAST'] = new_rev
254 env['HG_NODE_LAST'] = new_rev
255 env['HG_TXNID'] = txnid
255 env['HG_TXNID'] = txnid
256 env['HG_PENDING'] = repo_path
256 env['HG_PENDING'] = repo_path
257
257
258 return [(k, v) for k, v in env.items()]
258 return [(k, v) for k, v in env.items()]
259
259
260
260
261 def repo_size(ui, repo, **kwargs):
261 def repo_size(ui, repo, **kwargs):
262 extras = _extras_from_ui(ui)
262 extras = _extras_from_ui(ui)
263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
264
264
265
265
266 def pre_pull(ui, repo, **kwargs):
266 def pre_pull(ui, repo, **kwargs):
267 extras = _extras_from_ui(ui)
267 extras = _extras_from_ui(ui)
268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
269
269
270
270
271 def pre_pull_ssh(ui, repo, **kwargs):
271 def pre_pull_ssh(ui, repo, **kwargs):
272 extras = _extras_from_ui(ui)
272 extras = _extras_from_ui(ui)
273 if extras and extras.get('SSH'):
273 if extras and extras.get('SSH'):
274 return pre_pull(ui, repo, **kwargs)
274 return pre_pull(ui, repo, **kwargs)
275 return 0
275 return 0
276
276
277
277
278 def post_pull(ui, repo, **kwargs):
278 def post_pull(ui, repo, **kwargs):
279 extras = _extras_from_ui(ui)
279 extras = _extras_from_ui(ui)
280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
281
281
282
282
283 def post_pull_ssh(ui, repo, **kwargs):
283 def post_pull_ssh(ui, repo, **kwargs):
284 extras = _extras_from_ui(ui)
284 extras = _extras_from_ui(ui)
285 if extras and extras.get('SSH'):
285 if extras and extras.get('SSH'):
286 return post_pull(ui, repo, **kwargs)
286 return post_pull(ui, repo, **kwargs)
287 return 0
287 return 0
288
288
289
289
290 def pre_push(ui, repo, node=None, **kwargs):
290 def pre_push(ui, repo, node=None, **kwargs):
291 """
291 """
292 Mercurial pre_push hook
292 Mercurial pre_push hook
293 """
293 """
294 extras = _extras_from_ui(ui)
294 extras = _extras_from_ui(ui)
295 detect_force_push = extras.get('detect_force_push')
295 detect_force_push = extras.get('detect_force_push')
296
296
297 rev_data = []
297 rev_data = []
298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
299 branches = collections.defaultdict(list)
299 branches = collections.defaultdict(list)
300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
301 for commit_id, branch in commits:
301 for commit_id, branch in commits:
302 branches[branch].append(commit_id)
302 branches[branch].append(commit_id)
303
303
304 for branch, commits in branches.items():
304 for branch, commits in branches.items():
305 old_rev = kwargs.get('node_last') or commits[0]
305 old_rev = kwargs.get('node_last') or commits[0]
306 rev_data.append({
306 rev_data.append({
307 'total_commits': len(commits),
307 'total_commits': len(commits),
308 'old_rev': old_rev,
308 'old_rev': old_rev,
309 'new_rev': commits[-1],
309 'new_rev': commits[-1],
310 'ref': '',
310 'ref': '',
311 'type': 'branch',
311 'type': 'branch',
312 'name': branch,
312 'name': branch,
313 })
313 })
314
314
315 for push_ref in rev_data:
315 for push_ref in rev_data:
316 push_ref['multiple_heads'] = _heads
316 push_ref['multiple_heads'] = _heads
317
317
318 repo_path = os.path.join(
318 repo_path = os.path.join(
319 extras.get('repo_store', ''), extras.get('repository', ''))
319 extras.get('repo_store', ''), extras.get('repository', ''))
320 push_ref['hg_env'] = _get_hg_env(
320 push_ref['hg_env'] = _get_hg_env(
321 old_rev=push_ref['old_rev'],
321 old_rev=push_ref['old_rev'],
322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
323 repo_path=repo_path)
323 repo_path=repo_path)
324
324
325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
326 extras['commit_ids'] = rev_data
326 extras['commit_ids'] = rev_data
327
327
328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
329
329
330
330
331 def pre_push_ssh(ui, repo, node=None, **kwargs):
331 def pre_push_ssh(ui, repo, node=None, **kwargs):
332 extras = _extras_from_ui(ui)
332 extras = _extras_from_ui(ui)
333 if extras.get('SSH'):
333 if extras.get('SSH'):
334 return pre_push(ui, repo, node, **kwargs)
334 return pre_push(ui, repo, node, **kwargs)
335
335
336 return 0
336 return 0
337
337
338
338
339 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
339 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
340 """
340 """
341 Mercurial pre_push hook for SSH
341 Mercurial pre_push hook for SSH
342 """
342 """
343 extras = _extras_from_ui(ui)
343 extras = _extras_from_ui(ui)
344 if extras.get('SSH'):
344 if extras.get('SSH'):
345 permission = extras['SSH_PERMISSIONS']
345 permission = extras['SSH_PERMISSIONS']
346
346
347 if 'repository.write' == permission or 'repository.admin' == permission:
347 if 'repository.write' == permission or 'repository.admin' == permission:
348 return 0
348 return 0
349
349
350 # non-zero ret code
350 # non-zero ret code
351 return 1
351 return 1
352
352
353 return 0
353 return 0
354
354
355
355
356 def post_push(ui, repo, node, **kwargs):
356 def post_push(ui, repo, node, **kwargs):
357 """
357 """
358 Mercurial post_push hook
358 Mercurial post_push hook
359 """
359 """
360 extras = _extras_from_ui(ui)
360 extras = _extras_from_ui(ui)
361
361
362 commit_ids = []
362 commit_ids = []
363 branches = []
363 branches = []
364 bookmarks = []
364 bookmarks = []
365 tags = []
365 tags = []
366
366
367 commits, _heads = _rev_range_hash(repo, node)
367 commits, _heads = _rev_range_hash(repo, node)
368 for commit_id, branch in commits:
368 for commit_id, branch in commits:
369 commit_ids.append(commit_id)
369 commit_ids.append(commit_id)
370 if branch not in branches:
370 if branch not in branches:
371 branches.append(branch)
371 branches.append(branch)
372
372
373 if hasattr(ui, '_rc_pushkey_branches'):
373 if hasattr(ui, '_rc_pushkey_branches'):
374 bookmarks = ui._rc_pushkey_branches
374 bookmarks = ui._rc_pushkey_branches
375
375
376 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
376 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
377 extras['commit_ids'] = commit_ids
377 extras['commit_ids'] = commit_ids
378 extras['new_refs'] = {
378 extras['new_refs'] = {
379 'branches': branches,
379 'branches': branches,
380 'bookmarks': bookmarks,
380 'bookmarks': bookmarks,
381 'tags': tags
381 'tags': tags
382 }
382 }
383
383
384 return _call_hook('post_push', extras, HgMessageWriter(ui))
384 return _call_hook('post_push', extras, HgMessageWriter(ui))
385
385
386
386
387 def post_push_ssh(ui, repo, node, **kwargs):
387 def post_push_ssh(ui, repo, node, **kwargs):
388 """
388 """
389 Mercurial post_push hook for SSH
389 Mercurial post_push hook for SSH
390 """
390 """
391 if _extras_from_ui(ui).get('SSH'):
391 if _extras_from_ui(ui).get('SSH'):
392 return post_push(ui, repo, node, **kwargs)
392 return post_push(ui, repo, node, **kwargs)
393 return 0
393 return 0
394
394
395
395
396 def key_push(ui, repo, **kwargs):
396 def key_push(ui, repo, **kwargs):
397 from vcsserver.hgcompat import get_ctx
397 from vcsserver.hgcompat import get_ctx
398 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
398 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
399 # store new bookmarks in our UI object propagated later to post_push
399 # store new bookmarks in our UI object propagated later to post_push
400 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
400 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
401 return
401 return
402
402
403
403
404 # backward compat
404 # backward compat
405 log_pull_action = post_pull
405 log_pull_action = post_pull
406
406
407 # backward compat
407 # backward compat
408 log_push_action = post_push
408 log_push_action = post_push
409
409
410
410
411 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
411 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
412 """
412 """
413 Old hook name: keep here for backward compatibility.
413 Old hook name: keep here for backward compatibility.
414
414
415 This is only required when the installed git hooks are not upgraded.
415 This is only required when the installed git hooks are not upgraded.
416 """
416 """
417 pass
417 pass
418
418
419
419
420 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
420 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
421 """
421 """
422 Old hook name: keep here for backward compatibility.
422 Old hook name: keep here for backward compatibility.
423
423
424 This is only required when the installed git hooks are not upgraded.
424 This is only required when the installed git hooks are not upgraded.
425 """
425 """
426 pass
426 pass
427
427
428
428
429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
430
430
431
431
432 def git_pre_pull(extras):
432 def git_pre_pull(extras):
433 """
433 """
434 Pre pull hook.
434 Pre pull hook.
435
435
436 :param extras: dictionary containing the keys defined in simplevcs
436 :param extras: dictionary containing the keys defined in simplevcs
437 :type extras: dict
437 :type extras: dict
438
438
439 :return: status code of the hook. 0 for success.
439 :return: status code of the hook. 0 for success.
440 :rtype: int
440 :rtype: int
441 """
441 """
442 if 'pull' not in extras['hooks']:
442 if 'pull' not in extras['hooks']:
443 return HookResponse(0, '')
443 return HookResponse(0, '')
444
444
445 stdout = io.BytesIO()
445 stdout = io.BytesIO()
446 try:
446 try:
447 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
447 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
448 except Exception as error:
448 except Exception as error:
449 status = 128
449 status = 128
450 stdout.write('ERROR: %s\n' % str(error))
450 stdout.write('ERROR: %s\n' % str(error))
451
451
452 return HookResponse(status, stdout.getvalue())
452 return HookResponse(status, stdout.getvalue())
453
453
454
454
455 def git_post_pull(extras):
455 def git_post_pull(extras):
456 """
456 """
457 Post pull hook.
457 Post pull hook.
458
458
459 :param extras: dictionary containing the keys defined in simplevcs
459 :param extras: dictionary containing the keys defined in simplevcs
460 :type extras: dict
460 :type extras: dict
461
461
462 :return: status code of the hook. 0 for success.
462 :return: status code of the hook. 0 for success.
463 :rtype: int
463 :rtype: int
464 """
464 """
465 if 'pull' not in extras['hooks']:
465 if 'pull' not in extras['hooks']:
466 return HookResponse(0, '')
466 return HookResponse(0, '')
467
467
468 stdout = io.BytesIO()
468 stdout = io.BytesIO()
469 try:
469 try:
470 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
470 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
471 except Exception as error:
471 except Exception as error:
472 status = 128
472 status = 128
473 stdout.write('ERROR: %s\n' % error)
473 stdout.write('ERROR: %s\n' % error)
474
474
475 return HookResponse(status, stdout.getvalue())
475 return HookResponse(status, stdout.getvalue())
476
476
477
477
478 def _parse_git_ref_lines(revision_lines):
478 def _parse_git_ref_lines(revision_lines):
479 rev_data = []
479 rev_data = []
480 for revision_line in revision_lines or []:
480 for revision_line in revision_lines or []:
481 old_rev, new_rev, ref = revision_line.strip().split(' ')
481 old_rev, new_rev, ref = revision_line.strip().split(' ')
482 ref_data = ref.split('/', 2)
482 ref_data = ref.split('/', 2)
483 if ref_data[1] in ('tags', 'heads'):
483 if ref_data[1] in ('tags', 'heads'):
484 rev_data.append({
484 rev_data.append({
485 # NOTE(marcink):
485 # NOTE(marcink):
486 # we're unable to tell total_commits for git at this point
486 # we're unable to tell total_commits for git at this point
487 # but we set the variable for consistency with GIT
487 # but we set the variable for consistency with GIT
488 'total_commits': -1,
488 'total_commits': -1,
489 'old_rev': old_rev,
489 'old_rev': old_rev,
490 'new_rev': new_rev,
490 'new_rev': new_rev,
491 'ref': ref,
491 'ref': ref,
492 'type': ref_data[1],
492 'type': ref_data[1],
493 'name': ref_data[2],
493 'name': ref_data[2],
494 })
494 })
495 return rev_data
495 return rev_data
496
496
497
497
498 def git_pre_receive(unused_repo_path, revision_lines, env):
498 def git_pre_receive(unused_repo_path, revision_lines, env):
499 """
499 """
500 Pre push hook.
500 Pre push hook.
501
501
502 :param extras: dictionary containing the keys defined in simplevcs
502 :param extras: dictionary containing the keys defined in simplevcs
503 :type extras: dict
503 :type extras: dict
504
504
505 :return: status code of the hook. 0 for success.
505 :return: status code of the hook. 0 for success.
506 :rtype: int
506 :rtype: int
507 """
507 """
508 extras = json.loads(env['RC_SCM_DATA'])
508 extras = json.loads(env['RC_SCM_DATA'])
509 rev_data = _parse_git_ref_lines(revision_lines)
509 rev_data = _parse_git_ref_lines(revision_lines)
510 if 'push' not in extras['hooks']:
510 if 'push' not in extras['hooks']:
511 return 0
511 return 0
512 empty_commit_id = '0' * 40
512 empty_commit_id = '0' * 40
513
513
514 detect_force_push = extras.get('detect_force_push')
514 detect_force_push = extras.get('detect_force_push')
515
515
516 for push_ref in rev_data:
516 for push_ref in rev_data:
517 # store our git-env which holds the temp store
517 # store our git-env which holds the temp store
518 push_ref['git_env'] = _get_git_env()
518 push_ref['git_env'] = _get_git_env()
519 push_ref['pruned_sha'] = ''
519 push_ref['pruned_sha'] = ''
520 if not detect_force_push:
520 if not detect_force_push:
521 # don't check for forced-push when we don't need to
521 # don't check for forced-push when we don't need to
522 continue
522 continue
523
523
524 type_ = push_ref['type']
524 type_ = push_ref['type']
525 new_branch = push_ref['old_rev'] == empty_commit_id
525 new_branch = push_ref['old_rev'] == empty_commit_id
526 delete_branch = push_ref['new_rev'] == empty_commit_id
526 delete_branch = push_ref['new_rev'] == empty_commit_id
527 if type_ == 'heads' and not (new_branch or delete_branch):
527 if type_ == 'heads' and not (new_branch or delete_branch):
528 old_rev = push_ref['old_rev']
528 old_rev = push_ref['old_rev']
529 new_rev = push_ref['new_rev']
529 new_rev = push_ref['new_rev']
530 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
530 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
531 stdout, stderr = subprocessio.run_command(
531 stdout, stderr = subprocessio.run_command(
532 cmd, env=os.environ.copy())
532 cmd, env=os.environ.copy())
533 # means we're having some non-reachable objects, this forced push was used
533 # means we're having some non-reachable objects, this forced push was used
534 if stdout:
534 if stdout:
535 push_ref['pruned_sha'] = stdout.splitlines()
535 push_ref['pruned_sha'] = stdout.splitlines()
536
536
537 extras['hook_type'] = 'pre_receive'
537 extras['hook_type'] = 'pre_receive'
538 extras['commit_ids'] = rev_data
538 extras['commit_ids'] = rev_data
539 return _call_hook('pre_push', extras, GitMessageWriter())
539 return _call_hook('pre_push', extras, GitMessageWriter())
540
540
541
541
542 def git_post_receive(unused_repo_path, revision_lines, env):
542 def git_post_receive(unused_repo_path, revision_lines, env):
543 """
543 """
544 Post push hook.
544 Post push hook.
545
545
546 :param extras: dictionary containing the keys defined in simplevcs
546 :param extras: dictionary containing the keys defined in simplevcs
547 :type extras: dict
547 :type extras: dict
548
548
549 :return: status code of the hook. 0 for success.
549 :return: status code of the hook. 0 for success.
550 :rtype: int
550 :rtype: int
551 """
551 """
552 extras = json.loads(env['RC_SCM_DATA'])
552 extras = json.loads(env['RC_SCM_DATA'])
553 if 'push' not in extras['hooks']:
553 if 'push' not in extras['hooks']:
554 return 0
554 return 0
555
555
556 rev_data = _parse_git_ref_lines(revision_lines)
556 rev_data = _parse_git_ref_lines(revision_lines)
557
557
558 git_revs = []
558 git_revs = []
559
559
560 # N.B.(skreft): it is ok to just call git, as git before calling a
560 # N.B.(skreft): it is ok to just call git, as git before calling a
561 # subcommand sets the PATH environment variable so that it point to the
561 # subcommand sets the PATH environment variable so that it point to the
562 # correct version of the git executable.
562 # correct version of the git executable.
563 empty_commit_id = '0' * 40
563 empty_commit_id = '0' * 40
564 branches = []
564 branches = []
565 tags = []
565 tags = []
566 for push_ref in rev_data:
566 for push_ref in rev_data:
567 type_ = push_ref['type']
567 type_ = push_ref['type']
568
568
569 if type_ == 'heads':
569 if type_ == 'heads':
570 if push_ref['old_rev'] == empty_commit_id:
570 if push_ref['old_rev'] == empty_commit_id:
571 # starting new branch case
571 # starting new branch case
572 if push_ref['name'] not in branches:
572 if push_ref['name'] not in branches:
573 branches.append(push_ref['name'])
573 branches.append(push_ref['name'])
574
574
575 # Fix up head revision if needed
575 # Fix up head revision if needed
576 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
576 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
577 try:
577 try:
578 subprocessio.run_command(cmd, env=os.environ.copy())
578 subprocessio.run_command(cmd, env=os.environ.copy())
579 except Exception:
579 except Exception:
580 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
580 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
581 'refs/heads/%s' % push_ref['name']]
581 'refs/heads/%s' % push_ref['name']]
582 print("Setting default branch to %s" % push_ref['name'])
582 print("Setting default branch to %s" % push_ref['name'])
583 subprocessio.run_command(cmd, env=os.environ.copy())
583 subprocessio.run_command(cmd, env=os.environ.copy())
584
584
585 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
585 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
586 '--format=%(refname)', 'refs/heads/*']
586 '--format=%(refname)', 'refs/heads/*']
587 stdout, stderr = subprocessio.run_command(
587 stdout, stderr = subprocessio.run_command(
588 cmd, env=os.environ.copy())
588 cmd, env=os.environ.copy())
589 heads = stdout
589 heads = stdout
590 heads = heads.replace(push_ref['ref'], '')
590 heads = heads.replace(push_ref['ref'], '')
591 heads = ' '.join(head for head
591 heads = ' '.join(head for head
592 in heads.splitlines() if head) or '.'
592 in heads.splitlines() if head) or '.'
593 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
593 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
594 '--pretty=format:%H', '--', push_ref['new_rev'],
594 '--pretty=format:%H', '--', push_ref['new_rev'],
595 '--not', heads]
595 '--not', heads]
596 stdout, stderr = subprocessio.run_command(
596 stdout, stderr = subprocessio.run_command(
597 cmd, env=os.environ.copy())
597 cmd, env=os.environ.copy())
598 git_revs.extend(stdout.splitlines())
598 git_revs.extend(stdout.splitlines())
599 elif push_ref['new_rev'] == empty_commit_id:
599 elif push_ref['new_rev'] == empty_commit_id:
600 # delete branch case
600 # delete branch case
601 git_revs.append('delete_branch=>%s' % push_ref['name'])
601 git_revs.append('delete_branch=>%s' % push_ref['name'])
602 else:
602 else:
603 if push_ref['name'] not in branches:
603 if push_ref['name'] not in branches:
604 branches.append(push_ref['name'])
604 branches.append(push_ref['name'])
605
605
606 cmd = [settings.GIT_EXECUTABLE, 'log',
606 cmd = [settings.GIT_EXECUTABLE, 'log',
607 '{old_rev}..{new_rev}'.format(**push_ref),
607 '{old_rev}..{new_rev}'.format(**push_ref),
608 '--reverse', '--pretty=format:%H']
608 '--reverse', '--pretty=format:%H']
609 stdout, stderr = subprocessio.run_command(
609 stdout, stderr = subprocessio.run_command(
610 cmd, env=os.environ.copy())
610 cmd, env=os.environ.copy())
611 git_revs.extend(stdout.splitlines())
611 git_revs.extend(stdout.splitlines())
612 elif type_ == 'tags':
612 elif type_ == 'tags':
613 if push_ref['name'] not in tags:
613 if push_ref['name'] not in tags:
614 tags.append(push_ref['name'])
614 tags.append(push_ref['name'])
615 git_revs.append('tag=>%s' % push_ref['name'])
615 git_revs.append('tag=>%s' % push_ref['name'])
616
616
617 extras['hook_type'] = 'post_receive'
617 extras['hook_type'] = 'post_receive'
618 extras['commit_ids'] = git_revs
618 extras['commit_ids'] = git_revs
619 extras['new_refs'] = {
619 extras['new_refs'] = {
620 'branches': branches,
620 'branches': branches,
621 'bookmarks': [],
621 'bookmarks': [],
622 'tags': tags,
622 'tags': tags,
623 }
623 }
624
624
625 if 'repo_size' in extras['hooks']:
625 if 'repo_size' in extras['hooks']:
626 try:
626 try:
627 _call_hook('repo_size', extras, GitMessageWriter())
627 _call_hook('repo_size', extras, GitMessageWriter())
628 except:
628 except:
629 pass
629 pass
630
630
631 return _call_hook('post_push', extras, GitMessageWriter())
631 return _call_hook('post_push', extras, GitMessageWriter())
632
632
633
633
634 def _get_extras_from_txn_id(path, txn_id):
634 def _get_extras_from_txn_id(path, txn_id):
635 extras = {}
635 extras = {}
636 try:
636 try:
637 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
637 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
638 '-t', txn_id,
638 '-t', txn_id,
639 '--revprop', path, 'rc-scm-extras']
639 '--revprop', path, 'rc-scm-extras']
640 stdout, stderr = subprocessio.run_command(
640 stdout, stderr = subprocessio.run_command(
641 cmd, env=os.environ.copy())
641 cmd, env=os.environ.copy())
642 extras = json.loads(base64.urlsafe_b64decode(stdout))
642 extras = json.loads(base64.urlsafe_b64decode(stdout))
643 except Exception:
643 except Exception:
644 log.exception('Failed to extract extras info from txn_id')
644 log.exception('Failed to extract extras info from txn_id')
645
645
646 return extras
646 return extras
647
647
648
648
649 def _get_extras_from_commit_id(commit_id, path):
649 def _get_extras_from_commit_id(commit_id, path):
650 extras = {}
650 extras = {}
651 try:
651 try:
652 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
652 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
653 '-r', commit_id,
653 '-r', commit_id,
654 '--revprop', path, 'rc-scm-extras']
654 '--revprop', path, 'rc-scm-extras']
655 stdout, stderr = subprocessio.run_command(
655 stdout, stderr = subprocessio.run_command(
656 cmd, env=os.environ.copy())
656 cmd, env=os.environ.copy())
657 extras = json.loads(base64.urlsafe_b64decode(stdout))
657 extras = json.loads(base64.urlsafe_b64decode(stdout))
658 except Exception:
658 except Exception:
659 log.exception('Failed to extract extras info from commit_id')
659 log.exception('Failed to extract extras info from commit_id')
660
660
661 return extras
661 return extras
662
662
663
663
664 def svn_pre_commit(repo_path, commit_data, env):
664 def svn_pre_commit(repo_path, commit_data, env):
665 path, txn_id = commit_data
665 path, txn_id = commit_data
666 branches = []
666 branches = []
667 tags = []
667 tags = []
668
668
669 if env.get('RC_SCM_DATA'):
669 if env.get('RC_SCM_DATA'):
670 extras = json.loads(env['RC_SCM_DATA'])
670 extras = json.loads(env['RC_SCM_DATA'])
671 else:
671 else:
672 # fallback method to read from TXN-ID stored data
672 # fallback method to read from TXN-ID stored data
673 extras = _get_extras_from_txn_id(path, txn_id)
673 extras = _get_extras_from_txn_id(path, txn_id)
674 if not extras:
674 if not extras:
675 return 0
675 return 0
676
676
677 extras['hook_type'] = 'pre_commit'
677 extras['hook_type'] = 'pre_commit'
678 extras['commit_ids'] = [txn_id]
678 extras['commit_ids'] = [txn_id]
679 extras['txn_id'] = txn_id
679 extras['txn_id'] = txn_id
680 extras['new_refs'] = {
680 extras['new_refs'] = {
681 'total_commits': 1,
681 'total_commits': 1,
682 'branches': branches,
682 'branches': branches,
683 'bookmarks': [],
683 'bookmarks': [],
684 'tags': tags,
684 'tags': tags,
685 }
685 }
686
686
687 return _call_hook('pre_push', extras, SvnMessageWriter())
687 return _call_hook('pre_push', extras, SvnMessageWriter())
688
688
689
689
690 def svn_post_commit(repo_path, commit_data, env):
690 def svn_post_commit(repo_path, commit_data, env):
691 """
691 """
692 commit_data is path, rev, txn_id
692 commit_data is path, rev, txn_id
693 """
693 """
694 path, commit_id, txn_id = commit_data
694 if len(commit_data) == 3:
695 path, commit_id, txn_id = commit_data
696 elif len(commit_data) == 2:
697 log.error('Failed to extract txn_id from commit_data using legacy method. '
698 'Some functionality might be limited')
699 path, commit_id = commit_data
700 txn_id = None
701
695 branches = []
702 branches = []
696 tags = []
703 tags = []
697
704
698 if env.get('RC_SCM_DATA'):
705 if env.get('RC_SCM_DATA'):
699 extras = json.loads(env['RC_SCM_DATA'])
706 extras = json.loads(env['RC_SCM_DATA'])
700 else:
707 else:
701 # fallback method to read from TXN-ID stored data
708 # fallback method to read from TXN-ID stored data
702 extras = _get_extras_from_commit_id(commit_id, path)
709 extras = _get_extras_from_commit_id(commit_id, path)
703 if not extras:
710 if not extras:
704 return 0
711 return 0
705
712
706 extras['hook_type'] = 'post_commit'
713 extras['hook_type'] = 'post_commit'
707 extras['commit_ids'] = [commit_id]
714 extras['commit_ids'] = [commit_id]
708 extras['txn_id'] = txn_id
715 extras['txn_id'] = txn_id
709 extras['new_refs'] = {
716 extras['new_refs'] = {
710 'branches': branches,
717 'branches': branches,
711 'bookmarks': [],
718 'bookmarks': [],
712 'tags': tags,
719 'tags': tags,
713 'total_commits': 1,
720 'total_commits': 1,
714 }
721 }
715
722
716 if 'repo_size' in extras['hooks']:
723 if 'repo_size' in extras['hooks']:
717 try:
724 try:
718 _call_hook('repo_size', extras, SvnMessageWriter())
725 _call_hook('repo_size', extras, SvnMessageWriter())
719 except Exception:
726 except Exception:
720 pass
727 pass
721
728
722 return _call_hook('post_push', extras, SvnMessageWriter())
729 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,89 +1,110 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import logging
17 import logging
18 import hashlib
18 import hashlib
19
19
20 log = logging.getLogger(__name__)
20 log = logging.getLogger(__name__)
21
21
22
22
23 def safe_int(val, default=None):
23 def safe_int(val, default=None):
24 """
24 """
25 Returns int() of val if val is not convertable to int use default
25 Returns int() of val if val is not convertable to int use default
26 instead
26 instead
27
27
28 :param val:
28 :param val:
29 :param default:
29 :param default:
30 """
30 """
31
31
32 try:
32 try:
33 val = int(val)
33 val = int(val)
34 except (ValueError, TypeError):
34 except (ValueError, TypeError):
35 val = default
35 val = default
36
36
37 return val
37 return val
38
38
39
39
40 def safe_str(unicode_, to_encoding=['utf8']):
40 def safe_str(unicode_, to_encoding=None):
41 """
41 """
42 safe str function. Does few trick to turn unicode_ into string
42 safe str function. Does few trick to turn unicode_ into string
43
43
44 In case of UnicodeEncodeError, we try to return it with encoding detected
45 by chardet library if it fails fallback to string with errors replaced
46
47 :param unicode_: unicode to encode
44 :param unicode_: unicode to encode
45 :param to_encoding: encode to this type UTF8 default
48 :rtype: str
46 :rtype: str
49 :returns: str object
47 :returns: str object
50 """
48 """
51
49 to_encoding = to_encoding or ['utf8']
52 # if it's not basestr cast to str
50 # if it's not basestr cast to str
53 if not isinstance(unicode_, basestring):
51 if not isinstance(unicode_, basestring):
54 return str(unicode_)
52 return str(unicode_)
55
53
56 if isinstance(unicode_, str):
54 if isinstance(unicode_, str):
57 return unicode_
55 return unicode_
58
56
59 if not isinstance(to_encoding, (list, tuple)):
57 if not isinstance(to_encoding, (list, tuple)):
60 to_encoding = [to_encoding]
58 to_encoding = [to_encoding]
61
59
62 for enc in to_encoding:
60 for enc in to_encoding:
63 try:
61 try:
64 return unicode_.encode(enc)
62 return unicode_.encode(enc)
65 except UnicodeEncodeError:
63 except UnicodeEncodeError:
66 pass
64 pass
67
65
66 return unicode_.encode(to_encoding[0], 'replace')
67
68
69 def safe_unicode(str_, from_encoding=None):
70 """
71 safe unicode function. Does few trick to turn str_ into unicode
72
73 :param str_: string to decode
74 :param from_encoding: encode from this type UTF8 default
75 :rtype: unicode
76 :returns: unicode object
77 """
78 from_encoding = from_encoding or ['utf8']
79
80 if isinstance(str_, unicode):
81 return str_
82
83 if not isinstance(from_encoding, (list, tuple)):
84 from_encoding = [from_encoding]
85
68 try:
86 try:
69 import chardet
87 return unicode(str_)
70 encoding = chardet.detect(unicode_)['encoding']
88 except UnicodeDecodeError:
71 if encoding is None:
89 pass
72 raise UnicodeEncodeError()
73
90
74 return unicode_.encode(encoding)
91 for enc in from_encoding:
75 except (ImportError, UnicodeEncodeError):
92 try:
76 return unicode_.encode(to_encoding[0], 'replace')
93 return unicode(str_, enc)
94 except UnicodeDecodeError:
95 pass
96
97 return unicode(str_, from_encoding[0], 'replace')
77
98
78
99
79 class AttributeDict(dict):
100 class AttributeDict(dict):
80 def __getattr__(self, attr):
101 def __getattr__(self, attr):
81 return self.get(attr, None)
102 return self.get(attr, None)
82 __setattr__ = dict.__setitem__
103 __setattr__ = dict.__setitem__
83 __delattr__ = dict.__delitem__
104 __delattr__ = dict.__delitem__
84
105
85
106
86 def sha1(val):
107 def sha1(val):
87 return hashlib.sha1(val).hexdigest()
108 return hashlib.sha1(val).hexdigest()
88
109
89
110
General Comments 0
You need to be logged in to leave comments. Login now