##// END OF EJS Templates
caches: use region conf from base class
marcink -
r752:a6de52aa default
parent child Browse files
Show More
@@ -1,1085 +1,1084 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib
25 import urllib
26 import urllib2
26 import urllib2
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from dulwich import index, objects
32 from dulwich import index, objects
33 from dulwich.client import HttpGitClient, LocalGitClient
33 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.errors import (
34 from dulwich.errors import (
35 NotGitRepository, ChecksumMismatch, WrongObjectException,
35 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 MissingCommitError, ObjectMissing, HangupException,
36 MissingCommitError, ObjectMissing, HangupException,
37 UnexpectedCommandError)
37 UnexpectedCommandError)
38 from dulwich.repo import Repo as DulwichRepo
38 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.server import update_server_info
39 from dulwich.server import update_server_info
40
40
41 from vcsserver import exceptions, settings, subprocessio
41 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver.utils import safe_str, safe_int
42 from vcsserver.utils import safe_str, safe_int
43 from vcsserver.base import RepoFactory, obfuscate_qs
43 from vcsserver.base import RepoFactory, obfuscate_qs
44 from vcsserver.hgcompat import (
44 from vcsserver.hgcompat import (
45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 from vcsserver.git_lfs.lib import LFSOidStore
46 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.vcs_base import RemoteBase
47 from vcsserver.vcs_base import RemoteBase
48
48
49 DIR_STAT = stat.S_IFDIR
49 DIR_STAT = stat.S_IFDIR
50 FILE_MODE = stat.S_IFMT
50 FILE_MODE = stat.S_IFMT
51 GIT_LINK = objects.S_IFGITLINK
51 GIT_LINK = objects.S_IFGITLINK
52 PEELED_REF_MARKER = '^{}'
52 PEELED_REF_MARKER = '^{}'
53
53
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
def str_to_dulwich(value):
    """
    Decode *value* to `unicode` using the configured wire encoding.

    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    wire_encoding = settings.WIRE_ENCODING
    return value.decode(wire_encoding)
63
63
64
64
def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral."""

    @wraps(func)
    def wrapped(*args, **kwargs):
        lookup_errors = (
            ChecksumMismatch, WrongObjectException, MissingCommitError,
            ObjectMissing,
        )
        try:
            return func(*args, **kwargs)
        except lookup_errors as err:
            raise exceptions.LookupException(org_exc=err)(safe_str(err))
        except (HangupException, UnexpectedCommandError) as err:
            raise exceptions.VcsException(org_exc=err)(safe_str(err))
        except Exception:
            # NOTE(marcink): because of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            #if not hasattr(e, '_vcs_kind'):
            #log.exception("Unhandled exception in git remote call")
            #raise_from_original(exceptions.UnhandledException)
            raise
    return wrapped
87
87
88
88
class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.
    """

    def __del__(self):
        # __del__ can run on a partially-constructed instance; only close
        # when the object store was actually set up.
        if hasattr(self, 'object_store'):
            self.close()
100
100
101
101
class Repository(LibGit2Repo):
    """
    Context-manager wrapper over pygit2's repository: releases the native
    handle via ``free()`` when the ``with`` block exits.
    """

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.free()
109
109
110
110
class GitFactory(RepoFactory):
    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        # Two backends coexist: pygit2 (libgit2) for the newer code paths,
        # dulwich for the remaining ones.
        if use_libgit2:
            return Repository(wire['path'])
        repo_path = str_to_dulwich(wire['path'])
        return Repo(repo_path)

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        """Shortcut for a libgit2-backed repository instance."""
        return self.repo(wire, use_libgit2=True)
129
129
130
130
131 class GitRemote(RemoteBase):
131 class GitRemote(RemoteBase):
132
132
133 def __init__(self, factory):
133 def __init__(self, factory):
134 self._factory = factory
134 self._factory = factory
135 self._bulk_methods = {
135 self._bulk_methods = {
136 "date": self.date,
136 "date": self.date,
137 "author": self.author,
137 "author": self.author,
138 "branch": self.branch,
138 "branch": self.branch,
139 "message": self.message,
139 "message": self.message,
140 "parents": self.parents,
140 "parents": self.parents,
141 "_commit": self.revision,
141 "_commit": self.revision,
142 }
142 }
143 self.region = self._factory._cache_region
144
143
145 def _wire_to_config(self, wire):
144 def _wire_to_config(self, wire):
146 if 'config' in wire:
145 if 'config' in wire:
147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
148 return {}
147 return {}
149
148
150 def _remote_conf(self, config):
149 def _remote_conf(self, config):
151 params = [
150 params = [
152 '-c', 'core.askpass=""',
151 '-c', 'core.askpass=""',
153 ]
152 ]
154 ssl_cert_dir = config.get('vcs_ssl_dir')
153 ssl_cert_dir = config.get('vcs_ssl_dir')
155 if ssl_cert_dir:
154 if ssl_cert_dir:
156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
157 return params
156 return params
158
157
159 @reraise_safe_exceptions
158 @reraise_safe_exceptions
160 def discover_git_version(self):
159 def discover_git_version(self):
161 stdout, _ = self.run_git_command(
160 stdout, _ = self.run_git_command(
162 {}, ['--version'], _bare=True, _safe=True)
161 {}, ['--version'], _bare=True, _safe=True)
163 prefix = 'git version'
162 prefix = 'git version'
164 if stdout.startswith(prefix):
163 if stdout.startswith(prefix):
165 stdout = stdout[len(prefix):]
164 stdout = stdout[len(prefix):]
166 return stdout.strip()
165 return stdout.strip()
167
166
168 @reraise_safe_exceptions
167 @reraise_safe_exceptions
169 def is_empty(self, wire):
168 def is_empty(self, wire):
170 repo_init = self._factory.repo_libgit2(wire)
169 repo_init = self._factory.repo_libgit2(wire)
171 with repo_init as repo:
170 with repo_init as repo:
172
171
173 try:
172 try:
174 has_head = repo.head.name
173 has_head = repo.head.name
175 if has_head:
174 if has_head:
176 return False
175 return False
177
176
178 # NOTE(marcink): check again using more expensive method
177 # NOTE(marcink): check again using more expensive method
179 return repo.is_empty
178 return repo.is_empty
180 except Exception:
179 except Exception:
181 pass
180 pass
182
181
183 return True
182 return True
184
183
185 @reraise_safe_exceptions
184 @reraise_safe_exceptions
186 def assert_correct_path(self, wire):
185 def assert_correct_path(self, wire):
187 cache_on, context_uid, repo_id = self._cache_on(wire)
186 cache_on, context_uid, repo_id = self._cache_on(wire)
188 @self.region.conditional_cache_on_arguments(condition=cache_on)
187 @self.region.conditional_cache_on_arguments(condition=cache_on)
189 def _assert_correct_path(_context_uid, _repo_id):
188 def _assert_correct_path(_context_uid, _repo_id):
190 try:
189 try:
191 repo_init = self._factory.repo_libgit2(wire)
190 repo_init = self._factory.repo_libgit2(wire)
192 with repo_init as repo:
191 with repo_init as repo:
193 pass
192 pass
194 except pygit2.GitError:
193 except pygit2.GitError:
195 path = wire.get('path')
194 path = wire.get('path')
196 tb = traceback.format_exc()
195 tb = traceback.format_exc()
197 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
196 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
198 return False
197 return False
199
198
200 return True
199 return True
201 return _assert_correct_path(context_uid, repo_id)
200 return _assert_correct_path(context_uid, repo_id)
202
201
203 @reraise_safe_exceptions
202 @reraise_safe_exceptions
204 def bare(self, wire):
203 def bare(self, wire):
205 repo_init = self._factory.repo_libgit2(wire)
204 repo_init = self._factory.repo_libgit2(wire)
206 with repo_init as repo:
205 with repo_init as repo:
207 return repo.is_bare
206 return repo.is_bare
208
207
209 @reraise_safe_exceptions
208 @reraise_safe_exceptions
210 def blob_as_pretty_string(self, wire, sha):
209 def blob_as_pretty_string(self, wire, sha):
211 repo_init = self._factory.repo_libgit2(wire)
210 repo_init = self._factory.repo_libgit2(wire)
212 with repo_init as repo:
211 with repo_init as repo:
213 blob_obj = repo[sha]
212 blob_obj = repo[sha]
214 blob = blob_obj.data
213 blob = blob_obj.data
215 return blob
214 return blob
216
215
217 @reraise_safe_exceptions
216 @reraise_safe_exceptions
218 def blob_raw_length(self, wire, sha):
217 def blob_raw_length(self, wire, sha):
219 cache_on, context_uid, repo_id = self._cache_on(wire)
218 cache_on, context_uid, repo_id = self._cache_on(wire)
220 @self.region.conditional_cache_on_arguments(condition=cache_on)
219 @self.region.conditional_cache_on_arguments(condition=cache_on)
221 def _blob_raw_length(_repo_id, _sha):
220 def _blob_raw_length(_repo_id, _sha):
222
221
223 repo_init = self._factory.repo_libgit2(wire)
222 repo_init = self._factory.repo_libgit2(wire)
224 with repo_init as repo:
223 with repo_init as repo:
225 blob = repo[sha]
224 blob = repo[sha]
226 return blob.size
225 return blob.size
227
226
228 return _blob_raw_length(repo_id, sha)
227 return _blob_raw_length(repo_id, sha)
229
228
230 def _parse_lfs_pointer(self, raw_content):
229 def _parse_lfs_pointer(self, raw_content):
231
230
232 spec_string = 'version https://git-lfs.github.com/spec'
231 spec_string = 'version https://git-lfs.github.com/spec'
233 if raw_content and raw_content.startswith(spec_string):
232 if raw_content and raw_content.startswith(spec_string):
234 pattern = re.compile(r"""
233 pattern = re.compile(r"""
235 (?:\n)?
234 (?:\n)?
236 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
237 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
238 ^size[ ](?P<oid_size>[0-9]+)\n
237 ^size[ ](?P<oid_size>[0-9]+)\n
239 (?:\n)?
238 (?:\n)?
240 """, re.VERBOSE | re.MULTILINE)
239 """, re.VERBOSE | re.MULTILINE)
241 match = pattern.match(raw_content)
240 match = pattern.match(raw_content)
242 if match:
241 if match:
243 return match.groupdict()
242 return match.groupdict()
244
243
245 return {}
244 return {}
246
245
247 @reraise_safe_exceptions
246 @reraise_safe_exceptions
248 def is_large_file(self, wire, commit_id):
247 def is_large_file(self, wire, commit_id):
249
248
250 cache_on, context_uid, repo_id = self._cache_on(wire)
249 cache_on, context_uid, repo_id = self._cache_on(wire)
251 @self.region.conditional_cache_on_arguments(condition=cache_on)
250 @self.region.conditional_cache_on_arguments(condition=cache_on)
252 def _is_large_file(_repo_id, _sha):
251 def _is_large_file(_repo_id, _sha):
253 repo_init = self._factory.repo_libgit2(wire)
252 repo_init = self._factory.repo_libgit2(wire)
254 with repo_init as repo:
253 with repo_init as repo:
255 blob = repo[commit_id]
254 blob = repo[commit_id]
256 if blob.is_binary:
255 if blob.is_binary:
257 return {}
256 return {}
258
257
259 return self._parse_lfs_pointer(blob.data)
258 return self._parse_lfs_pointer(blob.data)
260
259
261 return _is_large_file(repo_id, commit_id)
260 return _is_large_file(repo_id, commit_id)
262
261
263 @reraise_safe_exceptions
262 @reraise_safe_exceptions
264 def in_largefiles_store(self, wire, oid):
263 def in_largefiles_store(self, wire, oid):
265 conf = self._wire_to_config(wire)
264 conf = self._wire_to_config(wire)
266 repo_init = self._factory.repo_libgit2(wire)
265 repo_init = self._factory.repo_libgit2(wire)
267 with repo_init as repo:
266 with repo_init as repo:
268 repo_name = repo.path
267 repo_name = repo.path
269
268
270 store_location = conf.get('vcs_git_lfs_store_location')
269 store_location = conf.get('vcs_git_lfs_store_location')
271 if store_location:
270 if store_location:
272
271
273 store = LFSOidStore(
272 store = LFSOidStore(
274 oid=oid, repo=repo_name, store_location=store_location)
273 oid=oid, repo=repo_name, store_location=store_location)
275 return store.has_oid()
274 return store.has_oid()
276
275
277 return False
276 return False
278
277
279 @reraise_safe_exceptions
278 @reraise_safe_exceptions
280 def store_path(self, wire, oid):
279 def store_path(self, wire, oid):
281 conf = self._wire_to_config(wire)
280 conf = self._wire_to_config(wire)
282 repo_init = self._factory.repo_libgit2(wire)
281 repo_init = self._factory.repo_libgit2(wire)
283 with repo_init as repo:
282 with repo_init as repo:
284 repo_name = repo.path
283 repo_name = repo.path
285
284
286 store_location = conf.get('vcs_git_lfs_store_location')
285 store_location = conf.get('vcs_git_lfs_store_location')
287 if store_location:
286 if store_location:
288 store = LFSOidStore(
287 store = LFSOidStore(
289 oid=oid, repo=repo_name, store_location=store_location)
288 oid=oid, repo=repo_name, store_location=store_location)
290 return store.oid_path
289 return store.oid_path
291 raise ValueError('Unable to fetch oid with path {}'.format(oid))
290 raise ValueError('Unable to fetch oid with path {}'.format(oid))
292
291
293 @reraise_safe_exceptions
292 @reraise_safe_exceptions
294 def bulk_request(self, wire, rev, pre_load):
293 def bulk_request(self, wire, rev, pre_load):
295 cache_on, context_uid, repo_id = self._cache_on(wire)
294 cache_on, context_uid, repo_id = self._cache_on(wire)
296 @self.region.conditional_cache_on_arguments(condition=cache_on)
295 @self.region.conditional_cache_on_arguments(condition=cache_on)
297 def _bulk_request(_repo_id, _rev, _pre_load):
296 def _bulk_request(_repo_id, _rev, _pre_load):
298 result = {}
297 result = {}
299 for attr in pre_load:
298 for attr in pre_load:
300 try:
299 try:
301 method = self._bulk_methods[attr]
300 method = self._bulk_methods[attr]
302 args = [wire, rev]
301 args = [wire, rev]
303 result[attr] = method(*args)
302 result[attr] = method(*args)
304 except KeyError as e:
303 except KeyError as e:
305 raise exceptions.VcsException(e)(
304 raise exceptions.VcsException(e)(
306 "Unknown bulk attribute: %s" % attr)
305 "Unknown bulk attribute: %s" % attr)
307 return result
306 return result
308
307
309 return _bulk_request(repo_id, rev, sorted(pre_load))
308 return _bulk_request(repo_id, rev, sorted(pre_load))
310
309
311 def _build_opener(self, url):
310 def _build_opener(self, url):
312 handlers = []
311 handlers = []
313 url_obj = url_parser(url)
312 url_obj = url_parser(url)
314 _, authinfo = url_obj.authinfo()
313 _, authinfo = url_obj.authinfo()
315
314
316 if authinfo:
315 if authinfo:
317 # create a password manager
316 # create a password manager
318 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
317 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
319 passmgr.add_password(*authinfo)
318 passmgr.add_password(*authinfo)
320
319
321 handlers.extend((httpbasicauthhandler(passmgr),
320 handlers.extend((httpbasicauthhandler(passmgr),
322 httpdigestauthhandler(passmgr)))
321 httpdigestauthhandler(passmgr)))
323
322
324 return urllib2.build_opener(*handlers)
323 return urllib2.build_opener(*handlers)
325
324
326 def _type_id_to_name(self, type_id):
325 def _type_id_to_name(self, type_id):
327 return {
326 return {
328 1: b'commit',
327 1: b'commit',
329 2: b'tree',
328 2: b'tree',
330 3: b'blob',
329 3: b'blob',
331 4: b'tag'
330 4: b'tag'
332 }[type_id]
331 }[type_id]
333
332
334 @reraise_safe_exceptions
333 @reraise_safe_exceptions
335 def check_url(self, url, config):
334 def check_url(self, url, config):
336 url_obj = url_parser(url)
335 url_obj = url_parser(url)
337 test_uri, _ = url_obj.authinfo()
336 test_uri, _ = url_obj.authinfo()
338 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
337 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
339 url_obj.query = obfuscate_qs(url_obj.query)
338 url_obj.query = obfuscate_qs(url_obj.query)
340 cleaned_uri = str(url_obj)
339 cleaned_uri = str(url_obj)
341 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
340 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
342
341
343 if not test_uri.endswith('info/refs'):
342 if not test_uri.endswith('info/refs'):
344 test_uri = test_uri.rstrip('/') + '/info/refs'
343 test_uri = test_uri.rstrip('/') + '/info/refs'
345
344
346 o = self._build_opener(url)
345 o = self._build_opener(url)
347 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
346 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
348
347
349 q = {"service": 'git-upload-pack'}
348 q = {"service": 'git-upload-pack'}
350 qs = '?%s' % urllib.urlencode(q)
349 qs = '?%s' % urllib.urlencode(q)
351 cu = "%s%s" % (test_uri, qs)
350 cu = "%s%s" % (test_uri, qs)
352 req = urllib2.Request(cu, None, {})
351 req = urllib2.Request(cu, None, {})
353
352
354 try:
353 try:
355 log.debug("Trying to open URL %s", cleaned_uri)
354 log.debug("Trying to open URL %s", cleaned_uri)
356 resp = o.open(req)
355 resp = o.open(req)
357 if resp.code != 200:
356 if resp.code != 200:
358 raise exceptions.URLError()('Return Code is not 200')
357 raise exceptions.URLError()('Return Code is not 200')
359 except Exception as e:
358 except Exception as e:
360 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
359 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
361 # means it cannot be cloned
360 # means it cannot be cloned
362 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
361 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
363
362
364 # now detect if it's proper git repo
363 # now detect if it's proper git repo
365 gitdata = resp.read()
364 gitdata = resp.read()
366 if 'service=git-upload-pack' in gitdata:
365 if 'service=git-upload-pack' in gitdata:
367 pass
366 pass
368 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
367 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
369 # old style git can return some other format !
368 # old style git can return some other format !
370 pass
369 pass
371 else:
370 else:
372 raise exceptions.URLError()(
371 raise exceptions.URLError()(
373 "url [%s] does not look like an git" % (cleaned_uri,))
372 "url [%s] does not look like an git" % (cleaned_uri,))
374
373
375 return True
374 return True
376
375
377 @reraise_safe_exceptions
376 @reraise_safe_exceptions
378 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
377 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
379 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
378 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
380 remote_refs = self.pull(wire, url, apply_refs=False)
379 remote_refs = self.pull(wire, url, apply_refs=False)
381 repo = self._factory.repo(wire)
380 repo = self._factory.repo(wire)
382 if isinstance(valid_refs, list):
381 if isinstance(valid_refs, list):
383 valid_refs = tuple(valid_refs)
382 valid_refs = tuple(valid_refs)
384
383
385 for k in remote_refs:
384 for k in remote_refs:
386 # only parse heads/tags and skip so called deferred tags
385 # only parse heads/tags and skip so called deferred tags
387 if k.startswith(valid_refs) and not k.endswith(deferred):
386 if k.startswith(valid_refs) and not k.endswith(deferred):
388 repo[k] = remote_refs[k]
387 repo[k] = remote_refs[k]
389
388
390 if update_after_clone:
389 if update_after_clone:
391 # we want to checkout HEAD
390 # we want to checkout HEAD
392 repo["HEAD"] = remote_refs["HEAD"]
391 repo["HEAD"] = remote_refs["HEAD"]
393 index.build_index_from_tree(repo.path, repo.index_path(),
392 index.build_index_from_tree(repo.path, repo.index_path(),
394 repo.object_store, repo["HEAD"].tree)
393 repo.object_store, repo["HEAD"].tree)
395
394
396 @reraise_safe_exceptions
395 @reraise_safe_exceptions
397 def branch(self, wire, commit_id):
396 def branch(self, wire, commit_id):
398 cache_on, context_uid, repo_id = self._cache_on(wire)
397 cache_on, context_uid, repo_id = self._cache_on(wire)
399 @self.region.conditional_cache_on_arguments(condition=cache_on)
398 @self.region.conditional_cache_on_arguments(condition=cache_on)
400 def _branch(_context_uid, _repo_id, _commit_id):
399 def _branch(_context_uid, _repo_id, _commit_id):
401 regex = re.compile('^refs/heads')
400 regex = re.compile('^refs/heads')
402
401
403 def filter_with(ref):
402 def filter_with(ref):
404 return regex.match(ref[0]) and ref[1] == _commit_id
403 return regex.match(ref[0]) and ref[1] == _commit_id
405
404
406 branches = filter(filter_with, self.get_refs(wire).items())
405 branches = filter(filter_with, self.get_refs(wire).items())
407 return [x[0].split('refs/heads/')[-1] for x in branches]
406 return [x[0].split('refs/heads/')[-1] for x in branches]
408
407
409 return _branch(context_uid, repo_id, commit_id)
408 return _branch(context_uid, repo_id, commit_id)
410
409
411 @reraise_safe_exceptions
410 @reraise_safe_exceptions
412 def commit_branches(self, wire, commit_id):
411 def commit_branches(self, wire, commit_id):
413 cache_on, context_uid, repo_id = self._cache_on(wire)
412 cache_on, context_uid, repo_id = self._cache_on(wire)
414 @self.region.conditional_cache_on_arguments(condition=cache_on)
413 @self.region.conditional_cache_on_arguments(condition=cache_on)
415 def _commit_branches(_context_uid, _repo_id, _commit_id):
414 def _commit_branches(_context_uid, _repo_id, _commit_id):
416 repo_init = self._factory.repo_libgit2(wire)
415 repo_init = self._factory.repo_libgit2(wire)
417 with repo_init as repo:
416 with repo_init as repo:
418 branches = [x for x in repo.branches.with_commit(_commit_id)]
417 branches = [x for x in repo.branches.with_commit(_commit_id)]
419 return branches
418 return branches
420
419
421 return _commit_branches(context_uid, repo_id, commit_id)
420 return _commit_branches(context_uid, repo_id, commit_id)
422
421
423 @reraise_safe_exceptions
422 @reraise_safe_exceptions
424 def add_object(self, wire, content):
423 def add_object(self, wire, content):
425 repo_init = self._factory.repo_libgit2(wire)
424 repo_init = self._factory.repo_libgit2(wire)
426 with repo_init as repo:
425 with repo_init as repo:
427 blob = objects.Blob()
426 blob = objects.Blob()
428 blob.set_raw_string(content)
427 blob.set_raw_string(content)
429 repo.object_store.add_object(blob)
428 repo.object_store.add_object(blob)
430 return blob.id
429 return blob.id
431
430
    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """
        Create a commit on *branch* from tree edits.

        :param commit_data: dict of attributes set verbatim on the Commit
            object (author, committer, message, ...).
        :param commit_tree: sha of the base tree, or falsy for an empty tree.
        :param updated: list of dicts with 'path', 'node_path', 'content',
            'mode' describing added/changed files.
        :param removed: list of path strings to delete from the tree.
        :returns: sha of the new commit.
        """
        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populates it with blobs
        commit_tree = commit_tree and repo[commit_tree] or objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            # NOTE: map() returns a list under Python 2 — pop(0) below
            # relies on that
            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                # file lives directly under an existing tree
                parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree
            trees = [tree]
            # Traverse deep into the forest...
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append(obj)
                        tree = obj
                except KeyError:
                    break
            # Cut down the blob and all rotten trees on the way back...
            for path, tree in reversed(zip(paths, trees)):
                del tree[path]
                if tree:
                    # This tree still has elements - don't remove it or any
                    # of it's parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        for k, v in commit_data.iteritems():
            setattr(commit, k, v)
        object_store.add_object(commit)

        self.create_branch(wire, branch, commit.id)

        # dulwich set-ref
        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id
533
532
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        # Pull refs from a remote `url` into the repository described by `wire`
        # using dulwich clients. Local paths (no '://' and not the 'default'
        # alias) use LocalGitClient; everything else goes over HTTP with an
        # authenticated opener.
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            # limit what gets fetched to the explicitly requested refs
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

        if update_after:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
        return remote_refs
586
585
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None):
        # Fetch refs from a remote `url` using the git binary.
        # Returns an ordered mapping {ref_name: sha} of all remote refs
        # (HEAD and peeled refs excluded); when `refs` is given, only refs
        # whose sha is listed in `refs` are actually fetched.
        repo = self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]
        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        output, __ = self.run_git_command(
            wire, ['ls-remote', url], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        for ref_line in output.splitlines():
            sha, ref = ref_line.split('\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in ['HEAD']:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append('{}:{}'.format(ref, ref))
            elif not refs:
                fetch_refs.append('{}:{}'.format(ref, ref))
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
        if fetch_refs:
            # fetch in chunks so very large ref sets don't exceed the
            # command-line length limit
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                _out, _err = self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs
634
633
635 @reraise_safe_exceptions
634 @reraise_safe_exceptions
636 def sync_push(self, wire, url, refs=None):
635 def sync_push(self, wire, url, refs=None):
637 if not self.check_url(url, wire):
636 if not self.check_url(url, wire):
638 return
637 return
639 config = self._wire_to_config(wire)
638 config = self._wire_to_config(wire)
640 self._factory.repo(wire)
639 self._factory.repo(wire)
641 self.run_git_command(
640 self.run_git_command(
642 wire, ['push', url, '--mirror'], fail_on_stderr=False,
641 wire, ['push', url, '--mirror'], fail_on_stderr=False,
643 _copts=self._remote_conf(config),
642 _copts=self._remote_conf(config),
644 extra_env={'GIT_TERMINAL_PROMPT': '0'})
643 extra_env={'GIT_TERMINAL_PROMPT': '0'})
645
644
646 @reraise_safe_exceptions
645 @reraise_safe_exceptions
647 def get_remote_refs(self, wire, url):
646 def get_remote_refs(self, wire, url):
648 repo = Repo(url)
647 repo = Repo(url)
649 return repo.get_refs()
648 return repo.get_refs()
650
649
651 @reraise_safe_exceptions
650 @reraise_safe_exceptions
652 def get_description(self, wire):
651 def get_description(self, wire):
653 repo = self._factory.repo(wire)
652 repo = self._factory.repo(wire)
654 return repo.get_description()
653 return repo.get_description()
655
654
    @reraise_safe_exceptions
    def get_missing_revs(self, wire, rev1, rev2, path2):
        # Return commit ids reachable from `rev2` (in the repo at `path2`)
        # but not from `rev1`, after cross-fetching both repositories so the
        # walker has every needed object available locally.
        repo = self._factory.repo(wire)
        LocalGitClient(thin_packs=False).fetch(path2, repo)

        wire_remote = wire.copy()
        wire_remote['path'] = path2
        repo_remote = self._factory.repo(wire_remote)
        LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)

        revs = [
            x.commit.id
            for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
        return revs
670
669
    @reraise_safe_exceptions
    def get_object(self, wire, sha):
        # Resolve `sha` (any rev-parse expression) to a commit dict,
        # cached per repository context. Tag objects are peeled to their
        # target commit; commits not on any branch ("dangling") raise
        # LookupException.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except (KeyError, ValueError) as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    # peel the annotated tag to the commit it points at
                    commit = repo.get(commit.target)
                    is_tag = True

                if not is_tag:
                    # check for dangling commit
                    branches = [x for x in repo.branches.with_commit(commit.hex)]
                    if not branches:
                        raise exceptions.LookupException(None)(missing_commit_err)

                commit_id = commit.hex
                type_id = commit.type

                return {
                    'id': commit_id,
                    'type': self._type_id_to_name(type_id),
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)
707
706
708 @reraise_safe_exceptions
707 @reraise_safe_exceptions
709 def get_refs(self, wire):
708 def get_refs(self, wire):
710 cache_on, context_uid, repo_id = self._cache_on(wire)
709 cache_on, context_uid, repo_id = self._cache_on(wire)
711 @self.region.conditional_cache_on_arguments(condition=cache_on)
710 @self.region.conditional_cache_on_arguments(condition=cache_on)
712 def _get_refs(_context_uid, _repo_id):
711 def _get_refs(_context_uid, _repo_id):
713
712
714 repo_init = self._factory.repo_libgit2(wire)
713 repo_init = self._factory.repo_libgit2(wire)
715 with repo_init as repo:
714 with repo_init as repo:
716 regex = re.compile('^refs/(heads|tags)/')
715 regex = re.compile('^refs/(heads|tags)/')
717 return {x.name: x.target.hex for x in
716 return {x.name: x.target.hex for x in
718 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
717 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
719
718
720 return _get_refs(context_uid, repo_id)
719 return _get_refs(context_uid, repo_id)
721
720
722 @reraise_safe_exceptions
721 @reraise_safe_exceptions
723 def get_branch_pointers(self, wire):
722 def get_branch_pointers(self, wire):
724 cache_on, context_uid, repo_id = self._cache_on(wire)
723 cache_on, context_uid, repo_id = self._cache_on(wire)
725 @self.region.conditional_cache_on_arguments(condition=cache_on)
724 @self.region.conditional_cache_on_arguments(condition=cache_on)
726 def _get_branch_pointers(_context_uid, _repo_id):
725 def _get_branch_pointers(_context_uid, _repo_id):
727
726
728 repo_init = self._factory.repo_libgit2(wire)
727 repo_init = self._factory.repo_libgit2(wire)
729 regex = re.compile('^refs/heads')
728 regex = re.compile('^refs/heads')
730 with repo_init as repo:
729 with repo_init as repo:
731 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
730 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
732 return {x.target.hex: x.shorthand for x in branches}
731 return {x.target.hex: x.shorthand for x in branches}
733
732
734 return _get_branch_pointers(context_uid, repo_id)
733 return _get_branch_pointers(context_uid, repo_id)
735
734
    @reraise_safe_exceptions
    def head(self, wire, show_exc=True):
        # Return the hex sha of the commit HEAD points at (peeled).
        # Cached per (context, repo, show_exc). `show_exc` controls whether a
        # failure to resolve HEAD (e.g. an empty repository) is re-raised or
        # silently swallowed (returning None).
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _head(_context_uid, _repo_id, _show_exc):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    return repo.head.peel().hex
                except Exception:
                    # NOTE: reads the closed-over `show_exc`; `_show_exc`
                    # exists only to make it part of the cache key
                    if show_exc:
                        raise
        return _head(context_uid, repo_id, show_exc)
749
748
750 @reraise_safe_exceptions
749 @reraise_safe_exceptions
751 def init(self, wire):
750 def init(self, wire):
752 repo_path = str_to_dulwich(wire['path'])
751 repo_path = str_to_dulwich(wire['path'])
753 self.repo = Repo.init(repo_path)
752 self.repo = Repo.init(repo_path)
754
753
755 @reraise_safe_exceptions
754 @reraise_safe_exceptions
756 def init_bare(self, wire):
755 def init_bare(self, wire):
757 repo_path = str_to_dulwich(wire['path'])
756 repo_path = str_to_dulwich(wire['path'])
758 self.repo = Repo.init_bare(repo_path)
757 self.repo = Repo.init_bare(repo_path)
759
758
760 @reraise_safe_exceptions
759 @reraise_safe_exceptions
761 def revision(self, wire, rev):
760 def revision(self, wire, rev):
762
761
763 cache_on, context_uid, repo_id = self._cache_on(wire)
762 cache_on, context_uid, repo_id = self._cache_on(wire)
764 @self.region.conditional_cache_on_arguments(condition=cache_on)
763 @self.region.conditional_cache_on_arguments(condition=cache_on)
765 def _revision(_context_uid, _repo_id, _rev):
764 def _revision(_context_uid, _repo_id, _rev):
766 repo_init = self._factory.repo_libgit2(wire)
765 repo_init = self._factory.repo_libgit2(wire)
767 with repo_init as repo:
766 with repo_init as repo:
768 commit = repo[rev]
767 commit = repo[rev]
769 obj_data = {
768 obj_data = {
770 'id': commit.id.hex,
769 'id': commit.id.hex,
771 }
770 }
772 # tree objects itself don't have tree_id attribute
771 # tree objects itself don't have tree_id attribute
773 if hasattr(commit, 'tree_id'):
772 if hasattr(commit, 'tree_id'):
774 obj_data['tree'] = commit.tree_id.hex
773 obj_data['tree'] = commit.tree_id.hex
775
774
776 return obj_data
775 return obj_data
777 return _revision(context_uid, repo_id, rev)
776 return _revision(context_uid, repo_id, rev)
778
777
779 @reraise_safe_exceptions
778 @reraise_safe_exceptions
780 def date(self, wire, commit_id):
779 def date(self, wire, commit_id):
781 cache_on, context_uid, repo_id = self._cache_on(wire)
780 cache_on, context_uid, repo_id = self._cache_on(wire)
782 @self.region.conditional_cache_on_arguments(condition=cache_on)
781 @self.region.conditional_cache_on_arguments(condition=cache_on)
783 def _date(_repo_id, _commit_id):
782 def _date(_repo_id, _commit_id):
784 repo_init = self._factory.repo_libgit2(wire)
783 repo_init = self._factory.repo_libgit2(wire)
785 with repo_init as repo:
784 with repo_init as repo:
786 commit = repo[commit_id]
785 commit = repo[commit_id]
787 # TODO(marcink): check dulwich difference of offset vs timezone
786 # TODO(marcink): check dulwich difference of offset vs timezone
788 return [commit.commit_time, commit.commit_time_offset]
787 return [commit.commit_time, commit.commit_time_offset]
789 return _date(repo_id, commit_id)
788 return _date(repo_id, commit_id)
790
789
791 @reraise_safe_exceptions
790 @reraise_safe_exceptions
792 def author(self, wire, commit_id):
791 def author(self, wire, commit_id):
793 cache_on, context_uid, repo_id = self._cache_on(wire)
792 cache_on, context_uid, repo_id = self._cache_on(wire)
794 @self.region.conditional_cache_on_arguments(condition=cache_on)
793 @self.region.conditional_cache_on_arguments(condition=cache_on)
795 def _author(_repo_id, _commit_id):
794 def _author(_repo_id, _commit_id):
796 repo_init = self._factory.repo_libgit2(wire)
795 repo_init = self._factory.repo_libgit2(wire)
797 with repo_init as repo:
796 with repo_init as repo:
798 commit = repo[commit_id]
797 commit = repo[commit_id]
799 if commit.author.email:
798 if commit.author.email:
800 return u"{} <{}>".format(commit.author.name, commit.author.email)
799 return u"{} <{}>".format(commit.author.name, commit.author.email)
801
800
802 return u"{}".format(commit.author.raw_name)
801 return u"{}".format(commit.author.raw_name)
803 return _author(repo_id, commit_id)
802 return _author(repo_id, commit_id)
804
803
805 @reraise_safe_exceptions
804 @reraise_safe_exceptions
806 def message(self, wire, commit_id):
805 def message(self, wire, commit_id):
807 cache_on, context_uid, repo_id = self._cache_on(wire)
806 cache_on, context_uid, repo_id = self._cache_on(wire)
808 @self.region.conditional_cache_on_arguments(condition=cache_on)
807 @self.region.conditional_cache_on_arguments(condition=cache_on)
809 def _message(_repo_id, _commit_id):
808 def _message(_repo_id, _commit_id):
810 repo_init = self._factory.repo_libgit2(wire)
809 repo_init = self._factory.repo_libgit2(wire)
811 with repo_init as repo:
810 with repo_init as repo:
812 commit = repo[commit_id]
811 commit = repo[commit_id]
813 return commit.message
812 return commit.message
814 return _message(repo_id, commit_id)
813 return _message(repo_id, commit_id)
815
814
816 @reraise_safe_exceptions
815 @reraise_safe_exceptions
817 def parents(self, wire, commit_id):
816 def parents(self, wire, commit_id):
818 cache_on, context_uid, repo_id = self._cache_on(wire)
817 cache_on, context_uid, repo_id = self._cache_on(wire)
819 @self.region.conditional_cache_on_arguments(condition=cache_on)
818 @self.region.conditional_cache_on_arguments(condition=cache_on)
820 def _parents(_repo_id, _commit_id):
819 def _parents(_repo_id, _commit_id):
821 repo_init = self._factory.repo_libgit2(wire)
820 repo_init = self._factory.repo_libgit2(wire)
822 with repo_init as repo:
821 with repo_init as repo:
823 commit = repo[commit_id]
822 commit = repo[commit_id]
824 return [x.hex for x in commit.parent_ids]
823 return [x.hex for x in commit.parent_ids]
825 return _parents(repo_id, commit_id)
824 return _parents(repo_id, commit_id)
826
825
827 @reraise_safe_exceptions
826 @reraise_safe_exceptions
828 def children(self, wire, commit_id):
827 def children(self, wire, commit_id):
829 cache_on, context_uid, repo_id = self._cache_on(wire)
828 cache_on, context_uid, repo_id = self._cache_on(wire)
830 @self.region.conditional_cache_on_arguments(condition=cache_on)
829 @self.region.conditional_cache_on_arguments(condition=cache_on)
831 def _children(_repo_id, _commit_id):
830 def _children(_repo_id, _commit_id):
832 output, __ = self.run_git_command(
831 output, __ = self.run_git_command(
833 wire, ['rev-list', '--all', '--children'])
832 wire, ['rev-list', '--all', '--children'])
834
833
835 child_ids = []
834 child_ids = []
836 pat = re.compile(r'^%s' % commit_id)
835 pat = re.compile(r'^%s' % commit_id)
837 for l in output.splitlines():
836 for l in output.splitlines():
838 if pat.match(l):
837 if pat.match(l):
839 found_ids = l.split(' ')[1:]
838 found_ids = l.split(' ')[1:]
840 child_ids.extend(found_ids)
839 child_ids.extend(found_ids)
841
840
842 return child_ids
841 return child_ids
843 return _children(repo_id, commit_id)
842 return _children(repo_id, commit_id)
844
843
845 @reraise_safe_exceptions
844 @reraise_safe_exceptions
846 def set_refs(self, wire, key, value):
845 def set_refs(self, wire, key, value):
847 repo_init = self._factory.repo_libgit2(wire)
846 repo_init = self._factory.repo_libgit2(wire)
848 with repo_init as repo:
847 with repo_init as repo:
849 repo.references.create(key, value, force=True)
848 repo.references.create(key, value, force=True)
850
849
851 @reraise_safe_exceptions
850 @reraise_safe_exceptions
852 def create_branch(self, wire, branch_name, commit_id, force=False):
851 def create_branch(self, wire, branch_name, commit_id, force=False):
853 repo_init = self._factory.repo_libgit2(wire)
852 repo_init = self._factory.repo_libgit2(wire)
854 with repo_init as repo:
853 with repo_init as repo:
855 commit = repo[commit_id]
854 commit = repo[commit_id]
856
855
857 if force:
856 if force:
858 repo.branches.local.create(branch_name, commit, force=force)
857 repo.branches.local.create(branch_name, commit, force=force)
859 elif not repo.branches.get(branch_name):
858 elif not repo.branches.get(branch_name):
860 # create only if that branch isn't existing
859 # create only if that branch isn't existing
861 repo.branches.local.create(branch_name, commit, force=force)
860 repo.branches.local.create(branch_name, commit, force=force)
862
861
863 @reraise_safe_exceptions
862 @reraise_safe_exceptions
864 def remove_ref(self, wire, key):
863 def remove_ref(self, wire, key):
865 repo_init = self._factory.repo_libgit2(wire)
864 repo_init = self._factory.repo_libgit2(wire)
866 with repo_init as repo:
865 with repo_init as repo:
867 repo.references.delete(key)
866 repo.references.delete(key)
868
867
869 @reraise_safe_exceptions
868 @reraise_safe_exceptions
870 def tag_remove(self, wire, tag_name):
869 def tag_remove(self, wire, tag_name):
871 repo_init = self._factory.repo_libgit2(wire)
870 repo_init = self._factory.repo_libgit2(wire)
872 with repo_init as repo:
871 with repo_init as repo:
873 key = 'refs/tags/{}'.format(tag_name)
872 key = 'refs/tags/{}'.format(tag_name)
874 repo.references.delete(key)
873 repo.references.delete(key)
875
874
876 @reraise_safe_exceptions
875 @reraise_safe_exceptions
877 def tree_changes(self, wire, source_id, target_id):
876 def tree_changes(self, wire, source_id, target_id):
878 # TODO(marcink): remove this seems it's only used by tests
877 # TODO(marcink): remove this seems it's only used by tests
879 repo = self._factory.repo(wire)
878 repo = self._factory.repo(wire)
880 source = repo[source_id].tree if source_id else None
879 source = repo[source_id].tree if source_id else None
881 target = repo[target_id].tree
880 target = repo[target_id].tree
882 result = repo.object_store.tree_changes(source, target)
881 result = repo.object_store.tree_changes(source, target)
883 return list(result)
882 return list(result)
884
883
    @reraise_safe_exceptions
    def tree_and_type_for_path(self, wire, commit_id, path):
        # Return (tree_id_hex, type, filemode) for `path` inside the tree of
        # `commit_id`, or (None, None, None) when the path does not exist.
        # Cached per repository context.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
            repo_init = self._factory.repo_libgit2(wire)

            with repo_init as repo:
                commit = repo[commit_id]
                try:
                    tree = commit.tree[path]
                except KeyError:
                    return None, None, None

                return tree.id.hex, tree.type, tree.filemode
        return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
902
901
    @reraise_safe_exceptions
    def tree_items(self, wire, tree_id):
        # List the entries of tree object `tree_id` as tuples of
        # (name, filemode, sha, type). Cached per repository.
        # Raises ObjectMissing when `tree_id` is unknown.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _tree_items(_repo_id, _tree_id):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    tree = repo[tree_id]
                except KeyError:
                    raise ObjectMissing('No tree with id: {}'.format(tree_id))

                result = []
                for item in tree:
                    item_sha = item.hex
                    item_mode = item.filemode
                    item_type = item.type

                    if item_type == 'commit':
                        # NOTE(marcink): submodules we translate to 'link' for backward compat
                        item_type = 'link'

                    result.append((item.name, item_mode, item_sha, item_type))
                return result
        return _tree_items(repo_id, tree_id)
929
928
    @reraise_safe_exceptions
    def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
        # Produce a unified diff between two commits via the git binary.
        # `context` sets the number of context lines, `opt_ignorews` ignores
        # whitespace-only changes, and `file_filter` restricts to one path.
        flags = [
            '-U%s' % context, '--full-index', '--binary', '-p',
            '-M', '--abbrev=40']

        if opt_ignorews:
            flags.append('-w')

        # diffing against the EMPTY_COMMIT has no left side; `git show`
        # produces the equivalent patch for an initial commit
        if commit_id_1 == self.EMPTY_COMMIT:
            cmd = ['show'] + flags + [commit_id_2]
        else:
            cmd = ['diff'] + flags + [commit_id_1, commit_id_2]

        if file_filter:
            cmd.extend(['--', file_filter])

        diff, __ = self.run_git_command(wire, cmd)
        # If we used 'show' command, strip first few lines (until actual diff
        # starts)
        if commit_id_1 == self.EMPTY_COMMIT:
            lines = diff.splitlines()
            x = 0
            for line in lines:
                if line.startswith('diff'):
                    break
                x += 1
            # Append new line just like 'diff' command do
            diff = '\n'.join(lines[x:]) + '\n'
        return diff
961
960
    @reraise_safe_exceptions
    def node_history(self, wire, commit_id, path, limit):
        # Return commit ids that touched `path`, starting from `commit_id`,
        # optionally limited to `limit` entries. Cached per repository context.
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
            # optimize for n==1, rev-list is much faster for that use-case
            if limit == 1:
                cmd = ['rev-list', '-1', commit_id, '--', path]
            else:
                cmd = ['log']
                if limit:
                    cmd.extend(['-n', str(safe_int(limit, 0))])
                cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])

            output, __ = self.run_git_command(wire, cmd)
            # pick full 40-character shas out of the raw output
            commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)

            return [x for x in commit_ids]
        return _node_history(context_uid, repo_id, commit_id, path, limit)
981
980
982 @reraise_safe_exceptions
981 @reraise_safe_exceptions
983 def node_annotate(self, wire, commit_id, path):
982 def node_annotate(self, wire, commit_id, path):
984
983
985 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
984 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
986 # -l ==> outputs long shas (and we need all 40 characters)
985 # -l ==> outputs long shas (and we need all 40 characters)
987 # --root ==> doesn't put '^' character for boundaries
986 # --root ==> doesn't put '^' character for boundaries
988 # -r commit_id ==> blames for the given commit
987 # -r commit_id ==> blames for the given commit
989 output, __ = self.run_git_command(wire, cmd)
988 output, __ = self.run_git_command(wire, cmd)
990
989
991 result = []
990 result = []
992 for i, blame_line in enumerate(output.split('\n')[:-1]):
991 for i, blame_line in enumerate(output.split('\n')[:-1]):
993 line_no = i + 1
992 line_no = i + 1
994 commit_id, line = re.split(r' ', blame_line, 1)
993 commit_id, line = re.split(r' ', blame_line, 1)
995 result.append((line_no, commit_id, line))
994 result.append((line_no, commit_id, line))
996 return result
995 return result
997
996
998 @reraise_safe_exceptions
997 @reraise_safe_exceptions
999 def update_server_info(self, wire):
998 def update_server_info(self, wire):
1000 repo = self._factory.repo(wire)
999 repo = self._factory.repo(wire)
1001 update_server_info(repo)
1000 update_server_info(repo)
1002
1001
1003 @reraise_safe_exceptions
1002 @reraise_safe_exceptions
1004 def get_all_commit_ids(self, wire):
1003 def get_all_commit_ids(self, wire):
1005
1004
1006 cache_on, context_uid, repo_id = self._cache_on(wire)
1005 cache_on, context_uid, repo_id = self._cache_on(wire)
1007 @self.region.conditional_cache_on_arguments(condition=cache_on)
1006 @self.region.conditional_cache_on_arguments(condition=cache_on)
1008 def _get_all_commit_ids(_context_uid, _repo_id):
1007 def _get_all_commit_ids(_context_uid, _repo_id):
1009
1008
1010 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1009 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1011 try:
1010 try:
1012 output, __ = self.run_git_command(wire, cmd)
1011 output, __ = self.run_git_command(wire, cmd)
1013 return output.splitlines()
1012 return output.splitlines()
1014 except Exception:
1013 except Exception:
1015 # Can be raised for empty repositories
1014 # Can be raised for empty repositories
1016 return []
1015 return []
1017 return _get_all_commit_ids(context_uid, repo_id)
1016 return _get_all_commit_ids(context_uid, repo_id)
1018
1017
1019 @reraise_safe_exceptions
1018 @reraise_safe_exceptions
1020 def run_git_command(self, wire, cmd, **opts):
1019 def run_git_command(self, wire, cmd, **opts):
1021 path = wire.get('path', None)
1020 path = wire.get('path', None)
1022
1021
1023 if path and os.path.isdir(path):
1022 if path and os.path.isdir(path):
1024 opts['cwd'] = path
1023 opts['cwd'] = path
1025
1024
1026 if '_bare' in opts:
1025 if '_bare' in opts:
1027 _copts = []
1026 _copts = []
1028 del opts['_bare']
1027 del opts['_bare']
1029 else:
1028 else:
1030 _copts = ['-c', 'core.quotepath=false', ]
1029 _copts = ['-c', 'core.quotepath=false', ]
1031 safe_call = False
1030 safe_call = False
1032 if '_safe' in opts:
1031 if '_safe' in opts:
1033 # no exc on failure
1032 # no exc on failure
1034 del opts['_safe']
1033 del opts['_safe']
1035 safe_call = True
1034 safe_call = True
1036
1035
1037 if '_copts' in opts:
1036 if '_copts' in opts:
1038 _copts.extend(opts['_copts'] or [])
1037 _copts.extend(opts['_copts'] or [])
1039 del opts['_copts']
1038 del opts['_copts']
1040
1039
1041 gitenv = os.environ.copy()
1040 gitenv = os.environ.copy()
1042 gitenv.update(opts.pop('extra_env', {}))
1041 gitenv.update(opts.pop('extra_env', {}))
1043 # need to clean fix GIT_DIR !
1042 # need to clean fix GIT_DIR !
1044 if 'GIT_DIR' in gitenv:
1043 if 'GIT_DIR' in gitenv:
1045 del gitenv['GIT_DIR']
1044 del gitenv['GIT_DIR']
1046 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1045 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1047 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1046 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1048
1047
1049 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1048 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1050 _opts = {'env': gitenv, 'shell': False}
1049 _opts = {'env': gitenv, 'shell': False}
1051
1050
1052 try:
1051 try:
1053 _opts.update(opts)
1052 _opts.update(opts)
1054 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
1053 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
1055
1054
1056 return ''.join(p), ''.join(p.error)
1055 return ''.join(p), ''.join(p.error)
1057 except (EnvironmentError, OSError) as err:
1056 except (EnvironmentError, OSError) as err:
1058 cmd = ' '.join(cmd) # human friendly CMD
1057 cmd = ' '.join(cmd) # human friendly CMD
1059 tb_err = ("Couldn't run git command (%s).\n"
1058 tb_err = ("Couldn't run git command (%s).\n"
1060 "Original error was:%s\n"
1059 "Original error was:%s\n"
1061 "Call options:%s\n"
1060 "Call options:%s\n"
1062 % (cmd, err, _opts))
1061 % (cmd, err, _opts))
1063 log.exception(tb_err)
1062 log.exception(tb_err)
1064 if safe_call:
1063 if safe_call:
1065 return '', err
1064 return '', err
1066 else:
1065 else:
1067 raise exceptions.VcsException()(tb_err)
1066 raise exceptions.VcsException()(tb_err)
1068
1067
1069 @reraise_safe_exceptions
1068 @reraise_safe_exceptions
1070 def install_hooks(self, wire, force=False):
1069 def install_hooks(self, wire, force=False):
1071 from vcsserver.hook_utils import install_git_hooks
1070 from vcsserver.hook_utils import install_git_hooks
1072 bare = self.bare(wire)
1071 bare = self.bare(wire)
1073 path = wire['path']
1072 path = wire['path']
1074 return install_git_hooks(path, bare, force_create=force)
1073 return install_git_hooks(path, bare, force_create=force)
1075
1074
1076 @reraise_safe_exceptions
1075 @reraise_safe_exceptions
1077 def get_hooks_info(self, wire):
1076 def get_hooks_info(self, wire):
1078 from vcsserver.hook_utils import (
1077 from vcsserver.hook_utils import (
1079 get_git_pre_hook_version, get_git_post_hook_version)
1078 get_git_pre_hook_version, get_git_post_hook_version)
1080 bare = self.bare(wire)
1079 bare = self.bare(wire)
1081 path = wire['path']
1080 path = wire['path']
1082 return {
1081 return {
1083 'pre_version': get_git_pre_hook_version(path, bare),
1082 'pre_version': get_git_pre_hook_version(path, bare),
1084 'post_version': get_git_post_hook_version(path, bare),
1083 'post_version': get_git_post_hook_version(path, bare),
1085 }
1084 }
@@ -1,939 +1,939 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23 import traceback
23 import traceback
24
24
25 from hgext import largefiles, rebase
25 from hgext import largefiles, rebase
26 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29 from mercurial import verify
29 from mercurial import verify
30
30
31 import vcsserver
31 import vcsserver
32 from vcsserver import exceptions
32 from vcsserver import exceptions
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 from vcsserver.hgcompat import (
34 from vcsserver.hgcompat import (
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 RepoLookupError, InterventionRequired, RequirementError)
39 RepoLookupError, InterventionRequired, RequirementError)
40 from vcsserver.vcs_base import RemoteBase
40 from vcsserver.vcs_base import RemoteBase
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44
44
45 def make_ui_from_config(repo_config):
45 def make_ui_from_config(repo_config):
46
46
47 class LoggingUI(ui.ui):
47 class LoggingUI(ui.ui):
48 def status(self, *msg, **opts):
48 def status(self, *msg, **opts):
49 log.info(' '.join(msg).rstrip('\n'))
49 log.info(' '.join(msg).rstrip('\n'))
50 super(LoggingUI, self).status(*msg, **opts)
50 super(LoggingUI, self).status(*msg, **opts)
51
51
52 def warn(self, *msg, **opts):
52 def warn(self, *msg, **opts):
53 log.warn(' '.join(msg).rstrip('\n'))
53 log.warn(' '.join(msg).rstrip('\n'))
54 super(LoggingUI, self).warn(*msg, **opts)
54 super(LoggingUI, self).warn(*msg, **opts)
55
55
56 def error(self, *msg, **opts):
56 def error(self, *msg, **opts):
57 log.error(' '.join(msg).rstrip('\n'))
57 log.error(' '.join(msg).rstrip('\n'))
58 super(LoggingUI, self).error(*msg, **opts)
58 super(LoggingUI, self).error(*msg, **opts)
59
59
60 def note(self, *msg, **opts):
60 def note(self, *msg, **opts):
61 log.info(' '.join(msg).rstrip('\n'))
61 log.info(' '.join(msg).rstrip('\n'))
62 super(LoggingUI, self).note(*msg, **opts)
62 super(LoggingUI, self).note(*msg, **opts)
63
63
64 def debug(self, *msg, **opts):
64 def debug(self, *msg, **opts):
65 log.debug(' '.join(msg).rstrip('\n'))
65 log.debug(' '.join(msg).rstrip('\n'))
66 super(LoggingUI, self).debug(*msg, **opts)
66 super(LoggingUI, self).debug(*msg, **opts)
67
67
68 baseui = LoggingUI()
68 baseui = LoggingUI()
69
69
70 # clean the baseui object
70 # clean the baseui object
71 baseui._ocfg = hgconfig.config()
71 baseui._ocfg = hgconfig.config()
72 baseui._ucfg = hgconfig.config()
72 baseui._ucfg = hgconfig.config()
73 baseui._tcfg = hgconfig.config()
73 baseui._tcfg = hgconfig.config()
74
74
75 for section, option, value in repo_config:
75 for section, option, value in repo_config:
76 baseui.setconfig(section, option, value)
76 baseui.setconfig(section, option, value)
77
77
78 # make our hgweb quiet so it doesn't print output
78 # make our hgweb quiet so it doesn't print output
79 baseui.setconfig('ui', 'quiet', 'true')
79 baseui.setconfig('ui', 'quiet', 'true')
80
80
81 baseui.setconfig('ui', 'paginate', 'never')
81 baseui.setconfig('ui', 'paginate', 'never')
82 # for better Error reporting of Mercurial
82 # for better Error reporting of Mercurial
83 baseui.setconfig('ui', 'message-output', 'stderr')
83 baseui.setconfig('ui', 'message-output', 'stderr')
84
84
85 # force mercurial to only use 1 thread, otherwise it may try to set a
85 # force mercurial to only use 1 thread, otherwise it may try to set a
86 # signal in a non-main thread, thus generating a ValueError.
86 # signal in a non-main thread, thus generating a ValueError.
87 baseui.setconfig('worker', 'numcpus', 1)
87 baseui.setconfig('worker', 'numcpus', 1)
88
88
89 # If there is no config for the largefiles extension, we explicitly disable
89 # If there is no config for the largefiles extension, we explicitly disable
90 # it here. This overrides settings from repositories hgrc file. Recent
90 # it here. This overrides settings from repositories hgrc file. Recent
91 # mercurial versions enable largefiles in hgrc on clone from largefile
91 # mercurial versions enable largefiles in hgrc on clone from largefile
92 # repo.
92 # repo.
93 if not baseui.hasconfig('extensions', 'largefiles'):
93 if not baseui.hasconfig('extensions', 'largefiles'):
94 log.debug('Explicitly disable largefiles extension for repo.')
94 log.debug('Explicitly disable largefiles extension for repo.')
95 baseui.setconfig('extensions', 'largefiles', '!')
95 baseui.setconfig('extensions', 'largefiles', '!')
96
96
97 return baseui
97 return baseui
98
98
99
99
100 def reraise_safe_exceptions(func):
100 def reraise_safe_exceptions(func):
101 """Decorator for converting mercurial exceptions to something neutral."""
101 """Decorator for converting mercurial exceptions to something neutral."""
102
102
103 def wrapper(*args, **kwargs):
103 def wrapper(*args, **kwargs):
104 try:
104 try:
105 return func(*args, **kwargs)
105 return func(*args, **kwargs)
106 except (Abort, InterventionRequired) as e:
106 except (Abort, InterventionRequired) as e:
107 raise_from_original(exceptions.AbortException(e))
107 raise_from_original(exceptions.AbortException(e))
108 except RepoLookupError as e:
108 except RepoLookupError as e:
109 raise_from_original(exceptions.LookupException(e))
109 raise_from_original(exceptions.LookupException(e))
110 except RequirementError as e:
110 except RequirementError as e:
111 raise_from_original(exceptions.RequirementException(e))
111 raise_from_original(exceptions.RequirementException(e))
112 except RepoError as e:
112 except RepoError as e:
113 raise_from_original(exceptions.VcsException(e))
113 raise_from_original(exceptions.VcsException(e))
114 except LookupError as e:
114 except LookupError as e:
115 raise_from_original(exceptions.LookupException(e))
115 raise_from_original(exceptions.LookupException(e))
116 except Exception as e:
116 except Exception as e:
117 if not hasattr(e, '_vcs_kind'):
117 if not hasattr(e, '_vcs_kind'):
118 log.exception("Unhandled exception in hg remote call")
118 log.exception("Unhandled exception in hg remote call")
119 raise_from_original(exceptions.UnhandledException(e))
119 raise_from_original(exceptions.UnhandledException(e))
120
120
121 raise
121 raise
122 return wrapper
122 return wrapper
123
123
124
124
125 class MercurialFactory(RepoFactory):
125 class MercurialFactory(RepoFactory):
126 repo_type = 'hg'
126 repo_type = 'hg'
127
127
128 def _create_config(self, config, hooks=True):
128 def _create_config(self, config, hooks=True):
129 if not hooks:
129 if not hooks:
130 hooks_to_clean = frozenset((
130 hooks_to_clean = frozenset((
131 'changegroup.repo_size', 'preoutgoing.pre_pull',
131 'changegroup.repo_size', 'preoutgoing.pre_pull',
132 'outgoing.pull_logger', 'prechangegroup.pre_push'))
132 'outgoing.pull_logger', 'prechangegroup.pre_push'))
133 new_config = []
133 new_config = []
134 for section, option, value in config:
134 for section, option, value in config:
135 if section == 'hooks' and option in hooks_to_clean:
135 if section == 'hooks' and option in hooks_to_clean:
136 continue
136 continue
137 new_config.append((section, option, value))
137 new_config.append((section, option, value))
138 config = new_config
138 config = new_config
139
139
140 baseui = make_ui_from_config(config)
140 baseui = make_ui_from_config(config)
141 return baseui
141 return baseui
142
142
143 def _create_repo(self, wire, create):
143 def _create_repo(self, wire, create):
144 baseui = self._create_config(wire["config"])
144 baseui = self._create_config(wire["config"])
145 return instance(baseui, wire["path"], create)
145 return instance(baseui, wire["path"], create)
146
146
147 def repo(self, wire, create=False):
147 def repo(self, wire, create=False):
148 """
148 """
149 Get a repository instance for the given path.
149 Get a repository instance for the given path.
150 """
150 """
151 return self._create_repo(wire, create)
151 return self._create_repo(wire, create)
152
152
153
153
154 class HgRemote(RemoteBase):
154 class HgRemote(RemoteBase):
155
155
156 def __init__(self, factory):
156 def __init__(self, factory):
157 self._factory = factory
157 self._factory = factory
158 self._bulk_methods = {
158 self._bulk_methods = {
159 "affected_files": self.ctx_files,
159 "affected_files": self.ctx_files,
160 "author": self.ctx_user,
160 "author": self.ctx_user,
161 "branch": self.ctx_branch,
161 "branch": self.ctx_branch,
162 "children": self.ctx_children,
162 "children": self.ctx_children,
163 "date": self.ctx_date,
163 "date": self.ctx_date,
164 "message": self.ctx_description,
164 "message": self.ctx_description,
165 "parents": self.ctx_parents,
165 "parents": self.ctx_parents,
166 "status": self.ctx_status,
166 "status": self.ctx_status,
167 "obsolete": self.ctx_obsolete,
167 "obsolete": self.ctx_obsolete,
168 "phase": self.ctx_phase,
168 "phase": self.ctx_phase,
169 "hidden": self.ctx_hidden,
169 "hidden": self.ctx_hidden,
170 "_file_paths": self.ctx_list,
170 "_file_paths": self.ctx_list,
171 }
171 }
172 self.region = self._factory._cache_region
173
172
174 def _get_ctx(self, repo, ref):
173 def _get_ctx(self, repo, ref):
175 return get_ctx(repo, ref)
174 return get_ctx(repo, ref)
176
175
177 @reraise_safe_exceptions
176 @reraise_safe_exceptions
178 def discover_hg_version(self):
177 def discover_hg_version(self):
179 from mercurial import util
178 from mercurial import util
180 return util.version()
179 return util.version()
181
180
182 @reraise_safe_exceptions
181 @reraise_safe_exceptions
183 def is_empty(self, wire):
182 def is_empty(self, wire):
184 repo = self._factory.repo(wire)
183 repo = self._factory.repo(wire)
185
184
186 try:
185 try:
187 return len(repo) == 0
186 return len(repo) == 0
188 except Exception:
187 except Exception:
189 log.exception("failed to read object_store")
188 log.exception("failed to read object_store")
190 return False
189 return False
191
190
192 @reraise_safe_exceptions
191 @reraise_safe_exceptions
193 def archive_repo(self, archive_path, mtime, file_info, kind):
192 def archive_repo(self, archive_path, mtime, file_info, kind):
194 if kind == "tgz":
193 if kind == "tgz":
195 archiver = archival.tarit(archive_path, mtime, "gz")
194 archiver = archival.tarit(archive_path, mtime, "gz")
196 elif kind == "tbz2":
195 elif kind == "tbz2":
197 archiver = archival.tarit(archive_path, mtime, "bz2")
196 archiver = archival.tarit(archive_path, mtime, "bz2")
198 elif kind == 'zip':
197 elif kind == 'zip':
199 archiver = archival.zipit(archive_path, mtime)
198 archiver = archival.zipit(archive_path, mtime)
200 else:
199 else:
201 raise exceptions.ArchiveException()(
200 raise exceptions.ArchiveException()(
202 'Remote does not support: "%s".' % kind)
201 'Remote does not support: "%s".' % kind)
203
202
204 for f_path, f_mode, f_is_link, f_content in file_info:
203 for f_path, f_mode, f_is_link, f_content in file_info:
205 archiver.addfile(f_path, f_mode, f_is_link, f_content)
204 archiver.addfile(f_path, f_mode, f_is_link, f_content)
206 archiver.done()
205 archiver.done()
207
206
208 @reraise_safe_exceptions
207 @reraise_safe_exceptions
209 def bookmarks(self, wire):
208 def bookmarks(self, wire):
210 cache_on, context_uid, repo_id = self._cache_on(wire)
209 cache_on, context_uid, repo_id = self._cache_on(wire)
211 @self.region.conditional_cache_on_arguments(condition=cache_on)
210 @self.region.conditional_cache_on_arguments(condition=cache_on)
212 def _bookmarks(_context_uid, _repo_id):
211 def _bookmarks(_context_uid, _repo_id):
213 repo = self._factory.repo(wire)
212 repo = self._factory.repo(wire)
214 return dict(repo._bookmarks)
213 return dict(repo._bookmarks)
215
214
216 return _bookmarks(context_uid, repo_id)
215 return _bookmarks(context_uid, repo_id)
217
216
218 @reraise_safe_exceptions
217 @reraise_safe_exceptions
219 def branches(self, wire, normal, closed):
218 def branches(self, wire, normal, closed):
220 cache_on, context_uid, repo_id = self._cache_on(wire)
219 cache_on, context_uid, repo_id = self._cache_on(wire)
221 @self.region.conditional_cache_on_arguments(condition=cache_on)
220 @self.region.conditional_cache_on_arguments(condition=cache_on)
222 def _branches(_context_uid, _repo_id, _normal, _closed):
221 def _branches(_context_uid, _repo_id, _normal, _closed):
223 repo = self._factory.repo(wire)
222 repo = self._factory.repo(wire)
224 iter_branches = repo.branchmap().iterbranches()
223 iter_branches = repo.branchmap().iterbranches()
225 bt = {}
224 bt = {}
226 for branch_name, _heads, tip, is_closed in iter_branches:
225 for branch_name, _heads, tip, is_closed in iter_branches:
227 if normal and not is_closed:
226 if normal and not is_closed:
228 bt[branch_name] = tip
227 bt[branch_name] = tip
229 if closed and is_closed:
228 if closed and is_closed:
230 bt[branch_name] = tip
229 bt[branch_name] = tip
231
230
232 return bt
231 return bt
233
232
234 return _branches(context_uid, repo_id, normal, closed)
233 return _branches(context_uid, repo_id, normal, closed)
235
234
236 @reraise_safe_exceptions
235 @reraise_safe_exceptions
237 def bulk_request(self, wire, commit_id, pre_load):
236 def bulk_request(self, wire, commit_id, pre_load):
238 cache_on, context_uid, repo_id = self._cache_on(wire)
237 cache_on, context_uid, repo_id = self._cache_on(wire)
239 @self.region.conditional_cache_on_arguments(condition=cache_on)
238 @self.region.conditional_cache_on_arguments(condition=cache_on)
240 def _bulk_request(_repo_id, _commit_id, _pre_load):
239 def _bulk_request(_repo_id, _commit_id, _pre_load):
241 result = {}
240 result = {}
242 for attr in pre_load:
241 for attr in pre_load:
243 try:
242 try:
244 method = self._bulk_methods[attr]
243 method = self._bulk_methods[attr]
245 result[attr] = method(wire, commit_id)
244 result[attr] = method(wire, commit_id)
246 except KeyError as e:
245 except KeyError as e:
247 raise exceptions.VcsException(e)(
246 raise exceptions.VcsException(e)(
248 'Unknown bulk attribute: "%s"' % attr)
247 'Unknown bulk attribute: "%s"' % attr)
249 return result
248 return result
250
249
251 return _bulk_request(repo_id, commit_id, sorted(pre_load))
250 return _bulk_request(repo_id, commit_id, sorted(pre_load))
252
251
253 @reraise_safe_exceptions
252 @reraise_safe_exceptions
254 def ctx_branch(self, wire, commit_id):
253 def ctx_branch(self, wire, commit_id):
255 cache_on, context_uid, repo_id = self._cache_on(wire)
254 cache_on, context_uid, repo_id = self._cache_on(wire)
256 @self.region.conditional_cache_on_arguments(condition=cache_on)
255 @self.region.conditional_cache_on_arguments(condition=cache_on)
257 def _ctx_branch(_repo_id, _commit_id):
256 def _ctx_branch(_repo_id, _commit_id):
258 repo = self._factory.repo(wire)
257 repo = self._factory.repo(wire)
259 ctx = self._get_ctx(repo, commit_id)
258 ctx = self._get_ctx(repo, commit_id)
260 return ctx.branch()
259 return ctx.branch()
261 return _ctx_branch(repo_id, commit_id)
260 return _ctx_branch(repo_id, commit_id)
262
261
263 @reraise_safe_exceptions
262 @reraise_safe_exceptions
264 def ctx_date(self, wire, commit_id):
263 def ctx_date(self, wire, commit_id):
265 cache_on, context_uid, repo_id = self._cache_on(wire)
264 cache_on, context_uid, repo_id = self._cache_on(wire)
266 @self.region.conditional_cache_on_arguments(condition=cache_on)
265 @self.region.conditional_cache_on_arguments(condition=cache_on)
267 def _ctx_date(_repo_id, _commit_id):
266 def _ctx_date(_repo_id, _commit_id):
268 repo = self._factory.repo(wire)
267 repo = self._factory.repo(wire)
269 ctx = self._get_ctx(repo, commit_id)
268 ctx = self._get_ctx(repo, commit_id)
270 return ctx.date()
269 return ctx.date()
271 return _ctx_date(repo_id, commit_id)
270 return _ctx_date(repo_id, commit_id)
272
271
273 @reraise_safe_exceptions
272 @reraise_safe_exceptions
274 def ctx_description(self, wire, revision):
273 def ctx_description(self, wire, revision):
275 repo = self._factory.repo(wire)
274 repo = self._factory.repo(wire)
276 ctx = self._get_ctx(repo, revision)
275 ctx = self._get_ctx(repo, revision)
277 return ctx.description()
276 return ctx.description()
278
277
279 @reraise_safe_exceptions
278 @reraise_safe_exceptions
280 def ctx_files(self, wire, commit_id):
279 def ctx_files(self, wire, commit_id):
281 cache_on, context_uid, repo_id = self._cache_on(wire)
280 cache_on, context_uid, repo_id = self._cache_on(wire)
282 @self.region.conditional_cache_on_arguments(condition=cache_on)
281 @self.region.conditional_cache_on_arguments(condition=cache_on)
283 def _ctx_files(_repo_id, _commit_id):
282 def _ctx_files(_repo_id, _commit_id):
284 repo = self._factory.repo(wire)
283 repo = self._factory.repo(wire)
285 ctx = self._get_ctx(repo, commit_id)
284 ctx = self._get_ctx(repo, commit_id)
286 return ctx.files()
285 return ctx.files()
287
286
288 return _ctx_files(repo_id, commit_id)
287 return _ctx_files(repo_id, commit_id)
289
288
290 @reraise_safe_exceptions
289 @reraise_safe_exceptions
291 def ctx_list(self, path, revision):
290 def ctx_list(self, path, revision):
292 repo = self._factory.repo(path)
291 repo = self._factory.repo(path)
293 ctx = self._get_ctx(repo, revision)
292 ctx = self._get_ctx(repo, revision)
294 return list(ctx)
293 return list(ctx)
295
294
296 @reraise_safe_exceptions
295 @reraise_safe_exceptions
297 def ctx_parents(self, wire, commit_id):
296 def ctx_parents(self, wire, commit_id):
298 cache_on, context_uid, repo_id = self._cache_on(wire)
297 cache_on, context_uid, repo_id = self._cache_on(wire)
299 @self.region.conditional_cache_on_arguments(condition=cache_on)
298 @self.region.conditional_cache_on_arguments(condition=cache_on)
300 def _ctx_parents(_repo_id, _commit_id):
299 def _ctx_parents(_repo_id, _commit_id):
301 repo = self._factory.repo(wire)
300 repo = self._factory.repo(wire)
302 ctx = self._get_ctx(repo, commit_id)
301 ctx = self._get_ctx(repo, commit_id)
303 return [parent.rev() for parent in ctx.parents()
302 return [parent.rev() for parent in ctx.parents()
304 if not (parent.hidden() or parent.obsolete())]
303 if not (parent.hidden() or parent.obsolete())]
305
304
306 return _ctx_parents(repo_id, commit_id)
305 return _ctx_parents(repo_id, commit_id)
307
306
308 @reraise_safe_exceptions
307 @reraise_safe_exceptions
309 def ctx_children(self, wire, commit_id):
308 def ctx_children(self, wire, commit_id):
310 cache_on, context_uid, repo_id = self._cache_on(wire)
309 cache_on, context_uid, repo_id = self._cache_on(wire)
311 @self.region.conditional_cache_on_arguments(condition=cache_on)
310 @self.region.conditional_cache_on_arguments(condition=cache_on)
312 def _ctx_children(_repo_id, _commit_id):
311 def _ctx_children(_repo_id, _commit_id):
313 repo = self._factory.repo(wire)
312 repo = self._factory.repo(wire)
314 ctx = self._get_ctx(repo, commit_id)
313 ctx = self._get_ctx(repo, commit_id)
315 return [child.rev() for child in ctx.children()
314 return [child.rev() for child in ctx.children()
316 if not (child.hidden() or child.obsolete())]
315 if not (child.hidden() or child.obsolete())]
317
316
318 return _ctx_children(repo_id, commit_id)
317 return _ctx_children(repo_id, commit_id)
319
318
320 @reraise_safe_exceptions
319 @reraise_safe_exceptions
321 def ctx_phase(self, wire, commit_id):
320 def ctx_phase(self, wire, commit_id):
322 cache_on, context_uid, repo_id = self._cache_on(wire)
321 cache_on, context_uid, repo_id = self._cache_on(wire)
323 @self.region.conditional_cache_on_arguments(condition=cache_on)
322 @self.region.conditional_cache_on_arguments(condition=cache_on)
324 def _ctx_phase(_context_uid, _repo_id, _commit_id):
323 def _ctx_phase(_context_uid, _repo_id, _commit_id):
325 repo = self._factory.repo(wire)
324 repo = self._factory.repo(wire)
326 ctx = self._get_ctx(repo, commit_id)
325 ctx = self._get_ctx(repo, commit_id)
327 # public=0, draft=1, secret=3
326 # public=0, draft=1, secret=3
328 return ctx.phase()
327 return ctx.phase()
329 return _ctx_phase(context_uid, repo_id, commit_id)
328 return _ctx_phase(context_uid, repo_id, commit_id)
330
329
331 @reraise_safe_exceptions
330 @reraise_safe_exceptions
332 def ctx_obsolete(self, wire, commit_id):
331 def ctx_obsolete(self, wire, commit_id):
333 cache_on, context_uid, repo_id = self._cache_on(wire)
332 cache_on, context_uid, repo_id = self._cache_on(wire)
334 @self.region.conditional_cache_on_arguments(condition=cache_on)
333 @self.region.conditional_cache_on_arguments(condition=cache_on)
335 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
334 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
336 repo = self._factory.repo(wire)
335 repo = self._factory.repo(wire)
337 ctx = self._get_ctx(repo, commit_id)
336 ctx = self._get_ctx(repo, commit_id)
338 return ctx.obsolete()
337 return ctx.obsolete()
339 return _ctx_obsolete(context_uid, repo_id, commit_id)
338 return _ctx_obsolete(context_uid, repo_id, commit_id)
340
339
341 @reraise_safe_exceptions
340 @reraise_safe_exceptions
342 def ctx_hidden(self, wire, commit_id):
341 def ctx_hidden(self, wire, commit_id):
343 cache_on, context_uid, repo_id = self._cache_on(wire)
342 cache_on, context_uid, repo_id = self._cache_on(wire)
344 @self.region.conditional_cache_on_arguments(condition=cache_on)
343 @self.region.conditional_cache_on_arguments(condition=cache_on)
345 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
344 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
346 repo = self._factory.repo(wire)
345 repo = self._factory.repo(wire)
347 ctx = self._get_ctx(repo, commit_id)
346 ctx = self._get_ctx(repo, commit_id)
348 return ctx.hidden()
347 return ctx.hidden()
349 return _ctx_hidden(context_uid, repo_id, commit_id)
348 return _ctx_hidden(context_uid, repo_id, commit_id)
350
349
351 @reraise_safe_exceptions
350 @reraise_safe_exceptions
352 def ctx_substate(self, wire, revision):
351 def ctx_substate(self, wire, revision):
353 repo = self._factory.repo(wire)
352 repo = self._factory.repo(wire)
354 ctx = self._get_ctx(repo, revision)
353 ctx = self._get_ctx(repo, revision)
355 return ctx.substate
354 return ctx.substate
356
355
357 @reraise_safe_exceptions
356 @reraise_safe_exceptions
358 def ctx_status(self, wire, revision):
357 def ctx_status(self, wire, revision):
359 repo = self._factory.repo(wire)
358 repo = self._factory.repo(wire)
360 ctx = self._get_ctx(repo, revision)
359 ctx = self._get_ctx(repo, revision)
361 status = repo[ctx.p1().node()].status(other=ctx.node())
360 status = repo[ctx.p1().node()].status(other=ctx.node())
362 # object of status (odd, custom named tuple in mercurial) is not
361 # object of status (odd, custom named tuple in mercurial) is not
363 # correctly serializable, we make it a list, as the underling
362 # correctly serializable, we make it a list, as the underling
364 # API expects this to be a list
363 # API expects this to be a list
365 return list(status)
364 return list(status)
366
365
367 @reraise_safe_exceptions
366 @reraise_safe_exceptions
368 def ctx_user(self, wire, revision):
367 def ctx_user(self, wire, revision):
369 repo = self._factory.repo(wire)
368 repo = self._factory.repo(wire)
370 ctx = self._get_ctx(repo, revision)
369 ctx = self._get_ctx(repo, revision)
371 return ctx.user()
370 return ctx.user()
372
371
373 @reraise_safe_exceptions
372 @reraise_safe_exceptions
374 def check_url(self, url, config):
373 def check_url(self, url, config):
375 _proto = None
374 _proto = None
376 if '+' in url[:url.find('://')]:
375 if '+' in url[:url.find('://')]:
377 _proto = url[0:url.find('+')]
376 _proto = url[0:url.find('+')]
378 url = url[url.find('+') + 1:]
377 url = url[url.find('+') + 1:]
379 handlers = []
378 handlers = []
380 url_obj = url_parser(url)
379 url_obj = url_parser(url)
381 test_uri, authinfo = url_obj.authinfo()
380 test_uri, authinfo = url_obj.authinfo()
382 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
381 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
383 url_obj.query = obfuscate_qs(url_obj.query)
382 url_obj.query = obfuscate_qs(url_obj.query)
384
383
385 cleaned_uri = str(url_obj)
384 cleaned_uri = str(url_obj)
386 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
385 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
387
386
388 if authinfo:
387 if authinfo:
389 # create a password manager
388 # create a password manager
390 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
389 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
391 passmgr.add_password(*authinfo)
390 passmgr.add_password(*authinfo)
392
391
393 handlers.extend((httpbasicauthhandler(passmgr),
392 handlers.extend((httpbasicauthhandler(passmgr),
394 httpdigestauthhandler(passmgr)))
393 httpdigestauthhandler(passmgr)))
395
394
396 o = urllib2.build_opener(*handlers)
395 o = urllib2.build_opener(*handlers)
397 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
396 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
398 ('Accept', 'application/mercurial-0.1')]
397 ('Accept', 'application/mercurial-0.1')]
399
398
400 q = {"cmd": 'between'}
399 q = {"cmd": 'between'}
401 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
400 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
402 qs = '?%s' % urllib.urlencode(q)
401 qs = '?%s' % urllib.urlencode(q)
403 cu = "%s%s" % (test_uri, qs)
402 cu = "%s%s" % (test_uri, qs)
404 req = urllib2.Request(cu, None, {})
403 req = urllib2.Request(cu, None, {})
405
404
406 try:
405 try:
407 log.debug("Trying to open URL %s", cleaned_uri)
406 log.debug("Trying to open URL %s", cleaned_uri)
408 resp = o.open(req)
407 resp = o.open(req)
409 if resp.code != 200:
408 if resp.code != 200:
410 raise exceptions.URLError()('Return Code is not 200')
409 raise exceptions.URLError()('Return Code is not 200')
411 except Exception as e:
410 except Exception as e:
412 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
411 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
413 # means it cannot be cloned
412 # means it cannot be cloned
414 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
413 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
415
414
416 # now check if it's a proper hg repo, but don't do it for svn
415 # now check if it's a proper hg repo, but don't do it for svn
417 try:
416 try:
418 if _proto == 'svn':
417 if _proto == 'svn':
419 pass
418 pass
420 else:
419 else:
421 # check for pure hg repos
420 # check for pure hg repos
422 log.debug(
421 log.debug(
423 "Verifying if URL is a Mercurial repository: %s",
422 "Verifying if URL is a Mercurial repository: %s",
424 cleaned_uri)
423 cleaned_uri)
425 ui = make_ui_from_config(config)
424 ui = make_ui_from_config(config)
426 peer_checker = makepeer(ui, url)
425 peer_checker = makepeer(ui, url)
427 peer_checker.lookup('tip')
426 peer_checker.lookup('tip')
428 except Exception as e:
427 except Exception as e:
429 log.warning("URL is not a valid Mercurial repository: %s",
428 log.warning("URL is not a valid Mercurial repository: %s",
430 cleaned_uri)
429 cleaned_uri)
431 raise exceptions.URLError(e)(
430 raise exceptions.URLError(e)(
432 "url [%s] does not look like an hg repo org_exc: %s"
431 "url [%s] does not look like an hg repo org_exc: %s"
433 % (cleaned_uri, e))
432 % (cleaned_uri, e))
434
433
435 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
434 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
436 return True
435 return True
437
436
438 @reraise_safe_exceptions
437 @reraise_safe_exceptions
439 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
438 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
440 repo = self._factory.repo(wire)
439 repo = self._factory.repo(wire)
441
440
442 if file_filter:
441 if file_filter:
443 match_filter = match(file_filter[0], '', [file_filter[1]])
442 match_filter = match(file_filter[0], '', [file_filter[1]])
444 else:
443 else:
445 match_filter = file_filter
444 match_filter = file_filter
446 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
445 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
447
446
448 try:
447 try:
449 return "".join(patch.diff(
448 return "".join(patch.diff(
450 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
449 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
451 except RepoLookupError as e:
450 except RepoLookupError as e:
452 raise exceptions.LookupException(e)()
451 raise exceptions.LookupException(e)()
453
452
454 @reraise_safe_exceptions
453 @reraise_safe_exceptions
455 def node_history(self, wire, revision, path, limit):
454 def node_history(self, wire, revision, path, limit):
456 cache_on, context_uid, repo_id = self._cache_on(wire)
455 cache_on, context_uid, repo_id = self._cache_on(wire)
457 @self.region.conditional_cache_on_arguments(condition=cache_on)
456 @self.region.conditional_cache_on_arguments(condition=cache_on)
458 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
457 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
459 repo = self._factory.repo(wire)
458 repo = self._factory.repo(wire)
460
459
461 ctx = self._get_ctx(repo, revision)
460 ctx = self._get_ctx(repo, revision)
462 fctx = ctx.filectx(path)
461 fctx = ctx.filectx(path)
463
462
464 def history_iter():
463 def history_iter():
465 limit_rev = fctx.rev()
464 limit_rev = fctx.rev()
466 for obj in reversed(list(fctx.filelog())):
465 for obj in reversed(list(fctx.filelog())):
467 obj = fctx.filectx(obj)
466 obj = fctx.filectx(obj)
468 ctx = obj.changectx()
467 ctx = obj.changectx()
469 if ctx.hidden() or ctx.obsolete():
468 if ctx.hidden() or ctx.obsolete():
470 continue
469 continue
471
470
472 if limit_rev >= obj.rev():
471 if limit_rev >= obj.rev():
473 yield obj
472 yield obj
474
473
475 history = []
474 history = []
476 for cnt, obj in enumerate(history_iter()):
475 for cnt, obj in enumerate(history_iter()):
477 if limit and cnt >= limit:
476 if limit and cnt >= limit:
478 break
477 break
479 history.append(hex(obj.node()))
478 history.append(hex(obj.node()))
480
479
481 return [x for x in history]
480 return [x for x in history]
482 return _node_history(context_uid, repo_id, revision, path, limit)
481 return _node_history(context_uid, repo_id, revision, path, limit)
483
482
484 @reraise_safe_exceptions
483 @reraise_safe_exceptions
485 def node_history_untill(self, wire, revision, path, limit):
484 def node_history_untill(self, wire, revision, path, limit):
486 cache_on, context_uid, repo_id = self._cache_on(wire)
485 cache_on, context_uid, repo_id = self._cache_on(wire)
487 @self.region.conditional_cache_on_arguments(condition=cache_on)
486 @self.region.conditional_cache_on_arguments(condition=cache_on)
488 def _node_history_until(_context_uid, _repo_id):
487 def _node_history_until(_context_uid, _repo_id):
489 repo = self._factory.repo(wire)
488 repo = self._factory.repo(wire)
490 ctx = self._get_ctx(repo, revision)
489 ctx = self._get_ctx(repo, revision)
491 fctx = ctx.filectx(path)
490 fctx = ctx.filectx(path)
492
491
493 file_log = list(fctx.filelog())
492 file_log = list(fctx.filelog())
494 if limit:
493 if limit:
495 # Limit to the last n items
494 # Limit to the last n items
496 file_log = file_log[-limit:]
495 file_log = file_log[-limit:]
497
496
498 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
497 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
499 return _node_history_until(context_uid, repo_id, revision, path, limit)
498 return _node_history_until(context_uid, repo_id, revision, path, limit)
500
499
501 @reraise_safe_exceptions
500 @reraise_safe_exceptions
502 def fctx_annotate(self, wire, revision, path):
501 def fctx_annotate(self, wire, revision, path):
503 repo = self._factory.repo(wire)
502 repo = self._factory.repo(wire)
504 ctx = self._get_ctx(repo, revision)
503 ctx = self._get_ctx(repo, revision)
505 fctx = ctx.filectx(path)
504 fctx = ctx.filectx(path)
506
505
507 result = []
506 result = []
508 for i, annotate_obj in enumerate(fctx.annotate(), 1):
507 for i, annotate_obj in enumerate(fctx.annotate(), 1):
509 ln_no = i
508 ln_no = i
510 sha = hex(annotate_obj.fctx.node())
509 sha = hex(annotate_obj.fctx.node())
511 content = annotate_obj.text
510 content = annotate_obj.text
512 result.append((ln_no, sha, content))
511 result.append((ln_no, sha, content))
513 return result
512 return result
514
513
515 @reraise_safe_exceptions
514 @reraise_safe_exceptions
516 def fctx_node_data(self, wire, revision, path):
515 def fctx_node_data(self, wire, revision, path):
517 repo = self._factory.repo(wire)
516 repo = self._factory.repo(wire)
518 ctx = self._get_ctx(repo, revision)
517 ctx = self._get_ctx(repo, revision)
519 fctx = ctx.filectx(path)
518 fctx = ctx.filectx(path)
520 return fctx.data()
519 return fctx.data()
521
520
522 @reraise_safe_exceptions
521 @reraise_safe_exceptions
523 def fctx_flags(self, wire, commit_id, path):
522 def fctx_flags(self, wire, commit_id, path):
524 cache_on, context_uid, repo_id = self._cache_on(wire)
523 cache_on, context_uid, repo_id = self._cache_on(wire)
525 @self.region.conditional_cache_on_arguments(condition=cache_on)
524 @self.region.conditional_cache_on_arguments(condition=cache_on)
526 def _fctx_flags(_repo_id, _commit_id, _path):
525 def _fctx_flags(_repo_id, _commit_id, _path):
527 repo = self._factory.repo(wire)
526 repo = self._factory.repo(wire)
528 ctx = self._get_ctx(repo, commit_id)
527 ctx = self._get_ctx(repo, commit_id)
529 fctx = ctx.filectx(path)
528 fctx = ctx.filectx(path)
530 return fctx.flags()
529 return fctx.flags()
531
530
532 return _fctx_flags(repo_id, commit_id, path)
531 return _fctx_flags(repo_id, commit_id, path)
533
532
534 @reraise_safe_exceptions
533 @reraise_safe_exceptions
535 def fctx_size(self, wire, commit_id, path):
534 def fctx_size(self, wire, commit_id, path):
536 cache_on, context_uid, repo_id = self._cache_on(wire)
535 cache_on, context_uid, repo_id = self._cache_on(wire)
537 @self.region.conditional_cache_on_arguments(condition=cache_on)
536 @self.region.conditional_cache_on_arguments(condition=cache_on)
538 def _fctx_size(_repo_id, _revision, _path):
537 def _fctx_size(_repo_id, _revision, _path):
539 repo = self._factory.repo(wire)
538 repo = self._factory.repo(wire)
540 ctx = self._get_ctx(repo, commit_id)
539 ctx = self._get_ctx(repo, commit_id)
541 fctx = ctx.filectx(path)
540 fctx = ctx.filectx(path)
542 return fctx.size()
541 return fctx.size()
543 return _fctx_size(repo_id, commit_id, path)
542 return _fctx_size(repo_id, commit_id, path)
544
543
545 @reraise_safe_exceptions
544 @reraise_safe_exceptions
546 def get_all_commit_ids(self, wire, name):
545 def get_all_commit_ids(self, wire, name):
547 cache_on, context_uid, repo_id = self._cache_on(wire)
546 cache_on, context_uid, repo_id = self._cache_on(wire)
548 @self.region.conditional_cache_on_arguments(condition=cache_on)
547 @self.region.conditional_cache_on_arguments(condition=cache_on)
549 def _get_all_commit_ids(_context_uid, _repo_id, _name):
548 def _get_all_commit_ids(_context_uid, _repo_id, _name):
550 repo = self._factory.repo(wire)
549 repo = self._factory.repo(wire)
551 repo = repo.filtered(name)
550 repo = repo.filtered(name)
552 revs = map(lambda x: hex(x[7]), repo.changelog.index)
551 revs = map(lambda x: hex(x[7]), repo.changelog.index)
553 return revs
552 return revs
554 return _get_all_commit_ids(context_uid, repo_id, name)
553 return _get_all_commit_ids(context_uid, repo_id, name)
555
554
556 @reraise_safe_exceptions
555 @reraise_safe_exceptions
557 def get_config_value(self, wire, section, name, untrusted=False):
556 def get_config_value(self, wire, section, name, untrusted=False):
558 repo = self._factory.repo(wire)
557 repo = self._factory.repo(wire)
559 return repo.ui.config(section, name, untrusted=untrusted)
558 return repo.ui.config(section, name, untrusted=untrusted)
560
559
561 @reraise_safe_exceptions
560 @reraise_safe_exceptions
562 def is_large_file(self, wire, path):
561 def is_large_file(self, wire, path):
563 cache_on, context_uid, repo_id = self._cache_on(wire)
562 cache_on, context_uid, repo_id = self._cache_on(wire)
564 @self.region.conditional_cache_on_arguments(condition=cache_on)
563 @self.region.conditional_cache_on_arguments(condition=cache_on)
565 def _is_large_file(_context_uid, _repo_id, _path):
564 def _is_large_file(_context_uid, _repo_id, _path):
566 return largefiles.lfutil.isstandin(path)
565 return largefiles.lfutil.isstandin(path)
567
566
568 return _is_large_file(context_uid, repo_id, path)
567 return _is_large_file(context_uid, repo_id, path)
569
568
570 @reraise_safe_exceptions
569 @reraise_safe_exceptions
571 def in_largefiles_store(self, wire, sha):
570 def in_largefiles_store(self, wire, sha):
572 repo = self._factory.repo(wire)
571 repo = self._factory.repo(wire)
573 return largefiles.lfutil.instore(repo, sha)
572 return largefiles.lfutil.instore(repo, sha)
574
573
575 @reraise_safe_exceptions
574 @reraise_safe_exceptions
576 def in_user_cache(self, wire, sha):
575 def in_user_cache(self, wire, sha):
577 repo = self._factory.repo(wire)
576 repo = self._factory.repo(wire)
578 return largefiles.lfutil.inusercache(repo.ui, sha)
577 return largefiles.lfutil.inusercache(repo.ui, sha)
579
578
580 @reraise_safe_exceptions
579 @reraise_safe_exceptions
581 def store_path(self, wire, sha):
580 def store_path(self, wire, sha):
582 repo = self._factory.repo(wire)
581 repo = self._factory.repo(wire)
583 return largefiles.lfutil.storepath(repo, sha)
582 return largefiles.lfutil.storepath(repo, sha)
584
583
585 @reraise_safe_exceptions
584 @reraise_safe_exceptions
586 def link(self, wire, sha, path):
585 def link(self, wire, sha, path):
587 repo = self._factory.repo(wire)
586 repo = self._factory.repo(wire)
588 largefiles.lfutil.link(
587 largefiles.lfutil.link(
589 largefiles.lfutil.usercachepath(repo.ui, sha), path)
588 largefiles.lfutil.usercachepath(repo.ui, sha), path)
590
589
591 @reraise_safe_exceptions
590 @reraise_safe_exceptions
592 def localrepository(self, wire, create=False):
591 def localrepository(self, wire, create=False):
593 self._factory.repo(wire, create=create)
592 self._factory.repo(wire, create=create)
594
593
595 @reraise_safe_exceptions
594 @reraise_safe_exceptions
596 def lookup(self, wire, revision, both):
595 def lookup(self, wire, revision, both):
597 cache_on, context_uid, repo_id = self._cache_on(wire)
596 cache_on, context_uid, repo_id = self._cache_on(wire)
598 @self.region.conditional_cache_on_arguments(condition=cache_on)
597 @self.region.conditional_cache_on_arguments(condition=cache_on)
599 def _lookup(_context_uid, _repo_id, _revision, _both):
598 def _lookup(_context_uid, _repo_id, _revision, _both):
600
599
601 repo = self._factory.repo(wire)
600 repo = self._factory.repo(wire)
602 rev = _revision
601 rev = _revision
603 if isinstance(rev, int):
602 if isinstance(rev, int):
604 # NOTE(marcink):
603 # NOTE(marcink):
605 # since Mercurial doesn't support negative indexes properly
604 # since Mercurial doesn't support negative indexes properly
606 # we need to shift accordingly by one to get proper index, e.g
605 # we need to shift accordingly by one to get proper index, e.g
607 # repo[-1] => repo[-2]
606 # repo[-1] => repo[-2]
608 # repo[0] => repo[-1]
607 # repo[0] => repo[-1]
609 if rev <= 0:
608 if rev <= 0:
610 rev = rev + -1
609 rev = rev + -1
611 try:
610 try:
612 ctx = self._get_ctx(repo, rev)
611 ctx = self._get_ctx(repo, rev)
613 except (TypeError, RepoLookupError) as e:
612 except (TypeError, RepoLookupError) as e:
614 e._org_exc_tb = traceback.format_exc()
613 e._org_exc_tb = traceback.format_exc()
615 raise exceptions.LookupException(e)(rev)
614 raise exceptions.LookupException(e)(rev)
616 except LookupError as e:
615 except LookupError as e:
617 e._org_exc_tb = traceback.format_exc()
616 e._org_exc_tb = traceback.format_exc()
618 raise exceptions.LookupException(e)(e.name)
617 raise exceptions.LookupException(e)(e.name)
619
618
620 if not both:
619 if not both:
621 return ctx.hex()
620 return ctx.hex()
622
621
623 ctx = repo[ctx.hex()]
622 ctx = repo[ctx.hex()]
624 return ctx.hex(), ctx.rev()
623 return ctx.hex(), ctx.rev()
625
624
626 return _lookup(context_uid, repo_id, revision, both)
625 return _lookup(context_uid, repo_id, revision, both)
627
626
628 @reraise_safe_exceptions
627 @reraise_safe_exceptions
629 def sync_push(self, wire, url):
628 def sync_push(self, wire, url):
630 if not self.check_url(url, wire['config']):
629 if not self.check_url(url, wire['config']):
631 return
630 return
632
631
633 repo = self._factory.repo(wire)
632 repo = self._factory.repo(wire)
634
633
635 # Disable any prompts for this repo
634 # Disable any prompts for this repo
636 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
635 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
637
636
638 bookmarks = dict(repo._bookmarks).keys()
637 bookmarks = dict(repo._bookmarks).keys()
639 remote = peer(repo, {}, url)
638 remote = peer(repo, {}, url)
640 # Disable any prompts for this remote
639 # Disable any prompts for this remote
641 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
640 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
642
641
643 return exchange.push(
642 return exchange.push(
644 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
643 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
645
644
646 @reraise_safe_exceptions
645 @reraise_safe_exceptions
647 def revision(self, wire, rev):
646 def revision(self, wire, rev):
648 repo = self._factory.repo(wire)
647 repo = self._factory.repo(wire)
649 ctx = self._get_ctx(repo, rev)
648 ctx = self._get_ctx(repo, rev)
650 return ctx.rev()
649 return ctx.rev()
651
650
652 @reraise_safe_exceptions
651 @reraise_safe_exceptions
653 def rev_range(self, wire, commit_filter):
652 def rev_range(self, wire, commit_filter):
654 cache_on, context_uid, repo_id = self._cache_on(wire)
653 cache_on, context_uid, repo_id = self._cache_on(wire)
654
655 @self.region.conditional_cache_on_arguments(condition=cache_on)
655 @self.region.conditional_cache_on_arguments(condition=cache_on)
656 def _rev_range(_context_uid, _repo_id, _filter):
656 def _rev_range(_context_uid, _repo_id, _filter):
657 repo = self._factory.repo(wire)
657 repo = self._factory.repo(wire)
658 revisions = [rev for rev in revrange(repo, commit_filter)]
658 revisions = [rev for rev in revrange(repo, commit_filter)]
659 return revisions
659 return revisions
660
660
661 return _rev_range(context_uid, repo_id, sorted(commit_filter))
661 return _rev_range(context_uid, repo_id, sorted(commit_filter))
662
662
663 @reraise_safe_exceptions
663 @reraise_safe_exceptions
664 def rev_range_hash(self, wire, node):
664 def rev_range_hash(self, wire, node):
665 repo = self._factory.repo(wire)
665 repo = self._factory.repo(wire)
666
666
667 def get_revs(repo, rev_opt):
667 def get_revs(repo, rev_opt):
668 if rev_opt:
668 if rev_opt:
669 revs = revrange(repo, rev_opt)
669 revs = revrange(repo, rev_opt)
670 if len(revs) == 0:
670 if len(revs) == 0:
671 return (nullrev, nullrev)
671 return (nullrev, nullrev)
672 return max(revs), min(revs)
672 return max(revs), min(revs)
673 else:
673 else:
674 return len(repo) - 1, 0
674 return len(repo) - 1, 0
675
675
676 stop, start = get_revs(repo, [node + ':'])
676 stop, start = get_revs(repo, [node + ':'])
677 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
677 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
678 return revs
678 return revs
679
679
680 @reraise_safe_exceptions
680 @reraise_safe_exceptions
681 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
681 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
682 other_path = kwargs.pop('other_path', None)
682 other_path = kwargs.pop('other_path', None)
683
683
684 # case when we want to compare two independent repositories
684 # case when we want to compare two independent repositories
685 if other_path and other_path != wire["path"]:
685 if other_path and other_path != wire["path"]:
686 baseui = self._factory._create_config(wire["config"])
686 baseui = self._factory._create_config(wire["config"])
687 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
687 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
688 else:
688 else:
689 repo = self._factory.repo(wire)
689 repo = self._factory.repo(wire)
690 return list(repo.revs(rev_spec, *args))
690 return list(repo.revs(rev_spec, *args))
691
691
692 @reraise_safe_exceptions
692 @reraise_safe_exceptions
693 def verify(self, wire,):
693 def verify(self, wire,):
694 repo = self._factory.repo(wire)
694 repo = self._factory.repo(wire)
695 baseui = self._factory._create_config(wire['config'])
695 baseui = self._factory._create_config(wire['config'])
696 baseui.setconfig('ui', 'quiet', 'false')
696 baseui.setconfig('ui', 'quiet', 'false')
697 output = io.BytesIO()
697 output = io.BytesIO()
698
698
699 def write(data, **unused_kwargs):
699 def write(data, **unused_kwargs):
700 output.write(data)
700 output.write(data)
701 baseui.write = write
701 baseui.write = write
702
702
703 repo.ui = baseui
703 repo.ui = baseui
704 verify.verify(repo)
704 verify.verify(repo)
705 return output.getvalue()
705 return output.getvalue()
706
706
707 @reraise_safe_exceptions
707 @reraise_safe_exceptions
708 def tags(self, wire):
708 def tags(self, wire):
709 cache_on, context_uid, repo_id = self._cache_on(wire)
709 cache_on, context_uid, repo_id = self._cache_on(wire)
710 @self.region.conditional_cache_on_arguments(condition=cache_on)
710 @self.region.conditional_cache_on_arguments(condition=cache_on)
711 def _tags(_context_uid, _repo_id):
711 def _tags(_context_uid, _repo_id):
712 repo = self._factory.repo(wire)
712 repo = self._factory.repo(wire)
713 return repo.tags()
713 return repo.tags()
714
714
715 return _tags(context_uid, repo_id)
715 return _tags(context_uid, repo_id)
716
716
717 @reraise_safe_exceptions
717 @reraise_safe_exceptions
718 def update(self, wire, node=None, clean=False):
718 def update(self, wire, node=None, clean=False):
719 repo = self._factory.repo(wire)
719 repo = self._factory.repo(wire)
720 baseui = self._factory._create_config(wire['config'])
720 baseui = self._factory._create_config(wire['config'])
721 commands.update(baseui, repo, node=node, clean=clean)
721 commands.update(baseui, repo, node=node, clean=clean)
722
722
723 @reraise_safe_exceptions
723 @reraise_safe_exceptions
724 def identify(self, wire):
724 def identify(self, wire):
725 repo = self._factory.repo(wire)
725 repo = self._factory.repo(wire)
726 baseui = self._factory._create_config(wire['config'])
726 baseui = self._factory._create_config(wire['config'])
727 output = io.BytesIO()
727 output = io.BytesIO()
728 baseui.write = output.write
728 baseui.write = output.write
729 # This is required to get a full node id
729 # This is required to get a full node id
730 baseui.debugflag = True
730 baseui.debugflag = True
731 commands.identify(baseui, repo, id=True)
731 commands.identify(baseui, repo, id=True)
732
732
733 return output.getvalue()
733 return output.getvalue()
734
734
735 @reraise_safe_exceptions
735 @reraise_safe_exceptions
736 def heads(self, wire, branch=None):
736 def heads(self, wire, branch=None):
737 repo = self._factory.repo(wire)
737 repo = self._factory.repo(wire)
738 baseui = self._factory._create_config(wire['config'])
738 baseui = self._factory._create_config(wire['config'])
739 output = io.BytesIO()
739 output = io.BytesIO()
740
740
741 def write(data, **unused_kwargs):
741 def write(data, **unused_kwargs):
742 output.write(data)
742 output.write(data)
743
743
744 baseui.write = write
744 baseui.write = write
745 if branch:
745 if branch:
746 args = [branch]
746 args = [branch]
747 else:
747 else:
748 args = []
748 args = []
749 commands.heads(baseui, repo, template='{node} ', *args)
749 commands.heads(baseui, repo, template='{node} ', *args)
750
750
751 return output.getvalue()
751 return output.getvalue()
752
752
753 @reraise_safe_exceptions
753 @reraise_safe_exceptions
754 def ancestor(self, wire, revision1, revision2):
754 def ancestor(self, wire, revision1, revision2):
755 repo = self._factory.repo(wire)
755 repo = self._factory.repo(wire)
756 changelog = repo.changelog
756 changelog = repo.changelog
757 lookup = repo.lookup
757 lookup = repo.lookup
758 a = changelog.ancestor(lookup(revision1), lookup(revision2))
758 a = changelog.ancestor(lookup(revision1), lookup(revision2))
759 return hex(a)
759 return hex(a)
760
760
761 @reraise_safe_exceptions
761 @reraise_safe_exceptions
762 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
762 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
763 baseui = self._factory._create_config(wire["config"], hooks=hooks)
763 baseui = self._factory._create_config(wire["config"], hooks=hooks)
764 clone(baseui, source, dest, noupdate=not update_after_clone)
764 clone(baseui, source, dest, noupdate=not update_after_clone)
765
765
766 @reraise_safe_exceptions
766 @reraise_safe_exceptions
767 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
767 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
768
768
769 repo = self._factory.repo(wire)
769 repo = self._factory.repo(wire)
770 baseui = self._factory._create_config(wire['config'])
770 baseui = self._factory._create_config(wire['config'])
771 publishing = baseui.configbool('phases', 'publish')
771 publishing = baseui.configbool('phases', 'publish')
772 if publishing:
772 if publishing:
773 new_commit = 'public'
773 new_commit = 'public'
774 else:
774 else:
775 new_commit = 'draft'
775 new_commit = 'draft'
776
776
777 def _filectxfn(_repo, ctx, path):
777 def _filectxfn(_repo, ctx, path):
778 """
778 """
779 Marks given path as added/changed/removed in a given _repo. This is
779 Marks given path as added/changed/removed in a given _repo. This is
780 for internal mercurial commit function.
780 for internal mercurial commit function.
781 """
781 """
782
782
783 # check if this path is removed
783 # check if this path is removed
784 if path in removed:
784 if path in removed:
785 # returning None is a way to mark node for removal
785 # returning None is a way to mark node for removal
786 return None
786 return None
787
787
788 # check if this path is added
788 # check if this path is added
789 for node in updated:
789 for node in updated:
790 if node['path'] == path:
790 if node['path'] == path:
791 return memfilectx(
791 return memfilectx(
792 _repo,
792 _repo,
793 changectx=ctx,
793 changectx=ctx,
794 path=node['path'],
794 path=node['path'],
795 data=node['content'],
795 data=node['content'],
796 islink=False,
796 islink=False,
797 isexec=bool(node['mode'] & stat.S_IXUSR),
797 isexec=bool(node['mode'] & stat.S_IXUSR),
798 copysource=False)
798 copysource=False)
799
799
800 raise exceptions.AbortException()(
800 raise exceptions.AbortException()(
801 "Given path haven't been marked as added, "
801 "Given path haven't been marked as added, "
802 "changed or removed (%s)" % path)
802 "changed or removed (%s)" % path)
803
803
804 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
804 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
805
805
806 commit_ctx = memctx(
806 commit_ctx = memctx(
807 repo=repo,
807 repo=repo,
808 parents=parents,
808 parents=parents,
809 text=message,
809 text=message,
810 files=files,
810 files=files,
811 filectxfn=_filectxfn,
811 filectxfn=_filectxfn,
812 user=user,
812 user=user,
813 date=(commit_time, commit_timezone),
813 date=(commit_time, commit_timezone),
814 extra=extra)
814 extra=extra)
815
815
816 n = repo.commitctx(commit_ctx)
816 n = repo.commitctx(commit_ctx)
817 new_id = hex(n)
817 new_id = hex(n)
818
818
819 return new_id
819 return new_id
820
820
821 @reraise_safe_exceptions
821 @reraise_safe_exceptions
822 def pull(self, wire, url, commit_ids=None):
822 def pull(self, wire, url, commit_ids=None):
823 repo = self._factory.repo(wire)
823 repo = self._factory.repo(wire)
824 # Disable any prompts for this repo
824 # Disable any prompts for this repo
825 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
825 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
826
826
827 remote = peer(repo, {}, url)
827 remote = peer(repo, {}, url)
828 # Disable any prompts for this remote
828 # Disable any prompts for this remote
829 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
829 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
830
830
831 if commit_ids:
831 if commit_ids:
832 commit_ids = [bin(commit_id) for commit_id in commit_ids]
832 commit_ids = [bin(commit_id) for commit_id in commit_ids]
833
833
834 return exchange.pull(
834 return exchange.pull(
835 repo, remote, heads=commit_ids, force=None).cgresult
835 repo, remote, heads=commit_ids, force=None).cgresult
836
836
837 @reraise_safe_exceptions
837 @reraise_safe_exceptions
838 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
838 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
839 repo = self._factory.repo(wire)
839 repo = self._factory.repo(wire)
840 baseui = self._factory._create_config(wire['config'], hooks=hooks)
840 baseui = self._factory._create_config(wire['config'], hooks=hooks)
841
841
842 # Mercurial internally has a lot of logic that checks ONLY if
842 # Mercurial internally has a lot of logic that checks ONLY if
843 # option is defined, we just pass those if they are defined then
843 # option is defined, we just pass those if they are defined then
844 opts = {}
844 opts = {}
845 if bookmark:
845 if bookmark:
846 opts['bookmark'] = bookmark
846 opts['bookmark'] = bookmark
847 if branch:
847 if branch:
848 opts['branch'] = branch
848 opts['branch'] = branch
849 if revision:
849 if revision:
850 opts['rev'] = revision
850 opts['rev'] = revision
851
851
852 commands.pull(baseui, repo, source, **opts)
852 commands.pull(baseui, repo, source, **opts)
853
853
854 @reraise_safe_exceptions
854 @reraise_safe_exceptions
855 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
855 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
856 repo = self._factory.repo(wire)
856 repo = self._factory.repo(wire)
857 baseui = self._factory._create_config(wire['config'], hooks=hooks)
857 baseui = self._factory._create_config(wire['config'], hooks=hooks)
858 commands.push(baseui, repo, dest=dest_path, rev=revisions,
858 commands.push(baseui, repo, dest=dest_path, rev=revisions,
859 new_branch=push_branches)
859 new_branch=push_branches)
860
860
861 @reraise_safe_exceptions
861 @reraise_safe_exceptions
862 def strip(self, wire, revision, update, backup):
862 def strip(self, wire, revision, update, backup):
863 repo = self._factory.repo(wire)
863 repo = self._factory.repo(wire)
864 ctx = self._get_ctx(repo, revision)
864 ctx = self._get_ctx(repo, revision)
865 hgext_strip(
865 hgext_strip(
866 repo.baseui, repo, ctx.node(), update=update, backup=backup)
866 repo.baseui, repo, ctx.node(), update=update, backup=backup)
867
867
868 @reraise_safe_exceptions
868 @reraise_safe_exceptions
869 def merge(self, wire, revision):
869 def merge(self, wire, revision):
870 repo = self._factory.repo(wire)
870 repo = self._factory.repo(wire)
871 baseui = self._factory._create_config(wire['config'])
871 baseui = self._factory._create_config(wire['config'])
872 repo.ui.setconfig('ui', 'merge', 'internal:dump')
872 repo.ui.setconfig('ui', 'merge', 'internal:dump')
873
873
874 # In case of sub repositories are used mercurial prompts the user in
874 # In case of sub repositories are used mercurial prompts the user in
875 # case of merge conflicts or different sub repository sources. By
875 # case of merge conflicts or different sub repository sources. By
876 # setting the interactive flag to `False` mercurial doesn't prompt the
876 # setting the interactive flag to `False` mercurial doesn't prompt the
877 # used but instead uses a default value.
877 # used but instead uses a default value.
878 repo.ui.setconfig('ui', 'interactive', False)
878 repo.ui.setconfig('ui', 'interactive', False)
879 commands.merge(baseui, repo, rev=revision)
879 commands.merge(baseui, repo, rev=revision)
880
880
881 @reraise_safe_exceptions
881 @reraise_safe_exceptions
882 def merge_state(self, wire):
882 def merge_state(self, wire):
883 repo = self._factory.repo(wire)
883 repo = self._factory.repo(wire)
884 repo.ui.setconfig('ui', 'merge', 'internal:dump')
884 repo.ui.setconfig('ui', 'merge', 'internal:dump')
885
885
886 # In case of sub repositories are used mercurial prompts the user in
886 # In case of sub repositories are used mercurial prompts the user in
887 # case of merge conflicts or different sub repository sources. By
887 # case of merge conflicts or different sub repository sources. By
888 # setting the interactive flag to `False` mercurial doesn't prompt the
888 # setting the interactive flag to `False` mercurial doesn't prompt the
889 # used but instead uses a default value.
889 # used but instead uses a default value.
890 repo.ui.setconfig('ui', 'interactive', False)
890 repo.ui.setconfig('ui', 'interactive', False)
891 ms = hg_merge.mergestate(repo)
891 ms = hg_merge.mergestate(repo)
892 return [x for x in ms.unresolved()]
892 return [x for x in ms.unresolved()]
893
893
894 @reraise_safe_exceptions
894 @reraise_safe_exceptions
895 def commit(self, wire, message, username, close_branch=False):
895 def commit(self, wire, message, username, close_branch=False):
896 repo = self._factory.repo(wire)
896 repo = self._factory.repo(wire)
897 baseui = self._factory._create_config(wire['config'])
897 baseui = self._factory._create_config(wire['config'])
898 repo.ui.setconfig('ui', 'username', username)
898 repo.ui.setconfig('ui', 'username', username)
899 commands.commit(baseui, repo, message=message, close_branch=close_branch)
899 commands.commit(baseui, repo, message=message, close_branch=close_branch)
900
900
901 @reraise_safe_exceptions
901 @reraise_safe_exceptions
902 def rebase(self, wire, source=None, dest=None, abort=False):
902 def rebase(self, wire, source=None, dest=None, abort=False):
903 repo = self._factory.repo(wire)
903 repo = self._factory.repo(wire)
904 baseui = self._factory._create_config(wire['config'])
904 baseui = self._factory._create_config(wire['config'])
905 repo.ui.setconfig('ui', 'merge', 'internal:dump')
905 repo.ui.setconfig('ui', 'merge', 'internal:dump')
906 rebase.rebase(
906 rebase.rebase(
907 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
907 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
908
908
909 @reraise_safe_exceptions
909 @reraise_safe_exceptions
910 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
910 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
911 repo = self._factory.repo(wire)
911 repo = self._factory.repo(wire)
912 ctx = self._get_ctx(repo, revision)
912 ctx = self._get_ctx(repo, revision)
913 node = ctx.node()
913 node = ctx.node()
914
914
915 date = (tag_time, tag_timezone)
915 date = (tag_time, tag_timezone)
916 try:
916 try:
917 hg_tag.tag(repo, name, node, message, local, user, date)
917 hg_tag.tag(repo, name, node, message, local, user, date)
918 except Abort as e:
918 except Abort as e:
919 log.exception("Tag operation aborted")
919 log.exception("Tag operation aborted")
920 # Exception can contain unicode which we convert
920 # Exception can contain unicode which we convert
921 raise exceptions.AbortException(e)(repr(e))
921 raise exceptions.AbortException(e)(repr(e))
922
922
923 @reraise_safe_exceptions
923 @reraise_safe_exceptions
924 def bookmark(self, wire, bookmark, revision=None):
924 def bookmark(self, wire, bookmark, revision=None):
925 repo = self._factory.repo(wire)
925 repo = self._factory.repo(wire)
926 baseui = self._factory._create_config(wire['config'])
926 baseui = self._factory._create_config(wire['config'])
927 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
927 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
928
928
929 @reraise_safe_exceptions
929 @reraise_safe_exceptions
930 def install_hooks(self, wire, force=False):
930 def install_hooks(self, wire, force=False):
931 # we don't need any special hooks for Mercurial
931 # we don't need any special hooks for Mercurial
932 pass
932 pass
933
933
934 @reraise_safe_exceptions
934 @reraise_safe_exceptions
935 def get_hooks_info(self, wire):
935 def get_hooks_info(self, wire):
936 return {
936 return {
937 'pre_version': vcsserver.__version__,
937 'pre_version': vcsserver.__version__,
938 'post_version': vcsserver.__version__,
938 'post_version': vcsserver.__version__,
939 }
939 }
@@ -1,789 +1,788 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 from urllib2 import URLError
22 from urllib2 import URLError
23 import urlparse
23 import urlparse
24 import logging
24 import logging
25 import posixpath as vcspath
25 import posixpath as vcspath
26 import StringIO
26 import StringIO
27 import urllib
27 import urllib
28 import traceback
28 import traceback
29
29
30 import svn.client
30 import svn.client
31 import svn.core
31 import svn.core
32 import svn.delta
32 import svn.delta
33 import svn.diff
33 import svn.diff
34 import svn.fs
34 import svn.fs
35 import svn.repos
35 import svn.repos
36
36
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver.base import RepoFactory, raise_from_original
38 from vcsserver.base import RepoFactory, raise_from_original
39 from vcsserver.vcs_base import RemoteBase
39 from vcsserver.vcs_base import RemoteBase
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 # Set of svn compatible version flags.
44 # Set of svn compatible version flags.
45 # Compare with subversion/svnadmin/svnadmin.c
45 # Compare with subversion/svnadmin/svnadmin.c
46 svn_compatible_versions = {
46 svn_compatible_versions = {
47 'pre-1.4-compatible',
47 'pre-1.4-compatible',
48 'pre-1.5-compatible',
48 'pre-1.5-compatible',
49 'pre-1.6-compatible',
49 'pre-1.6-compatible',
50 'pre-1.8-compatible',
50 'pre-1.8-compatible',
51 'pre-1.9-compatible'
51 'pre-1.9-compatible'
52 }
52 }
53
53
54 svn_compatible_versions_map = {
54 svn_compatible_versions_map = {
55 'pre-1.4-compatible': '1.3',
55 'pre-1.4-compatible': '1.3',
56 'pre-1.5-compatible': '1.4',
56 'pre-1.5-compatible': '1.4',
57 'pre-1.6-compatible': '1.5',
57 'pre-1.6-compatible': '1.5',
58 'pre-1.8-compatible': '1.7',
58 'pre-1.8-compatible': '1.7',
59 'pre-1.9-compatible': '1.8',
59 'pre-1.9-compatible': '1.8',
60 }
60 }
61
61
62
62
63 def reraise_safe_exceptions(func):
63 def reraise_safe_exceptions(func):
64 """Decorator for converting svn exceptions to something neutral."""
64 """Decorator for converting svn exceptions to something neutral."""
65 def wrapper(*args, **kwargs):
65 def wrapper(*args, **kwargs):
66 try:
66 try:
67 return func(*args, **kwargs)
67 return func(*args, **kwargs)
68 except Exception as e:
68 except Exception as e:
69 if not hasattr(e, '_vcs_kind'):
69 if not hasattr(e, '_vcs_kind'):
70 log.exception("Unhandled exception in svn remote call")
70 log.exception("Unhandled exception in svn remote call")
71 raise_from_original(exceptions.UnhandledException(e))
71 raise_from_original(exceptions.UnhandledException(e))
72 raise
72 raise
73 return wrapper
73 return wrapper
74
74
75
75
76 class SubversionFactory(RepoFactory):
76 class SubversionFactory(RepoFactory):
77 repo_type = 'svn'
77 repo_type = 'svn'
78
78
79 def _create_repo(self, wire, create, compatible_version):
79 def _create_repo(self, wire, create, compatible_version):
80 path = svn.core.svn_path_canonicalize(wire['path'])
80 path = svn.core.svn_path_canonicalize(wire['path'])
81 if create:
81 if create:
82 fs_config = {'compatible-version': '1.9'}
82 fs_config = {'compatible-version': '1.9'}
83 if compatible_version:
83 if compatible_version:
84 if compatible_version not in svn_compatible_versions:
84 if compatible_version not in svn_compatible_versions:
85 raise Exception('Unknown SVN compatible version "{}"'
85 raise Exception('Unknown SVN compatible version "{}"'
86 .format(compatible_version))
86 .format(compatible_version))
87 fs_config['compatible-version'] = \
87 fs_config['compatible-version'] = \
88 svn_compatible_versions_map[compatible_version]
88 svn_compatible_versions_map[compatible_version]
89
89
90 log.debug('Create SVN repo with config "%s"', fs_config)
90 log.debug('Create SVN repo with config "%s"', fs_config)
91 repo = svn.repos.create(path, "", "", None, fs_config)
91 repo = svn.repos.create(path, "", "", None, fs_config)
92 else:
92 else:
93 repo = svn.repos.open(path)
93 repo = svn.repos.open(path)
94
94
95 log.debug('Got SVN object: %s', repo)
95 log.debug('Got SVN object: %s', repo)
96 return repo
96 return repo
97
97
98 def repo(self, wire, create=False, compatible_version=None):
98 def repo(self, wire, create=False, compatible_version=None):
99 """
99 """
100 Get a repository instance for the given path.
100 Get a repository instance for the given path.
101 """
101 """
102 return self._create_repo(wire, create, compatible_version)
102 return self._create_repo(wire, create, compatible_version)
103
103
104
104
105 NODE_TYPE_MAPPING = {
105 NODE_TYPE_MAPPING = {
106 svn.core.svn_node_file: 'file',
106 svn.core.svn_node_file: 'file',
107 svn.core.svn_node_dir: 'dir',
107 svn.core.svn_node_dir: 'dir',
108 }
108 }
109
109
110
110
111 class SvnRemote(RemoteBase):
111 class SvnRemote(RemoteBase):
112
112
113 def __init__(self, factory, hg_factory=None):
113 def __init__(self, factory, hg_factory=None):
114 self._factory = factory
114 self._factory = factory
115 # TODO: Remove once we do not use internal Mercurial objects anymore
115 # TODO: Remove once we do not use internal Mercurial objects anymore
116 # for subversion
116 # for subversion
117 self._hg_factory = hg_factory
117 self._hg_factory = hg_factory
118 self.region = self._factory._cache_region
119
118
120 @reraise_safe_exceptions
119 @reraise_safe_exceptions
121 def discover_svn_version(self):
120 def discover_svn_version(self):
122 try:
121 try:
123 import svn.core
122 import svn.core
124 svn_ver = svn.core.SVN_VERSION
123 svn_ver = svn.core.SVN_VERSION
125 except ImportError:
124 except ImportError:
126 svn_ver = None
125 svn_ver = None
127 return svn_ver
126 return svn_ver
128
127
129 @reraise_safe_exceptions
128 @reraise_safe_exceptions
130 def is_empty(self, wire):
129 def is_empty(self, wire):
131
130
132 try:
131 try:
133 return self.lookup(wire, -1) == 0
132 return self.lookup(wire, -1) == 0
134 except Exception:
133 except Exception:
135 log.exception("failed to read object_store")
134 log.exception("failed to read object_store")
136 return False
135 return False
137
136
138 def check_url(self, url, config_items):
137 def check_url(self, url, config_items):
139 # this can throw exception if not installed, but we detect this
138 # this can throw exception if not installed, but we detect this
140 from hgsubversion import svnrepo
139 from hgsubversion import svnrepo
141
140
142 baseui = self._hg_factory._create_config(config_items)
141 baseui = self._hg_factory._create_config(config_items)
143 # uuid function get's only valid UUID from proper repo, else
142 # uuid function get's only valid UUID from proper repo, else
144 # throws exception
143 # throws exception
145 try:
144 try:
146 svnrepo.svnremoterepo(baseui, url).svn.uuid
145 svnrepo.svnremoterepo(baseui, url).svn.uuid
147 except Exception:
146 except Exception:
148 tb = traceback.format_exc()
147 tb = traceback.format_exc()
149 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
148 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
150 raise URLError(
149 raise URLError(
151 '"%s" is not a valid Subversion source url.' % (url, ))
150 '"%s" is not a valid Subversion source url.' % (url, ))
152 return True
151 return True
153
152
154 def is_path_valid_repository(self, wire, path):
153 def is_path_valid_repository(self, wire, path):
155
154
156 # NOTE(marcink): short circuit the check for SVN repo
155 # NOTE(marcink): short circuit the check for SVN repo
157 # the repos.open might be expensive to check, but we have one cheap
156 # the repos.open might be expensive to check, but we have one cheap
158 # pre condition that we can use, to check for 'format' file
157 # pre condition that we can use, to check for 'format' file
159
158
160 if not os.path.isfile(os.path.join(path, 'format')):
159 if not os.path.isfile(os.path.join(path, 'format')):
161 return False
160 return False
162
161
163 try:
162 try:
164 svn.repos.open(path)
163 svn.repos.open(path)
165 except svn.core.SubversionException:
164 except svn.core.SubversionException:
166 tb = traceback.format_exc()
165 tb = traceback.format_exc()
167 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
166 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
168 return False
167 return False
169 return True
168 return True
170
169
171 @reraise_safe_exceptions
170 @reraise_safe_exceptions
172 def verify(self, wire,):
171 def verify(self, wire,):
173 repo_path = wire['path']
172 repo_path = wire['path']
174 if not self.is_path_valid_repository(wire, repo_path):
173 if not self.is_path_valid_repository(wire, repo_path):
175 raise Exception(
174 raise Exception(
176 "Path %s is not a valid Subversion repository." % repo_path)
175 "Path %s is not a valid Subversion repository." % repo_path)
177
176
178 cmd = ['svnadmin', 'info', repo_path]
177 cmd = ['svnadmin', 'info', repo_path]
179 stdout, stderr = subprocessio.run_command(cmd)
178 stdout, stderr = subprocessio.run_command(cmd)
180 return stdout
179 return stdout
181
180
182 def lookup(self, wire, revision):
181 def lookup(self, wire, revision):
183 if revision not in [-1, None, 'HEAD']:
182 if revision not in [-1, None, 'HEAD']:
184 raise NotImplementedError
183 raise NotImplementedError
185 repo = self._factory.repo(wire)
184 repo = self._factory.repo(wire)
186 fs_ptr = svn.repos.fs(repo)
185 fs_ptr = svn.repos.fs(repo)
187 head = svn.fs.youngest_rev(fs_ptr)
186 head = svn.fs.youngest_rev(fs_ptr)
188 return head
187 return head
189
188
190 def lookup_interval(self, wire, start_ts, end_ts):
189 def lookup_interval(self, wire, start_ts, end_ts):
191 repo = self._factory.repo(wire)
190 repo = self._factory.repo(wire)
192 fsobj = svn.repos.fs(repo)
191 fsobj = svn.repos.fs(repo)
193 start_rev = None
192 start_rev = None
194 end_rev = None
193 end_rev = None
195 if start_ts:
194 if start_ts:
196 start_ts_svn = apr_time_t(start_ts)
195 start_ts_svn = apr_time_t(start_ts)
197 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
196 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
198 else:
197 else:
199 start_rev = 1
198 start_rev = 1
200 if end_ts:
199 if end_ts:
201 end_ts_svn = apr_time_t(end_ts)
200 end_ts_svn = apr_time_t(end_ts)
202 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
201 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
203 else:
202 else:
204 end_rev = svn.fs.youngest_rev(fsobj)
203 end_rev = svn.fs.youngest_rev(fsobj)
205 return start_rev, end_rev
204 return start_rev, end_rev
206
205
207 def revision_properties(self, wire, revision):
206 def revision_properties(self, wire, revision):
208
207
209 cache_on, context_uid, repo_id = self._cache_on(wire)
208 cache_on, context_uid, repo_id = self._cache_on(wire)
210 @self.region.conditional_cache_on_arguments(condition=cache_on)
209 @self.region.conditional_cache_on_arguments(condition=cache_on)
211 def _revision_properties(_repo_id, _revision):
210 def _revision_properties(_repo_id, _revision):
212 repo = self._factory.repo(wire)
211 repo = self._factory.repo(wire)
213 fs_ptr = svn.repos.fs(repo)
212 fs_ptr = svn.repos.fs(repo)
214 return svn.fs.revision_proplist(fs_ptr, revision)
213 return svn.fs.revision_proplist(fs_ptr, revision)
215 return _revision_properties(repo_id, revision)
214 return _revision_properties(repo_id, revision)
216
215
217 def revision_changes(self, wire, revision):
216 def revision_changes(self, wire, revision):
218
217
219 repo = self._factory.repo(wire)
218 repo = self._factory.repo(wire)
220 fsobj = svn.repos.fs(repo)
219 fsobj = svn.repos.fs(repo)
221 rev_root = svn.fs.revision_root(fsobj, revision)
220 rev_root = svn.fs.revision_root(fsobj, revision)
222
221
223 editor = svn.repos.ChangeCollector(fsobj, rev_root)
222 editor = svn.repos.ChangeCollector(fsobj, rev_root)
224 editor_ptr, editor_baton = svn.delta.make_editor(editor)
223 editor_ptr, editor_baton = svn.delta.make_editor(editor)
225 base_dir = ""
224 base_dir = ""
226 send_deltas = False
225 send_deltas = False
227 svn.repos.replay2(
226 svn.repos.replay2(
228 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
227 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
229 editor_ptr, editor_baton, None)
228 editor_ptr, editor_baton, None)
230
229
231 added = []
230 added = []
232 changed = []
231 changed = []
233 removed = []
232 removed = []
234
233
235 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
234 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
236 for path, change in editor.changes.iteritems():
235 for path, change in editor.changes.iteritems():
237 # TODO: Decide what to do with directory nodes. Subversion can add
236 # TODO: Decide what to do with directory nodes. Subversion can add
238 # empty directories.
237 # empty directories.
239
238
240 if change.item_kind == svn.core.svn_node_dir:
239 if change.item_kind == svn.core.svn_node_dir:
241 continue
240 continue
242 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
241 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
243 added.append(path)
242 added.append(path)
244 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
243 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
245 svn.repos.CHANGE_ACTION_REPLACE]:
244 svn.repos.CHANGE_ACTION_REPLACE]:
246 changed.append(path)
245 changed.append(path)
247 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
246 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
248 removed.append(path)
247 removed.append(path)
249 else:
248 else:
250 raise NotImplementedError(
249 raise NotImplementedError(
251 "Action %s not supported on path %s" % (
250 "Action %s not supported on path %s" % (
252 change.action, path))
251 change.action, path))
253
252
254 changes = {
253 changes = {
255 'added': added,
254 'added': added,
256 'changed': changed,
255 'changed': changed,
257 'removed': removed,
256 'removed': removed,
258 }
257 }
259 return changes
258 return changes
260
259
261 @reraise_safe_exceptions
260 @reraise_safe_exceptions
262 def node_history(self, wire, path, revision, limit):
261 def node_history(self, wire, path, revision, limit):
263 cache_on, context_uid, repo_id = self._cache_on(wire)
262 cache_on, context_uid, repo_id = self._cache_on(wire)
264 @self.region.conditional_cache_on_arguments(condition=cache_on)
263 @self.region.conditional_cache_on_arguments(condition=cache_on)
265 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
264 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
266 cross_copies = False
265 cross_copies = False
267 repo = self._factory.repo(wire)
266 repo = self._factory.repo(wire)
268 fsobj = svn.repos.fs(repo)
267 fsobj = svn.repos.fs(repo)
269 rev_root = svn.fs.revision_root(fsobj, revision)
268 rev_root = svn.fs.revision_root(fsobj, revision)
270
269
271 history_revisions = []
270 history_revisions = []
272 history = svn.fs.node_history(rev_root, path)
271 history = svn.fs.node_history(rev_root, path)
273 history = svn.fs.history_prev(history, cross_copies)
272 history = svn.fs.history_prev(history, cross_copies)
274 while history:
273 while history:
275 __, node_revision = svn.fs.history_location(history)
274 __, node_revision = svn.fs.history_location(history)
276 history_revisions.append(node_revision)
275 history_revisions.append(node_revision)
277 if limit and len(history_revisions) >= limit:
276 if limit and len(history_revisions) >= limit:
278 break
277 break
279 history = svn.fs.history_prev(history, cross_copies)
278 history = svn.fs.history_prev(history, cross_copies)
280 return history_revisions
279 return history_revisions
281 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
280 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
282
281
283 def node_properties(self, wire, path, revision):
282 def node_properties(self, wire, path, revision):
284 cache_on, context_uid, repo_id = self._cache_on(wire)
283 cache_on, context_uid, repo_id = self._cache_on(wire)
285 @self.region.conditional_cache_on_arguments(condition=cache_on)
284 @self.region.conditional_cache_on_arguments(condition=cache_on)
286 def _node_properties(_repo_id, _path, _revision):
285 def _node_properties(_repo_id, _path, _revision):
287 repo = self._factory.repo(wire)
286 repo = self._factory.repo(wire)
288 fsobj = svn.repos.fs(repo)
287 fsobj = svn.repos.fs(repo)
289 rev_root = svn.fs.revision_root(fsobj, revision)
288 rev_root = svn.fs.revision_root(fsobj, revision)
290 return svn.fs.node_proplist(rev_root, path)
289 return svn.fs.node_proplist(rev_root, path)
291 return _node_properties(repo_id, path, revision)
290 return _node_properties(repo_id, path, revision)
292
291
293 def file_annotate(self, wire, path, revision):
292 def file_annotate(self, wire, path, revision):
294 abs_path = 'file://' + urllib.pathname2url(
293 abs_path = 'file://' + urllib.pathname2url(
295 vcspath.join(wire['path'], path))
294 vcspath.join(wire['path'], path))
296 file_uri = svn.core.svn_path_canonicalize(abs_path)
295 file_uri = svn.core.svn_path_canonicalize(abs_path)
297
296
298 start_rev = svn_opt_revision_value_t(0)
297 start_rev = svn_opt_revision_value_t(0)
299 peg_rev = svn_opt_revision_value_t(revision)
298 peg_rev = svn_opt_revision_value_t(revision)
300 end_rev = peg_rev
299 end_rev = peg_rev
301
300
302 annotations = []
301 annotations = []
303
302
304 def receiver(line_no, revision, author, date, line, pool):
303 def receiver(line_no, revision, author, date, line, pool):
305 annotations.append((line_no, revision, line))
304 annotations.append((line_no, revision, line))
306
305
307 # TODO: Cannot use blame5, missing typemap function in the swig code
306 # TODO: Cannot use blame5, missing typemap function in the swig code
308 try:
307 try:
309 svn.client.blame2(
308 svn.client.blame2(
310 file_uri, peg_rev, start_rev, end_rev,
309 file_uri, peg_rev, start_rev, end_rev,
311 receiver, svn.client.create_context())
310 receiver, svn.client.create_context())
312 except svn.core.SubversionException as exc:
311 except svn.core.SubversionException as exc:
313 log.exception("Error during blame operation.")
312 log.exception("Error during blame operation.")
314 raise Exception(
313 raise Exception(
315 "Blame not supported or file does not exist at path %s. "
314 "Blame not supported or file does not exist at path %s. "
316 "Error %s." % (path, exc))
315 "Error %s." % (path, exc))
317
316
318 return annotations
317 return annotations
319
318
320 def get_node_type(self, wire, path, revision=None):
319 def get_node_type(self, wire, path, revision=None):
321
320
322 cache_on, context_uid, repo_id = self._cache_on(wire)
321 cache_on, context_uid, repo_id = self._cache_on(wire)
323 @self.region.conditional_cache_on_arguments(condition=cache_on)
322 @self.region.conditional_cache_on_arguments(condition=cache_on)
324 def _get_node_type(_repo_id, _path, _revision):
323 def _get_node_type(_repo_id, _path, _revision):
325 repo = self._factory.repo(wire)
324 repo = self._factory.repo(wire)
326 fs_ptr = svn.repos.fs(repo)
325 fs_ptr = svn.repos.fs(repo)
327 if _revision is None:
326 if _revision is None:
328 _revision = svn.fs.youngest_rev(fs_ptr)
327 _revision = svn.fs.youngest_rev(fs_ptr)
329 root = svn.fs.revision_root(fs_ptr, _revision)
328 root = svn.fs.revision_root(fs_ptr, _revision)
330 node = svn.fs.check_path(root, path)
329 node = svn.fs.check_path(root, path)
331 return NODE_TYPE_MAPPING.get(node, None)
330 return NODE_TYPE_MAPPING.get(node, None)
332 return _get_node_type(repo_id, path, revision)
331 return _get_node_type(repo_id, path, revision)
333
332
334 def get_nodes(self, wire, path, revision=None):
333 def get_nodes(self, wire, path, revision=None):
335
334
336 cache_on, context_uid, repo_id = self._cache_on(wire)
335 cache_on, context_uid, repo_id = self._cache_on(wire)
337 @self.region.conditional_cache_on_arguments(condition=cache_on)
336 @self.region.conditional_cache_on_arguments(condition=cache_on)
338 def _get_nodes(_repo_id, _path, _revision):
337 def _get_nodes(_repo_id, _path, _revision):
339 repo = self._factory.repo(wire)
338 repo = self._factory.repo(wire)
340 fsobj = svn.repos.fs(repo)
339 fsobj = svn.repos.fs(repo)
341 if _revision is None:
340 if _revision is None:
342 _revision = svn.fs.youngest_rev(fsobj)
341 _revision = svn.fs.youngest_rev(fsobj)
343 root = svn.fs.revision_root(fsobj, _revision)
342 root = svn.fs.revision_root(fsobj, _revision)
344 entries = svn.fs.dir_entries(root, path)
343 entries = svn.fs.dir_entries(root, path)
345 result = []
344 result = []
346 for entry_path, entry_info in entries.iteritems():
345 for entry_path, entry_info in entries.iteritems():
347 result.append(
346 result.append(
348 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
349 return result
348 return result
350 return _get_nodes(repo_id, path, revision)
349 return _get_nodes(repo_id, path, revision)
351
350
352 def get_file_content(self, wire, path, rev=None):
351 def get_file_content(self, wire, path, rev=None):
353 repo = self._factory.repo(wire)
352 repo = self._factory.repo(wire)
354 fsobj = svn.repos.fs(repo)
353 fsobj = svn.repos.fs(repo)
355 if rev is None:
354 if rev is None:
356 rev = svn.fs.youngest_revision(fsobj)
355 rev = svn.fs.youngest_revision(fsobj)
357 root = svn.fs.revision_root(fsobj, rev)
356 root = svn.fs.revision_root(fsobj, rev)
358 content = svn.core.Stream(svn.fs.file_contents(root, path))
357 content = svn.core.Stream(svn.fs.file_contents(root, path))
359 return content.read()
358 return content.read()
360
359
361 def get_file_size(self, wire, path, revision=None):
360 def get_file_size(self, wire, path, revision=None):
362
361
363 cache_on, context_uid, repo_id = self._cache_on(wire)
362 cache_on, context_uid, repo_id = self._cache_on(wire)
364 @self.region.conditional_cache_on_arguments(condition=cache_on)
363 @self.region.conditional_cache_on_arguments(condition=cache_on)
365 def _get_file_size(_repo_id, _path, _revision):
364 def _get_file_size(_repo_id, _path, _revision):
366 repo = self._factory.repo(wire)
365 repo = self._factory.repo(wire)
367 fsobj = svn.repos.fs(repo)
366 fsobj = svn.repos.fs(repo)
368 if _revision is None:
367 if _revision is None:
369 _revision = svn.fs.youngest_revision(fsobj)
368 _revision = svn.fs.youngest_revision(fsobj)
370 root = svn.fs.revision_root(fsobj, _revision)
369 root = svn.fs.revision_root(fsobj, _revision)
371 size = svn.fs.file_length(root, path)
370 size = svn.fs.file_length(root, path)
372 return size
371 return size
373 return _get_file_size(repo_id, path, revision)
372 return _get_file_size(repo_id, path, revision)
374
373
375 def create_repository(self, wire, compatible_version=None):
374 def create_repository(self, wire, compatible_version=None):
376 log.info('Creating Subversion repository in path "%s"', wire['path'])
375 log.info('Creating Subversion repository in path "%s"', wire['path'])
377 self._factory.repo(wire, create=True,
376 self._factory.repo(wire, create=True,
378 compatible_version=compatible_version)
377 compatible_version=compatible_version)
379
378
380 def get_url_and_credentials(self, src_url):
379 def get_url_and_credentials(self, src_url):
381 obj = urlparse.urlparse(src_url)
380 obj = urlparse.urlparse(src_url)
382 username = obj.username or None
381 username = obj.username or None
383 password = obj.password or None
382 password = obj.password or None
384 return username, password, src_url
383 return username, password, src_url
385
384
    def import_remote_repository(self, wire, src_url):
        """
        Import a remote svn repository into the local one at ``wire['path']``
        by piping ``svnrdump dump`` into ``svnadmin load``.

        :raises Exception: when the target path is not a valid repository,
            or when either subprocess exits with a non-zero status.
        """
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        username, password, src_url = self.get_url_and_credentials(src_url)
        # NOTE(review): credentials end up on the process command line and
        # are therefore visible in the process list — confirm acceptable.
        rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
                     '--trust-server-cert-failures=unknown-ca']
        if username and password:
            rdump_cmd += ['--username', username, '--password', password]
        rdump_cmd += [src_url]

        # svnrdump's stdout is streamed straight into svnadmin's stdin.
        # NOTE(review): stderr=PIPE is only read after wait(); if svnrdump
        # emits a lot of stderr the pipe can fill and deadlock — verify.
        # NOTE(review): rdump.stdout is not closed in the parent, so
        # svnrdump may not see SIGPIPE if svnadmin exits early — verify.
        rdump = subprocess.Popen(
            rdump_cmd,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        load = subprocess.Popen(
            ['svnadmin', 'load', repo_path], stdin=rdump.stdout)

        # TODO: johbo: This can be a very long operation, might be better
        # to track some kind of status and provide an api to check if the
        # import is done.
        rdump.wait()
        load.wait()

        log.debug('Return process ended with code: %s', rdump.returncode)
        if rdump.returncode != 0:
            errors = rdump.stderr.read()
            log.error('svnrdump dump failed: statuscode %s: message: %s',
                      rdump.returncode, errors)
            reason = 'UNKNOWN'
            # E230001 is Subversion's "server certificate verification
            # failed" error code.
            if 'svnrdump: E230001:' in errors:
                reason = 'INVALID_CERTIFICATE'

            if reason == 'UNKNOWN':
                reason = 'UNKNOWN:{}'.format(errors)
            raise Exception(
                'Failed to dump the remote repository from %s. Reason:%s' % (
                    src_url, reason))
        if load.returncode != 0:
            raise Exception(
                'Failed to load the dump of remote repository from %s.' %
                (src_url, ))
429
428
    def commit(self, wire, message, author, timestamp, updated, removed):
        """
        Create a new revision from the given node changes.

        :param message: commit message (must be str — svn bindings require it)
        :param author: commit author (must be str)
        :param timestamp: optional POSIX timestamp; when truthy it is written
            back onto the new revision as the ``svn:date`` property
        :param updated: iterable of node dicts to add/update
            (see ``TxnNodeProcessor``)
        :param removed: iterable of node dicts to delete
        :return: the id of the committed revision
        """
        assert isinstance(message, str)
        assert isinstance(author, str)

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)

        # Begin a transaction on top of the youngest revision; all node
        # changes below are staged in the transaction root before commit.
        rev = svn.fs.youngest_rev(fsobj)
        txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
        txn_root = svn.fs.txn_root(txn)

        for node in updated:
            TxnNodeProcessor(node, txn_root).update()
        for node in removed:
            TxnNodeProcessor(node, txn_root).remove()

        commit_id = svn.repos.fs_commit_txn(repo, txn)

        if timestamp:
            # Override svn:date so the revision carries the requested
            # timestamp instead of "now".
            apr_time = apr_time_t(timestamp)
            ts_formatted = svn.core.svn_time_to_cstring(apr_time)
            svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

        log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
        return commit_id
455
454
456 def diff(self, wire, rev1, rev2, path1=None, path2=None,
455 def diff(self, wire, rev1, rev2, path1=None, path2=None,
457 ignore_whitespace=False, context=3):
456 ignore_whitespace=False, context=3):
458
457
459 wire.update(cache=False)
458 wire.update(cache=False)
460 repo = self._factory.repo(wire)
459 repo = self._factory.repo(wire)
461 diff_creator = SvnDiffer(
460 diff_creator = SvnDiffer(
462 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
461 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
463 try:
462 try:
464 return diff_creator.generate_diff()
463 return diff_creator.generate_diff()
465 except svn.core.SubversionException as e:
464 except svn.core.SubversionException as e:
466 log.exception(
465 log.exception(
467 "Error during diff operation operation. "
466 "Error during diff operation operation. "
468 "Path might not exist %s, %s" % (path1, path2))
467 "Path might not exist %s, %s" % (path1, path2))
469 return ""
468 return ""
470
469
471 @reraise_safe_exceptions
470 @reraise_safe_exceptions
472 def is_large_file(self, wire, path):
471 def is_large_file(self, wire, path):
473 return False
472 return False
474
473
    @reraise_safe_exceptions
    def run_svn_command(self, wire, cmd, **opts):
        """
        Run an arbitrary svn command as a subprocess.

        :param cmd: command as an argument list (``shell=False``)
        :param opts: extra options; recognized keys:
            ``_safe`` — on failure return ``('', err)`` instead of raising,
            ``extra_env`` — dict merged into the subprocess environment;
            everything else is passed through to ``SubprocessIOChunker``.
        :return: tuple ``(stdout, stderr)`` as joined strings
        :raises exceptions.VcsException: when the command cannot be run and
            ``_safe`` was not given.
        """
        path = wire.get('path', None)

        # Run inside the repository directory when it exists.
        if path and os.path.isdir(path):
            opts['cwd'] = path

        safe_call = False
        if '_safe' in opts:
            safe_call = True

        svnenv = os.environ.copy()
        svnenv.update(opts.pop('extra_env', {}))

        _opts = {'env': svnenv, 'shell': False}

        try:
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)

            # The chunker iterates stdout; `.error` carries stderr.
            return ''.join(p), ''.join(p.error)
        except (EnvironmentError, OSError) as err:
            cmd = ' '.join(cmd)  # human friendly CMD
            tb_err = ("Couldn't run svn command (%s).\n"
                      "Original error was:%s\n"
                      "Call options:%s\n"
                      % (cmd, err, _opts))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException()(tb_err)
507
506
508 @reraise_safe_exceptions
507 @reraise_safe_exceptions
509 def install_hooks(self, wire, force=False):
508 def install_hooks(self, wire, force=False):
510 from vcsserver.hook_utils import install_svn_hooks
509 from vcsserver.hook_utils import install_svn_hooks
511 repo_path = wire['path']
510 repo_path = wire['path']
512 binary_dir = settings.BINARY_DIR
511 binary_dir = settings.BINARY_DIR
513 executable = None
512 executable = None
514 if binary_dir:
513 if binary_dir:
515 executable = os.path.join(binary_dir, 'python')
514 executable = os.path.join(binary_dir, 'python')
516 return install_svn_hooks(
515 return install_svn_hooks(
517 repo_path, executable=executable, force_create=force)
516 repo_path, executable=executable, force_create=force)
518
517
519 @reraise_safe_exceptions
518 @reraise_safe_exceptions
520 def get_hooks_info(self, wire):
519 def get_hooks_info(self, wire):
521 from vcsserver.hook_utils import (
520 from vcsserver.hook_utils import (
522 get_svn_pre_hook_version, get_svn_post_hook_version)
521 get_svn_pre_hook_version, get_svn_post_hook_version)
523 repo_path = wire['path']
522 repo_path = wire['path']
524 return {
523 return {
525 'pre_version': get_svn_pre_hook_version(repo_path),
524 'pre_version': get_svn_pre_hook_version(repo_path),
526 'post_version': get_svn_post_hook_version(repo_path),
525 'post_version': get_svn_post_hook_version(repo_path),
527 }
526 }
528
527
529
528
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api

    Produces git-style unified diff text between two (revision, path)
    pairs of the same repository, writing into a StringIO buffer.
    """

    # Set per-node while generating; when True the text diff is skipped.
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        # Default the source path to the target path when not given.
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        """Reject diffs between a file and a directory."""
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        """Return the full diff text for the configured revisions/paths."""
        buf = StringIO.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        # Collect per-node changes via a delta editor, then emit one
        # node diff per changed path in sorted order.
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        # Derive the change kind from node presence on either side;
        # None means "modified".
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        """Write the diff headers and hunks for a single node into *buf*."""
        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        # Anything with a non-text svn:mime-type is treated as binary.
        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)

        buf.write("Index: %s\n" % (tgt_path, ))
        buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        """Read svn:mime-type from the target root, falling back to source."""
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        """Return the node's content split into lines (keeping line ends)."""
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)
689
688
690
689
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions

    Used as the delta editor for ``svn.repos.dir_delta2``: instead of
    producing output it appends ``(path, kind, action)`` tuples to
    ``self.changes``. The callback signatures are fixed by the svn
    delta editor interface.
    """

    def __init__(self):
        # list of (path, 'file' or None, 'add'/'change'/'delete') tuples
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        self.changes.append((path, 'file', 'change'))
709
708
710
709
def authorization_callback_allow_all(root, path, pool):
    """Authz callback for svn delta operations that permits every path."""
    return True
713
712
714
713
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        # `node` is a dict with at least a 'path' key; the svn bindings
        # require it to be a str.
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create/overwrite this node's file content and properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        """Delete this node from the transaction root."""
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # Walk up from the node until an existing directory is found,
        # then create the missing chain from the top down.
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        # svn_node_none means the path is absent in the transaction root.
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        # Replace the full file content via a text delta with no base.
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        # NOTE: iteritems() is Python 2 only — consistent with this file.
        properties = self.node.get('properties', {})
        for key, value in properties.iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
771
770
772
771
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t

    APR measures time in *integer* microseconds since the epoch; return an
    int (``timestamp * 1E6`` alone would be a float, which the SWIG svn
    bindings may reject for an apr_time_t argument).
    """
    return int(timestamp * 1E6)
778
777
779
778
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.

    :return: a ``svn_opt_revision_t`` of kind ``svn_opt_revision_number``
        wrapping the given revision number.
    """
    rev_value = svn.core.svn_opt_revision_value_t()
    rev_value.number = num
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value = rev_value
    return rev
@@ -1,28 +1,32 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
class RemoteBase(object):
    """Shared base for VCS remote objects: cache region and cache-key helper."""

    # 40 zeros — the null/empty commit id used across backends.
    EMPTY_COMMIT = '0' * 40

    @property
    def region(self):
        """Cache region taken from the backend factory."""
        return self._factory._cache_region

    def _cache_on(self, wire):
        """
        Derive caching parameters from *wire*.

        :return: tuple ``(cache_on, context_uid, repo_id)`` where
            ``cache_on`` is truthy only when a context is present and
            caching was not explicitly disabled.
        """
        ctx = wire.get('context', '')
        uid = '{}'.format(ctx)
        repo_id = wire.get('repo_id', '')
        use_cache = wire.get('cache', True)
        return ctx and use_cache, uid, repo_id
General Comments 0
You need to be logged in to leave comments. Login now