##// END OF EJS Templates
vcsserver: added dedicated binary content check functions for all backends
dan -
r769:077cbac3 default
parent child Browse files
Show More
@@ -1,1160 +1,1173 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib
25 import urllib
26 import urllib2
26 import urllib2
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from dulwich import index, objects
32 from dulwich import index, objects
33 from dulwich.client import HttpGitClient, LocalGitClient
33 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.errors import (
34 from dulwich.errors import (
35 NotGitRepository, ChecksumMismatch, WrongObjectException,
35 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 MissingCommitError, ObjectMissing, HangupException,
36 MissingCommitError, ObjectMissing, HangupException,
37 UnexpectedCommandError)
37 UnexpectedCommandError)
38 from dulwich.repo import Repo as DulwichRepo
38 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.server import update_server_info
39 from dulwich.server import update_server_info
40
40
41 from vcsserver import exceptions, settings, subprocessio
41 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver.utils import safe_str, safe_int
42 from vcsserver.utils import safe_str, safe_int
43 from vcsserver.base import RepoFactory, obfuscate_qs
43 from vcsserver.base import RepoFactory, obfuscate_qs
44 from vcsserver.hgcompat import (
44 from vcsserver.hgcompat import (
45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 from vcsserver.git_lfs.lib import LFSOidStore
46 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.vcs_base import RemoteBase
47 from vcsserver.vcs_base import RemoteBase
48
48
49 DIR_STAT = stat.S_IFDIR
49 DIR_STAT = stat.S_IFDIR
50 FILE_MODE = stat.S_IFMT
50 FILE_MODE = stat.S_IFMT
51 GIT_LINK = objects.S_IFGITLINK
51 GIT_LINK = objects.S_IFGITLINK
52 PEELED_REF_MARKER = '^{}'
52 PEELED_REF_MARKER = '^{}'
53
53
54
54
55 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
56
56
57
57
58 def str_to_dulwich(value):
58 def str_to_dulwich(value):
59 """
59 """
60 Dulwich 0.10.1a requires `unicode` objects to be passed in.
60 Dulwich 0.10.1a requires `unicode` objects to be passed in.
61 """
61 """
62 return value.decode(settings.WIRE_ENCODING)
62 return value.decode(settings.WIRE_ENCODING)
63
63
64
64
65 def reraise_safe_exceptions(func):
65 def reraise_safe_exceptions(func):
66 """Converts Dulwich exceptions to something neutral."""
66 """Converts Dulwich exceptions to something neutral."""
67
67
68 @wraps(func)
68 @wraps(func)
69 def wrapper(*args, **kwargs):
69 def wrapper(*args, **kwargs):
70 try:
70 try:
71 return func(*args, **kwargs)
71 return func(*args, **kwargs)
72 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
72 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
73 exc = exceptions.LookupException(org_exc=e)
73 exc = exceptions.LookupException(org_exc=e)
74 raise exc(safe_str(e))
74 raise exc(safe_str(e))
75 except (HangupException, UnexpectedCommandError) as e:
75 except (HangupException, UnexpectedCommandError) as e:
76 exc = exceptions.VcsException(org_exc=e)
76 exc = exceptions.VcsException(org_exc=e)
77 raise exc(safe_str(e))
77 raise exc(safe_str(e))
78 except Exception as e:
78 except Exception as e:
79 # NOTE(marcink): becuase of how dulwich handles some exceptions
79 # NOTE(marcink): becuase of how dulwich handles some exceptions
80 # (KeyError on empty repos), we cannot track this and catch all
80 # (KeyError on empty repos), we cannot track this and catch all
81 # exceptions, it's an exceptions from other handlers
81 # exceptions, it's an exceptions from other handlers
82 #if not hasattr(e, '_vcs_kind'):
82 #if not hasattr(e, '_vcs_kind'):
83 #log.exception("Unhandled exception in git remote call")
83 #log.exception("Unhandled exception in git remote call")
84 #raise_from_original(exceptions.UnhandledException)
84 #raise_from_original(exceptions.UnhandledException)
85 raise
85 raise
86 return wrapper
86 return wrapper
87
87
88
88
89 class Repo(DulwichRepo):
89 class Repo(DulwichRepo):
90 """
90 """
91 A wrapper for dulwich Repo class.
91 A wrapper for dulwich Repo class.
92
92
93 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
93 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
94 "Too many open files" error. We need to close all opened file descriptors
94 "Too many open files" error. We need to close all opened file descriptors
95 once the repo object is destroyed.
95 once the repo object is destroyed.
96 """
96 """
97 def __del__(self):
97 def __del__(self):
98 if hasattr(self, 'object_store'):
98 if hasattr(self, 'object_store'):
99 self.close()
99 self.close()
100
100
101
101
102 class Repository(LibGit2Repo):
102 class Repository(LibGit2Repo):
103
103
104 def __enter__(self):
104 def __enter__(self):
105 return self
105 return self
106
106
107 def __exit__(self, exc_type, exc_val, exc_tb):
107 def __exit__(self, exc_type, exc_val, exc_tb):
108 self.free()
108 self.free()
109
109
110
110
111 class GitFactory(RepoFactory):
111 class GitFactory(RepoFactory):
112 repo_type = 'git'
112 repo_type = 'git'
113
113
114 def _create_repo(self, wire, create, use_libgit2=False):
114 def _create_repo(self, wire, create, use_libgit2=False):
115 if use_libgit2:
115 if use_libgit2:
116 return Repository(wire['path'])
116 return Repository(wire['path'])
117 else:
117 else:
118 repo_path = str_to_dulwich(wire['path'])
118 repo_path = str_to_dulwich(wire['path'])
119 return Repo(repo_path)
119 return Repo(repo_path)
120
120
121 def repo(self, wire, create=False, use_libgit2=False):
121 def repo(self, wire, create=False, use_libgit2=False):
122 """
122 """
123 Get a repository instance for the given path.
123 Get a repository instance for the given path.
124 """
124 """
125 return self._create_repo(wire, create, use_libgit2)
125 return self._create_repo(wire, create, use_libgit2)
126
126
127 def repo_libgit2(self, wire):
127 def repo_libgit2(self, wire):
128 return self.repo(wire, use_libgit2=True)
128 return self.repo(wire, use_libgit2=True)
129
129
130
130
131 class GitRemote(RemoteBase):
131 class GitRemote(RemoteBase):
132
132
133 def __init__(self, factory):
133 def __init__(self, factory):
134 self._factory = factory
134 self._factory = factory
135 self._bulk_methods = {
135 self._bulk_methods = {
136 "date": self.date,
136 "date": self.date,
137 "author": self.author,
137 "author": self.author,
138 "branch": self.branch,
138 "branch": self.branch,
139 "message": self.message,
139 "message": self.message,
140 "parents": self.parents,
140 "parents": self.parents,
141 "_commit": self.revision,
141 "_commit": self.revision,
142 }
142 }
143
143
144 def _wire_to_config(self, wire):
144 def _wire_to_config(self, wire):
145 if 'config' in wire:
145 if 'config' in wire:
146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 return {}
147 return {}
148
148
149 def _remote_conf(self, config):
149 def _remote_conf(self, config):
150 params = [
150 params = [
151 '-c', 'core.askpass=""',
151 '-c', 'core.askpass=""',
152 ]
152 ]
153 ssl_cert_dir = config.get('vcs_ssl_dir')
153 ssl_cert_dir = config.get('vcs_ssl_dir')
154 if ssl_cert_dir:
154 if ssl_cert_dir:
155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 return params
156 return params
157
157
158 @reraise_safe_exceptions
158 @reraise_safe_exceptions
159 def discover_git_version(self):
159 def discover_git_version(self):
160 stdout, _ = self.run_git_command(
160 stdout, _ = self.run_git_command(
161 {}, ['--version'], _bare=True, _safe=True)
161 {}, ['--version'], _bare=True, _safe=True)
162 prefix = 'git version'
162 prefix = 'git version'
163 if stdout.startswith(prefix):
163 if stdout.startswith(prefix):
164 stdout = stdout[len(prefix):]
164 stdout = stdout[len(prefix):]
165 return stdout.strip()
165 return stdout.strip()
166
166
167 @reraise_safe_exceptions
167 @reraise_safe_exceptions
168 def is_empty(self, wire):
168 def is_empty(self, wire):
169 repo_init = self._factory.repo_libgit2(wire)
169 repo_init = self._factory.repo_libgit2(wire)
170 with repo_init as repo:
170 with repo_init as repo:
171
171
172 try:
172 try:
173 has_head = repo.head.name
173 has_head = repo.head.name
174 if has_head:
174 if has_head:
175 return False
175 return False
176
176
177 # NOTE(marcink): check again using more expensive method
177 # NOTE(marcink): check again using more expensive method
178 return repo.is_empty
178 return repo.is_empty
179 except Exception:
179 except Exception:
180 pass
180 pass
181
181
182 return True
182 return True
183
183
184 @reraise_safe_exceptions
184 @reraise_safe_exceptions
185 def assert_correct_path(self, wire):
185 def assert_correct_path(self, wire):
186 cache_on, context_uid, repo_id = self._cache_on(wire)
186 cache_on, context_uid, repo_id = self._cache_on(wire)
187 @self.region.conditional_cache_on_arguments(condition=cache_on)
187 @self.region.conditional_cache_on_arguments(condition=cache_on)
188 def _assert_correct_path(_context_uid, _repo_id):
188 def _assert_correct_path(_context_uid, _repo_id):
189 try:
189 try:
190 repo_init = self._factory.repo_libgit2(wire)
190 repo_init = self._factory.repo_libgit2(wire)
191 with repo_init as repo:
191 with repo_init as repo:
192 pass
192 pass
193 except pygit2.GitError:
193 except pygit2.GitError:
194 path = wire.get('path')
194 path = wire.get('path')
195 tb = traceback.format_exc()
195 tb = traceback.format_exc()
196 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
196 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
197 return False
197 return False
198
198
199 return True
199 return True
200 return _assert_correct_path(context_uid, repo_id)
200 return _assert_correct_path(context_uid, repo_id)
201
201
202 @reraise_safe_exceptions
202 @reraise_safe_exceptions
203 def bare(self, wire):
203 def bare(self, wire):
204 repo_init = self._factory.repo_libgit2(wire)
204 repo_init = self._factory.repo_libgit2(wire)
205 with repo_init as repo:
205 with repo_init as repo:
206 return repo.is_bare
206 return repo.is_bare
207
207
208 @reraise_safe_exceptions
208 @reraise_safe_exceptions
209 def blob_as_pretty_string(self, wire, sha):
209 def blob_as_pretty_string(self, wire, sha):
210 repo_init = self._factory.repo_libgit2(wire)
210 repo_init = self._factory.repo_libgit2(wire)
211 with repo_init as repo:
211 with repo_init as repo:
212 blob_obj = repo[sha]
212 blob_obj = repo[sha]
213 blob = blob_obj.data
213 blob = blob_obj.data
214 return blob
214 return blob
215
215
216 @reraise_safe_exceptions
216 @reraise_safe_exceptions
217 def blob_raw_length(self, wire, sha):
217 def blob_raw_length(self, wire, sha):
218 cache_on, context_uid, repo_id = self._cache_on(wire)
218 cache_on, context_uid, repo_id = self._cache_on(wire)
219 @self.region.conditional_cache_on_arguments(condition=cache_on)
219 @self.region.conditional_cache_on_arguments(condition=cache_on)
220 def _blob_raw_length(_repo_id, _sha):
220 def _blob_raw_length(_repo_id, _sha):
221
221
222 repo_init = self._factory.repo_libgit2(wire)
222 repo_init = self._factory.repo_libgit2(wire)
223 with repo_init as repo:
223 with repo_init as repo:
224 blob = repo[sha]
224 blob = repo[sha]
225 return blob.size
225 return blob.size
226
226
227 return _blob_raw_length(repo_id, sha)
227 return _blob_raw_length(repo_id, sha)
228
228
229 def _parse_lfs_pointer(self, raw_content):
229 def _parse_lfs_pointer(self, raw_content):
230
230
231 spec_string = 'version https://git-lfs.github.com/spec'
231 spec_string = 'version https://git-lfs.github.com/spec'
232 if raw_content and raw_content.startswith(spec_string):
232 if raw_content and raw_content.startswith(spec_string):
233 pattern = re.compile(r"""
233 pattern = re.compile(r"""
234 (?:\n)?
234 (?:\n)?
235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
237 ^size[ ](?P<oid_size>[0-9]+)\n
237 ^size[ ](?P<oid_size>[0-9]+)\n
238 (?:\n)?
238 (?:\n)?
239 """, re.VERBOSE | re.MULTILINE)
239 """, re.VERBOSE | re.MULTILINE)
240 match = pattern.match(raw_content)
240 match = pattern.match(raw_content)
241 if match:
241 if match:
242 return match.groupdict()
242 return match.groupdict()
243
243
244 return {}
244 return {}
245
245
246 @reraise_safe_exceptions
246 @reraise_safe_exceptions
247 def is_large_file(self, wire, commit_id):
247 def is_large_file(self, wire, commit_id):
248 cache_on, context_uid, repo_id = self._cache_on(wire)
248
249
249 cache_on, context_uid, repo_id = self._cache_on(wire)
250 @self.region.conditional_cache_on_arguments(condition=cache_on)
250 @self.region.conditional_cache_on_arguments(condition=cache_on)
251 def _is_large_file(_repo_id, _sha):
251 def _is_large_file(_repo_id, _sha):
252 repo_init = self._factory.repo_libgit2(wire)
252 repo_init = self._factory.repo_libgit2(wire)
253 with repo_init as repo:
253 with repo_init as repo:
254 blob = repo[commit_id]
254 blob = repo[commit_id]
255 if blob.is_binary:
255 if blob.is_binary:
256 return {}
256 return {}
257
257
258 return self._parse_lfs_pointer(blob.data)
258 return self._parse_lfs_pointer(blob.data)
259
259
260 return _is_large_file(repo_id, commit_id)
260 return _is_large_file(repo_id, commit_id)
261
261
262 @reraise_safe_exceptions
262 @reraise_safe_exceptions
263 def is_binary(self, wire, tree_id):
264 cache_on, context_uid, repo_id = self._cache_on(wire)
265
266 @self.region.conditional_cache_on_arguments(condition=cache_on)
267 def _is_binary(_repo_id, _tree_id):
268 repo_init = self._factory.repo_libgit2(wire)
269 with repo_init as repo:
270 blob_obj = repo[tree_id]
271 return blob_obj.is_binary
272
273 return _is_binary(repo_id, tree_id)
274
275 @reraise_safe_exceptions
263 def in_largefiles_store(self, wire, oid):
276 def in_largefiles_store(self, wire, oid):
264 conf = self._wire_to_config(wire)
277 conf = self._wire_to_config(wire)
265 repo_init = self._factory.repo_libgit2(wire)
278 repo_init = self._factory.repo_libgit2(wire)
266 with repo_init as repo:
279 with repo_init as repo:
267 repo_name = repo.path
280 repo_name = repo.path
268
281
269 store_location = conf.get('vcs_git_lfs_store_location')
282 store_location = conf.get('vcs_git_lfs_store_location')
270 if store_location:
283 if store_location:
271
284
272 store = LFSOidStore(
285 store = LFSOidStore(
273 oid=oid, repo=repo_name, store_location=store_location)
286 oid=oid, repo=repo_name, store_location=store_location)
274 return store.has_oid()
287 return store.has_oid()
275
288
276 return False
289 return False
277
290
278 @reraise_safe_exceptions
291 @reraise_safe_exceptions
279 def store_path(self, wire, oid):
292 def store_path(self, wire, oid):
280 conf = self._wire_to_config(wire)
293 conf = self._wire_to_config(wire)
281 repo_init = self._factory.repo_libgit2(wire)
294 repo_init = self._factory.repo_libgit2(wire)
282 with repo_init as repo:
295 with repo_init as repo:
283 repo_name = repo.path
296 repo_name = repo.path
284
297
285 store_location = conf.get('vcs_git_lfs_store_location')
298 store_location = conf.get('vcs_git_lfs_store_location')
286 if store_location:
299 if store_location:
287 store = LFSOidStore(
300 store = LFSOidStore(
288 oid=oid, repo=repo_name, store_location=store_location)
301 oid=oid, repo=repo_name, store_location=store_location)
289 return store.oid_path
302 return store.oid_path
290 raise ValueError('Unable to fetch oid with path {}'.format(oid))
303 raise ValueError('Unable to fetch oid with path {}'.format(oid))
291
304
292 @reraise_safe_exceptions
305 @reraise_safe_exceptions
293 def bulk_request(self, wire, rev, pre_load):
306 def bulk_request(self, wire, rev, pre_load):
294 cache_on, context_uid, repo_id = self._cache_on(wire)
307 cache_on, context_uid, repo_id = self._cache_on(wire)
295 @self.region.conditional_cache_on_arguments(condition=cache_on)
308 @self.region.conditional_cache_on_arguments(condition=cache_on)
296 def _bulk_request(_repo_id, _rev, _pre_load):
309 def _bulk_request(_repo_id, _rev, _pre_load):
297 result = {}
310 result = {}
298 for attr in pre_load:
311 for attr in pre_load:
299 try:
312 try:
300 method = self._bulk_methods[attr]
313 method = self._bulk_methods[attr]
301 args = [wire, rev]
314 args = [wire, rev]
302 result[attr] = method(*args)
315 result[attr] = method(*args)
303 except KeyError as e:
316 except KeyError as e:
304 raise exceptions.VcsException(e)(
317 raise exceptions.VcsException(e)(
305 "Unknown bulk attribute: %s" % attr)
318 "Unknown bulk attribute: %s" % attr)
306 return result
319 return result
307
320
308 return _bulk_request(repo_id, rev, sorted(pre_load))
321 return _bulk_request(repo_id, rev, sorted(pre_load))
309
322
310 def _build_opener(self, url):
323 def _build_opener(self, url):
311 handlers = []
324 handlers = []
312 url_obj = url_parser(url)
325 url_obj = url_parser(url)
313 _, authinfo = url_obj.authinfo()
326 _, authinfo = url_obj.authinfo()
314
327
315 if authinfo:
328 if authinfo:
316 # create a password manager
329 # create a password manager
317 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
330 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
318 passmgr.add_password(*authinfo)
331 passmgr.add_password(*authinfo)
319
332
320 handlers.extend((httpbasicauthhandler(passmgr),
333 handlers.extend((httpbasicauthhandler(passmgr),
321 httpdigestauthhandler(passmgr)))
334 httpdigestauthhandler(passmgr)))
322
335
323 return urllib2.build_opener(*handlers)
336 return urllib2.build_opener(*handlers)
324
337
325 def _type_id_to_name(self, type_id):
338 def _type_id_to_name(self, type_id):
326 return {
339 return {
327 1: b'commit',
340 1: b'commit',
328 2: b'tree',
341 2: b'tree',
329 3: b'blob',
342 3: b'blob',
330 4: b'tag'
343 4: b'tag'
331 }[type_id]
344 }[type_id]
332
345
333 @reraise_safe_exceptions
346 @reraise_safe_exceptions
334 def check_url(self, url, config):
347 def check_url(self, url, config):
335 url_obj = url_parser(url)
348 url_obj = url_parser(url)
336 test_uri, _ = url_obj.authinfo()
349 test_uri, _ = url_obj.authinfo()
337 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
350 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
338 url_obj.query = obfuscate_qs(url_obj.query)
351 url_obj.query = obfuscate_qs(url_obj.query)
339 cleaned_uri = str(url_obj)
352 cleaned_uri = str(url_obj)
340 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
353 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
341
354
342 if not test_uri.endswith('info/refs'):
355 if not test_uri.endswith('info/refs'):
343 test_uri = test_uri.rstrip('/') + '/info/refs'
356 test_uri = test_uri.rstrip('/') + '/info/refs'
344
357
345 o = self._build_opener(url)
358 o = self._build_opener(url)
346 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
359 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
347
360
348 q = {"service": 'git-upload-pack'}
361 q = {"service": 'git-upload-pack'}
349 qs = '?%s' % urllib.urlencode(q)
362 qs = '?%s' % urllib.urlencode(q)
350 cu = "%s%s" % (test_uri, qs)
363 cu = "%s%s" % (test_uri, qs)
351 req = urllib2.Request(cu, None, {})
364 req = urllib2.Request(cu, None, {})
352
365
353 try:
366 try:
354 log.debug("Trying to open URL %s", cleaned_uri)
367 log.debug("Trying to open URL %s", cleaned_uri)
355 resp = o.open(req)
368 resp = o.open(req)
356 if resp.code != 200:
369 if resp.code != 200:
357 raise exceptions.URLError()('Return Code is not 200')
370 raise exceptions.URLError()('Return Code is not 200')
358 except Exception as e:
371 except Exception as e:
359 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
372 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
360 # means it cannot be cloned
373 # means it cannot be cloned
361 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
374 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
362
375
363 # now detect if it's proper git repo
376 # now detect if it's proper git repo
364 gitdata = resp.read()
377 gitdata = resp.read()
365 if 'service=git-upload-pack' in gitdata:
378 if 'service=git-upload-pack' in gitdata:
366 pass
379 pass
367 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
380 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
368 # old style git can return some other format !
381 # old style git can return some other format !
369 pass
382 pass
370 else:
383 else:
371 raise exceptions.URLError()(
384 raise exceptions.URLError()(
372 "url [%s] does not look like an git" % (cleaned_uri,))
385 "url [%s] does not look like an git" % (cleaned_uri,))
373
386
374 return True
387 return True
375
388
376 @reraise_safe_exceptions
389 @reraise_safe_exceptions
377 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
390 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
378 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
391 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
379 remote_refs = self.pull(wire, url, apply_refs=False)
392 remote_refs = self.pull(wire, url, apply_refs=False)
380 repo = self._factory.repo(wire)
393 repo = self._factory.repo(wire)
381 if isinstance(valid_refs, list):
394 if isinstance(valid_refs, list):
382 valid_refs = tuple(valid_refs)
395 valid_refs = tuple(valid_refs)
383
396
384 for k in remote_refs:
397 for k in remote_refs:
385 # only parse heads/tags and skip so called deferred tags
398 # only parse heads/tags and skip so called deferred tags
386 if k.startswith(valid_refs) and not k.endswith(deferred):
399 if k.startswith(valid_refs) and not k.endswith(deferred):
387 repo[k] = remote_refs[k]
400 repo[k] = remote_refs[k]
388
401
389 if update_after_clone:
402 if update_after_clone:
390 # we want to checkout HEAD
403 # we want to checkout HEAD
391 repo["HEAD"] = remote_refs["HEAD"]
404 repo["HEAD"] = remote_refs["HEAD"]
392 index.build_index_from_tree(repo.path, repo.index_path(),
405 index.build_index_from_tree(repo.path, repo.index_path(),
393 repo.object_store, repo["HEAD"].tree)
406 repo.object_store, repo["HEAD"].tree)
394
407
395 @reraise_safe_exceptions
408 @reraise_safe_exceptions
396 def branch(self, wire, commit_id):
409 def branch(self, wire, commit_id):
397 cache_on, context_uid, repo_id = self._cache_on(wire)
410 cache_on, context_uid, repo_id = self._cache_on(wire)
398 @self.region.conditional_cache_on_arguments(condition=cache_on)
411 @self.region.conditional_cache_on_arguments(condition=cache_on)
399 def _branch(_context_uid, _repo_id, _commit_id):
412 def _branch(_context_uid, _repo_id, _commit_id):
400 regex = re.compile('^refs/heads')
413 regex = re.compile('^refs/heads')
401
414
402 def filter_with(ref):
415 def filter_with(ref):
403 return regex.match(ref[0]) and ref[1] == _commit_id
416 return regex.match(ref[0]) and ref[1] == _commit_id
404
417
405 branches = filter(filter_with, self.get_refs(wire).items())
418 branches = filter(filter_with, self.get_refs(wire).items())
406 return [x[0].split('refs/heads/')[-1] for x in branches]
419 return [x[0].split('refs/heads/')[-1] for x in branches]
407
420
408 return _branch(context_uid, repo_id, commit_id)
421 return _branch(context_uid, repo_id, commit_id)
409
422
410 @reraise_safe_exceptions
423 @reraise_safe_exceptions
411 def commit_branches(self, wire, commit_id):
424 def commit_branches(self, wire, commit_id):
412 cache_on, context_uid, repo_id = self._cache_on(wire)
425 cache_on, context_uid, repo_id = self._cache_on(wire)
413 @self.region.conditional_cache_on_arguments(condition=cache_on)
426 @self.region.conditional_cache_on_arguments(condition=cache_on)
414 def _commit_branches(_context_uid, _repo_id, _commit_id):
427 def _commit_branches(_context_uid, _repo_id, _commit_id):
415 repo_init = self._factory.repo_libgit2(wire)
428 repo_init = self._factory.repo_libgit2(wire)
416 with repo_init as repo:
429 with repo_init as repo:
417 branches = [x for x in repo.branches.with_commit(_commit_id)]
430 branches = [x for x in repo.branches.with_commit(_commit_id)]
418 return branches
431 return branches
419
432
420 return _commit_branches(context_uid, repo_id, commit_id)
433 return _commit_branches(context_uid, repo_id, commit_id)
421
434
422 @reraise_safe_exceptions
435 @reraise_safe_exceptions
423 def add_object(self, wire, content):
436 def add_object(self, wire, content):
424 repo_init = self._factory.repo_libgit2(wire)
437 repo_init = self._factory.repo_libgit2(wire)
425 with repo_init as repo:
438 with repo_init as repo:
426 blob = objects.Blob()
439 blob = objects.Blob()
427 blob.set_raw_string(content)
440 blob.set_raw_string(content)
428 repo.object_store.add_object(blob)
441 repo.object_store.add_object(blob)
429 return blob.id
442 return blob.id
430
443
431 # TODO: this is quite complex, check if that can be simplified
444 # TODO: this is quite complex, check if that can be simplified
432 @reraise_safe_exceptions
445 @reraise_safe_exceptions
433 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
446 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
434 repo = self._factory.repo(wire)
447 repo = self._factory.repo(wire)
435 object_store = repo.object_store
448 object_store = repo.object_store
436
449
437 # Create tree and populates it with blobs
450 # Create tree and populates it with blobs
438 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
451 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
439
452
440 for node in updated:
453 for node in updated:
441 # Compute subdirs if needed
454 # Compute subdirs if needed
442 dirpath, nodename = vcspath.split(node['path'])
455 dirpath, nodename = vcspath.split(node['path'])
443 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
456 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
444 parent = commit_tree
457 parent = commit_tree
445 ancestors = [('', parent)]
458 ancestors = [('', parent)]
446
459
447 # Tries to dig for the deepest existing tree
460 # Tries to dig for the deepest existing tree
448 while dirnames:
461 while dirnames:
449 curdir = dirnames.pop(0)
462 curdir = dirnames.pop(0)
450 try:
463 try:
451 dir_id = parent[curdir][1]
464 dir_id = parent[curdir][1]
452 except KeyError:
465 except KeyError:
453 # put curdir back into dirnames and stops
466 # put curdir back into dirnames and stops
454 dirnames.insert(0, curdir)
467 dirnames.insert(0, curdir)
455 break
468 break
456 else:
469 else:
457 # If found, updates parent
470 # If found, updates parent
458 parent = repo[dir_id]
471 parent = repo[dir_id]
459 ancestors.append((curdir, parent))
472 ancestors.append((curdir, parent))
460 # Now parent is deepest existing tree and we need to create
473 # Now parent is deepest existing tree and we need to create
461 # subtrees for dirnames (in reverse order)
474 # subtrees for dirnames (in reverse order)
462 # [this only applies for nodes from added]
475 # [this only applies for nodes from added]
463 new_trees = []
476 new_trees = []
464
477
465 blob = objects.Blob.from_string(node['content'])
478 blob = objects.Blob.from_string(node['content'])
466
479
467 if dirnames:
480 if dirnames:
468 # If there are trees which should be created we need to build
481 # If there are trees which should be created we need to build
469 # them now (in reverse order)
482 # them now (in reverse order)
470 reversed_dirnames = list(reversed(dirnames))
483 reversed_dirnames = list(reversed(dirnames))
471 curtree = objects.Tree()
484 curtree = objects.Tree()
472 curtree[node['node_path']] = node['mode'], blob.id
485 curtree[node['node_path']] = node['mode'], blob.id
473 new_trees.append(curtree)
486 new_trees.append(curtree)
474 for dirname in reversed_dirnames[:-1]:
487 for dirname in reversed_dirnames[:-1]:
475 newtree = objects.Tree()
488 newtree = objects.Tree()
476 newtree[dirname] = (DIR_STAT, curtree.id)
489 newtree[dirname] = (DIR_STAT, curtree.id)
477 new_trees.append(newtree)
490 new_trees.append(newtree)
478 curtree = newtree
491 curtree = newtree
479 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
492 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
480 else:
493 else:
481 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
494 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
482
495
483 new_trees.append(parent)
496 new_trees.append(parent)
484 # Update ancestors
497 # Update ancestors
485 reversed_ancestors = reversed(
498 reversed_ancestors = reversed(
486 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
499 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
487 for parent, tree, path in reversed_ancestors:
500 for parent, tree, path in reversed_ancestors:
488 parent[path] = (DIR_STAT, tree.id)
501 parent[path] = (DIR_STAT, tree.id)
489 object_store.add_object(tree)
502 object_store.add_object(tree)
490
503
491 object_store.add_object(blob)
504 object_store.add_object(blob)
492 for tree in new_trees:
505 for tree in new_trees:
493 object_store.add_object(tree)
506 object_store.add_object(tree)
494
507
495 for node_path in removed:
508 for node_path in removed:
496 paths = node_path.split('/')
509 paths = node_path.split('/')
497 tree = commit_tree
510 tree = commit_tree
498 trees = [tree]
511 trees = [tree]
499 # Traverse deep into the forest...
512 # Traverse deep into the forest...
500 for path in paths:
513 for path in paths:
501 try:
514 try:
502 obj = repo[tree[path][1]]
515 obj = repo[tree[path][1]]
503 if isinstance(obj, objects.Tree):
516 if isinstance(obj, objects.Tree):
504 trees.append(obj)
517 trees.append(obj)
505 tree = obj
518 tree = obj
506 except KeyError:
519 except KeyError:
507 break
520 break
508 # Cut down the blob and all rotten trees on the way back...
521 # Cut down the blob and all rotten trees on the way back...
509 for path, tree in reversed(zip(paths, trees)):
522 for path, tree in reversed(zip(paths, trees)):
510 del tree[path]
523 del tree[path]
511 if tree:
524 if tree:
512 # This tree still has elements - don't remove it or any
525 # This tree still has elements - don't remove it or any
513 # of it's parents
526 # of it's parents
514 break
527 break
515
528
516 object_store.add_object(commit_tree)
529 object_store.add_object(commit_tree)
517
530
518 # Create commit
531 # Create commit
519 commit = objects.Commit()
532 commit = objects.Commit()
520 commit.tree = commit_tree.id
533 commit.tree = commit_tree.id
521 for k, v in commit_data.iteritems():
534 for k, v in commit_data.iteritems():
522 setattr(commit, k, v)
535 setattr(commit, k, v)
523 object_store.add_object(commit)
536 object_store.add_object(commit)
524
537
525 self.create_branch(wire, branch, commit.id)
538 self.create_branch(wire, branch, commit.id)
526
539
527 # dulwich set-ref
540 # dulwich set-ref
528 ref = 'refs/heads/%s' % branch
541 ref = 'refs/heads/%s' % branch
529 repo.refs[ref] = commit.id
542 repo.refs[ref] = commit.id
530
543
531 return commit.id
544 return commit.id
532
545
533 @reraise_safe_exceptions
546 @reraise_safe_exceptions
534 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
547 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
535 if url != 'default' and '://' not in url:
548 if url != 'default' and '://' not in url:
536 client = LocalGitClient(url)
549 client = LocalGitClient(url)
537 else:
550 else:
538 url_obj = url_parser(url)
551 url_obj = url_parser(url)
539 o = self._build_opener(url)
552 o = self._build_opener(url)
540 url, _ = url_obj.authinfo()
553 url, _ = url_obj.authinfo()
541 client = HttpGitClient(base_url=url, opener=o)
554 client = HttpGitClient(base_url=url, opener=o)
542 repo = self._factory.repo(wire)
555 repo = self._factory.repo(wire)
543
556
544 determine_wants = repo.object_store.determine_wants_all
557 determine_wants = repo.object_store.determine_wants_all
545 if refs:
558 if refs:
546 def determine_wants_requested(references):
559 def determine_wants_requested(references):
547 return [references[r] for r in references if r in refs]
560 return [references[r] for r in references if r in refs]
548 determine_wants = determine_wants_requested
561 determine_wants = determine_wants_requested
549
562
550 try:
563 try:
551 remote_refs = client.fetch(
564 remote_refs = client.fetch(
552 path=url, target=repo, determine_wants=determine_wants)
565 path=url, target=repo, determine_wants=determine_wants)
553 except NotGitRepository as e:
566 except NotGitRepository as e:
554 log.warning(
567 log.warning(
555 'Trying to fetch from "%s" failed, not a Git repository.', url)
568 'Trying to fetch from "%s" failed, not a Git repository.', url)
556 # Exception can contain unicode which we convert
569 # Exception can contain unicode which we convert
557 raise exceptions.AbortException(e)(repr(e))
570 raise exceptions.AbortException(e)(repr(e))
558
571
559 # mikhail: client.fetch() returns all the remote refs, but fetches only
572 # mikhail: client.fetch() returns all the remote refs, but fetches only
560 # refs filtered by `determine_wants` function. We need to filter result
573 # refs filtered by `determine_wants` function. We need to filter result
561 # as well
574 # as well
562 if refs:
575 if refs:
563 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
576 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
564
577
565 if apply_refs:
578 if apply_refs:
566 # TODO: johbo: Needs proper test coverage with a git repository
579 # TODO: johbo: Needs proper test coverage with a git repository
567 # that contains a tag object, so that we would end up with
580 # that contains a tag object, so that we would end up with
568 # a peeled ref at this point.
581 # a peeled ref at this point.
569 for k in remote_refs:
582 for k in remote_refs:
570 if k.endswith(PEELED_REF_MARKER):
583 if k.endswith(PEELED_REF_MARKER):
571 log.debug("Skipping peeled reference %s", k)
584 log.debug("Skipping peeled reference %s", k)
572 continue
585 continue
573 repo[k] = remote_refs[k]
586 repo[k] = remote_refs[k]
574
587
575 if refs and not update_after:
588 if refs and not update_after:
576 # mikhail: explicitly set the head to the last ref.
589 # mikhail: explicitly set the head to the last ref.
577 repo['HEAD'] = remote_refs[refs[-1]]
590 repo['HEAD'] = remote_refs[refs[-1]]
578
591
579 if update_after:
592 if update_after:
580 # we want to checkout HEAD
593 # we want to checkout HEAD
581 repo["HEAD"] = remote_refs["HEAD"]
594 repo["HEAD"] = remote_refs["HEAD"]
582 index.build_index_from_tree(repo.path, repo.index_path(),
595 index.build_index_from_tree(repo.path, repo.index_path(),
583 repo.object_store, repo["HEAD"].tree)
596 repo.object_store, repo["HEAD"].tree)
584 return remote_refs
597 return remote_refs
585
598
586 @reraise_safe_exceptions
599 @reraise_safe_exceptions
587 def sync_fetch(self, wire, url, refs=None, all_refs=False):
600 def sync_fetch(self, wire, url, refs=None, all_refs=False):
588 repo = self._factory.repo(wire)
601 repo = self._factory.repo(wire)
589 if refs and not isinstance(refs, (list, tuple)):
602 if refs and not isinstance(refs, (list, tuple)):
590 refs = [refs]
603 refs = [refs]
591
604
592 config = self._wire_to_config(wire)
605 config = self._wire_to_config(wire)
593 # get all remote refs we'll use to fetch later
606 # get all remote refs we'll use to fetch later
594 cmd = ['ls-remote']
607 cmd = ['ls-remote']
595 if not all_refs:
608 if not all_refs:
596 cmd += ['--heads', '--tags']
609 cmd += ['--heads', '--tags']
597 cmd += [url]
610 cmd += [url]
598 output, __ = self.run_git_command(
611 output, __ = self.run_git_command(
599 wire, cmd, fail_on_stderr=False,
612 wire, cmd, fail_on_stderr=False,
600 _copts=self._remote_conf(config),
613 _copts=self._remote_conf(config),
601 extra_env={'GIT_TERMINAL_PROMPT': '0'})
614 extra_env={'GIT_TERMINAL_PROMPT': '0'})
602
615
603 remote_refs = collections.OrderedDict()
616 remote_refs = collections.OrderedDict()
604 fetch_refs = []
617 fetch_refs = []
605
618
606 for ref_line in output.splitlines():
619 for ref_line in output.splitlines():
607 sha, ref = ref_line.split('\t')
620 sha, ref = ref_line.split('\t')
608 sha = sha.strip()
621 sha = sha.strip()
609 if ref in remote_refs:
622 if ref in remote_refs:
610 # duplicate, skip
623 # duplicate, skip
611 continue
624 continue
612 if ref.endswith(PEELED_REF_MARKER):
625 if ref.endswith(PEELED_REF_MARKER):
613 log.debug("Skipping peeled reference %s", ref)
626 log.debug("Skipping peeled reference %s", ref)
614 continue
627 continue
615 # don't sync HEAD
628 # don't sync HEAD
616 if ref in ['HEAD']:
629 if ref in ['HEAD']:
617 continue
630 continue
618
631
619 remote_refs[ref] = sha
632 remote_refs[ref] = sha
620
633
621 if refs and sha in refs:
634 if refs and sha in refs:
622 # we filter fetch using our specified refs
635 # we filter fetch using our specified refs
623 fetch_refs.append('{}:{}'.format(ref, ref))
636 fetch_refs.append('{}:{}'.format(ref, ref))
624 elif not refs:
637 elif not refs:
625 fetch_refs.append('{}:{}'.format(ref, ref))
638 fetch_refs.append('{}:{}'.format(ref, ref))
626 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
639 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
627
640
628 if fetch_refs:
641 if fetch_refs:
629 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
642 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
630 fetch_refs_chunks = list(chunk)
643 fetch_refs_chunks = list(chunk)
631 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
644 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
632 _out, _err = self.run_git_command(
645 _out, _err = self.run_git_command(
633 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
646 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
634 fail_on_stderr=False,
647 fail_on_stderr=False,
635 _copts=self._remote_conf(config),
648 _copts=self._remote_conf(config),
636 extra_env={'GIT_TERMINAL_PROMPT': '0'})
649 extra_env={'GIT_TERMINAL_PROMPT': '0'})
637
650
638 return remote_refs
651 return remote_refs
639
652
640 @reraise_safe_exceptions
653 @reraise_safe_exceptions
641 def sync_push(self, wire, url, refs=None):
654 def sync_push(self, wire, url, refs=None):
642 if not self.check_url(url, wire):
655 if not self.check_url(url, wire):
643 return
656 return
644 config = self._wire_to_config(wire)
657 config = self._wire_to_config(wire)
645 self._factory.repo(wire)
658 self._factory.repo(wire)
646 self.run_git_command(
659 self.run_git_command(
647 wire, ['push', url, '--mirror'], fail_on_stderr=False,
660 wire, ['push', url, '--mirror'], fail_on_stderr=False,
648 _copts=self._remote_conf(config),
661 _copts=self._remote_conf(config),
649 extra_env={'GIT_TERMINAL_PROMPT': '0'})
662 extra_env={'GIT_TERMINAL_PROMPT': '0'})
650
663
651 @reraise_safe_exceptions
664 @reraise_safe_exceptions
652 def get_remote_refs(self, wire, url):
665 def get_remote_refs(self, wire, url):
653 repo = Repo(url)
666 repo = Repo(url)
654 return repo.get_refs()
667 return repo.get_refs()
655
668
656 @reraise_safe_exceptions
669 @reraise_safe_exceptions
657 def get_description(self, wire):
670 def get_description(self, wire):
658 repo = self._factory.repo(wire)
671 repo = self._factory.repo(wire)
659 return repo.get_description()
672 return repo.get_description()
660
673
661 @reraise_safe_exceptions
674 @reraise_safe_exceptions
662 def get_missing_revs(self, wire, rev1, rev2, path2):
675 def get_missing_revs(self, wire, rev1, rev2, path2):
663 repo = self._factory.repo(wire)
676 repo = self._factory.repo(wire)
664 LocalGitClient(thin_packs=False).fetch(path2, repo)
677 LocalGitClient(thin_packs=False).fetch(path2, repo)
665
678
666 wire_remote = wire.copy()
679 wire_remote = wire.copy()
667 wire_remote['path'] = path2
680 wire_remote['path'] = path2
668 repo_remote = self._factory.repo(wire_remote)
681 repo_remote = self._factory.repo(wire_remote)
669 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
682 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
670
683
671 revs = [
684 revs = [
672 x.commit.id
685 x.commit.id
673 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
686 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
674 return revs
687 return revs
675
688
676 @reraise_safe_exceptions
689 @reraise_safe_exceptions
677 def get_object(self, wire, sha):
690 def get_object(self, wire, sha):
678 cache_on, context_uid, repo_id = self._cache_on(wire)
691 cache_on, context_uid, repo_id = self._cache_on(wire)
679 @self.region.conditional_cache_on_arguments(condition=cache_on)
692 @self.region.conditional_cache_on_arguments(condition=cache_on)
680 def _get_object(_context_uid, _repo_id, _sha):
693 def _get_object(_context_uid, _repo_id, _sha):
681 repo_init = self._factory.repo_libgit2(wire)
694 repo_init = self._factory.repo_libgit2(wire)
682 with repo_init as repo:
695 with repo_init as repo:
683
696
684 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
697 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
685 try:
698 try:
686 commit = repo.revparse_single(sha)
699 commit = repo.revparse_single(sha)
687 except (KeyError, ValueError) as e:
700 except (KeyError, ValueError) as e:
688 raise exceptions.LookupException(e)(missing_commit_err)
701 raise exceptions.LookupException(e)(missing_commit_err)
689
702
690 is_tag = False
703 is_tag = False
691 if isinstance(commit, pygit2.Tag):
704 if isinstance(commit, pygit2.Tag):
692 commit = repo.get(commit.target)
705 commit = repo.get(commit.target)
693 is_tag = True
706 is_tag = True
694
707
695 check_dangling = True
708 check_dangling = True
696 if is_tag:
709 if is_tag:
697 check_dangling = False
710 check_dangling = False
698
711
699 # we used a reference and it parsed means we're not having a dangling commit
712 # we used a reference and it parsed means we're not having a dangling commit
700 if sha != commit.hex:
713 if sha != commit.hex:
701 check_dangling = False
714 check_dangling = False
702
715
703 if check_dangling:
716 if check_dangling:
704 # check for dangling commit
717 # check for dangling commit
705 for branch in repo.branches.with_commit(commit.hex):
718 for branch in repo.branches.with_commit(commit.hex):
706 if branch:
719 if branch:
707 break
720 break
708 else:
721 else:
709 raise exceptions.LookupException(None)(missing_commit_err)
722 raise exceptions.LookupException(None)(missing_commit_err)
710
723
711 commit_id = commit.hex
724 commit_id = commit.hex
712 type_id = commit.type
725 type_id = commit.type
713
726
714 return {
727 return {
715 'id': commit_id,
728 'id': commit_id,
716 'type': self._type_id_to_name(type_id),
729 'type': self._type_id_to_name(type_id),
717 'commit_id': commit_id,
730 'commit_id': commit_id,
718 'idx': 0
731 'idx': 0
719 }
732 }
720
733
721 return _get_object(context_uid, repo_id, sha)
734 return _get_object(context_uid, repo_id, sha)
722
735
723 @reraise_safe_exceptions
736 @reraise_safe_exceptions
724 def get_refs(self, wire):
737 def get_refs(self, wire):
725 cache_on, context_uid, repo_id = self._cache_on(wire)
738 cache_on, context_uid, repo_id = self._cache_on(wire)
726 @self.region.conditional_cache_on_arguments(condition=cache_on)
739 @self.region.conditional_cache_on_arguments(condition=cache_on)
727 def _get_refs(_context_uid, _repo_id):
740 def _get_refs(_context_uid, _repo_id):
728
741
729 repo_init = self._factory.repo_libgit2(wire)
742 repo_init = self._factory.repo_libgit2(wire)
730 with repo_init as repo:
743 with repo_init as repo:
731 regex = re.compile('^refs/(heads|tags)/')
744 regex = re.compile('^refs/(heads|tags)/')
732 return {x.name: x.target.hex for x in
745 return {x.name: x.target.hex for x in
733 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
746 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
734
747
735 return _get_refs(context_uid, repo_id)
748 return _get_refs(context_uid, repo_id)
736
749
737 @reraise_safe_exceptions
750 @reraise_safe_exceptions
738 def get_branch_pointers(self, wire):
751 def get_branch_pointers(self, wire):
739 cache_on, context_uid, repo_id = self._cache_on(wire)
752 cache_on, context_uid, repo_id = self._cache_on(wire)
740 @self.region.conditional_cache_on_arguments(condition=cache_on)
753 @self.region.conditional_cache_on_arguments(condition=cache_on)
741 def _get_branch_pointers(_context_uid, _repo_id):
754 def _get_branch_pointers(_context_uid, _repo_id):
742
755
743 repo_init = self._factory.repo_libgit2(wire)
756 repo_init = self._factory.repo_libgit2(wire)
744 regex = re.compile('^refs/heads')
757 regex = re.compile('^refs/heads')
745 with repo_init as repo:
758 with repo_init as repo:
746 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
759 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
747 return {x.target.hex: x.shorthand for x in branches}
760 return {x.target.hex: x.shorthand for x in branches}
748
761
749 return _get_branch_pointers(context_uid, repo_id)
762 return _get_branch_pointers(context_uid, repo_id)
750
763
751 @reraise_safe_exceptions
764 @reraise_safe_exceptions
752 def head(self, wire, show_exc=True):
765 def head(self, wire, show_exc=True):
753 cache_on, context_uid, repo_id = self._cache_on(wire)
766 cache_on, context_uid, repo_id = self._cache_on(wire)
754 @self.region.conditional_cache_on_arguments(condition=cache_on)
767 @self.region.conditional_cache_on_arguments(condition=cache_on)
755 def _head(_context_uid, _repo_id, _show_exc):
768 def _head(_context_uid, _repo_id, _show_exc):
756 repo_init = self._factory.repo_libgit2(wire)
769 repo_init = self._factory.repo_libgit2(wire)
757 with repo_init as repo:
770 with repo_init as repo:
758 try:
771 try:
759 return repo.head.peel().hex
772 return repo.head.peel().hex
760 except Exception:
773 except Exception:
761 if show_exc:
774 if show_exc:
762 raise
775 raise
763 return _head(context_uid, repo_id, show_exc)
776 return _head(context_uid, repo_id, show_exc)
764
777
765 @reraise_safe_exceptions
778 @reraise_safe_exceptions
766 def init(self, wire):
779 def init(self, wire):
767 repo_path = str_to_dulwich(wire['path'])
780 repo_path = str_to_dulwich(wire['path'])
768 self.repo = Repo.init(repo_path)
781 self.repo = Repo.init(repo_path)
769
782
770 @reraise_safe_exceptions
783 @reraise_safe_exceptions
771 def init_bare(self, wire):
784 def init_bare(self, wire):
772 repo_path = str_to_dulwich(wire['path'])
785 repo_path = str_to_dulwich(wire['path'])
773 self.repo = Repo.init_bare(repo_path)
786 self.repo = Repo.init_bare(repo_path)
774
787
775 @reraise_safe_exceptions
788 @reraise_safe_exceptions
776 def revision(self, wire, rev):
789 def revision(self, wire, rev):
777
790
778 cache_on, context_uid, repo_id = self._cache_on(wire)
791 cache_on, context_uid, repo_id = self._cache_on(wire)
779 @self.region.conditional_cache_on_arguments(condition=cache_on)
792 @self.region.conditional_cache_on_arguments(condition=cache_on)
780 def _revision(_context_uid, _repo_id, _rev):
793 def _revision(_context_uid, _repo_id, _rev):
781 repo_init = self._factory.repo_libgit2(wire)
794 repo_init = self._factory.repo_libgit2(wire)
782 with repo_init as repo:
795 with repo_init as repo:
783 commit = repo[rev]
796 commit = repo[rev]
784 obj_data = {
797 obj_data = {
785 'id': commit.id.hex,
798 'id': commit.id.hex,
786 }
799 }
787 # tree objects itself don't have tree_id attribute
800 # tree objects itself don't have tree_id attribute
788 if hasattr(commit, 'tree_id'):
801 if hasattr(commit, 'tree_id'):
789 obj_data['tree'] = commit.tree_id.hex
802 obj_data['tree'] = commit.tree_id.hex
790
803
791 return obj_data
804 return obj_data
792 return _revision(context_uid, repo_id, rev)
805 return _revision(context_uid, repo_id, rev)
793
806
794 @reraise_safe_exceptions
807 @reraise_safe_exceptions
795 def date(self, wire, commit_id):
808 def date(self, wire, commit_id):
796 cache_on, context_uid, repo_id = self._cache_on(wire)
809 cache_on, context_uid, repo_id = self._cache_on(wire)
797 @self.region.conditional_cache_on_arguments(condition=cache_on)
810 @self.region.conditional_cache_on_arguments(condition=cache_on)
798 def _date(_repo_id, _commit_id):
811 def _date(_repo_id, _commit_id):
799 repo_init = self._factory.repo_libgit2(wire)
812 repo_init = self._factory.repo_libgit2(wire)
800 with repo_init as repo:
813 with repo_init as repo:
801 commit = repo[commit_id]
814 commit = repo[commit_id]
802
815
803 if hasattr(commit, 'commit_time'):
816 if hasattr(commit, 'commit_time'):
804 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
817 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
805 else:
818 else:
806 commit = commit.get_object()
819 commit = commit.get_object()
807 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
820 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
808
821
809 # TODO(marcink): check dulwich difference of offset vs timezone
822 # TODO(marcink): check dulwich difference of offset vs timezone
810 return [commit_time, commit_time_offset]
823 return [commit_time, commit_time_offset]
811 return _date(repo_id, commit_id)
824 return _date(repo_id, commit_id)
812
825
813 @reraise_safe_exceptions
826 @reraise_safe_exceptions
814 def author(self, wire, commit_id):
827 def author(self, wire, commit_id):
815 cache_on, context_uid, repo_id = self._cache_on(wire)
828 cache_on, context_uid, repo_id = self._cache_on(wire)
816 @self.region.conditional_cache_on_arguments(condition=cache_on)
829 @self.region.conditional_cache_on_arguments(condition=cache_on)
817 def _author(_repo_id, _commit_id):
830 def _author(_repo_id, _commit_id):
818 repo_init = self._factory.repo_libgit2(wire)
831 repo_init = self._factory.repo_libgit2(wire)
819 with repo_init as repo:
832 with repo_init as repo:
820 commit = repo[commit_id]
833 commit = repo[commit_id]
821
834
822 if hasattr(commit, 'author'):
835 if hasattr(commit, 'author'):
823 author = commit.author
836 author = commit.author
824 else:
837 else:
825 author = commit.get_object().author
838 author = commit.get_object().author
826
839
827 if author.email:
840 if author.email:
828 return u"{} <{}>".format(author.name, author.email)
841 return u"{} <{}>".format(author.name, author.email)
829
842
830 return u"{}".format(author.raw_name)
843 return u"{}".format(author.raw_name)
831 return _author(repo_id, commit_id)
844 return _author(repo_id, commit_id)
832
845
833 @reraise_safe_exceptions
846 @reraise_safe_exceptions
834 def message(self, wire, commit_id):
847 def message(self, wire, commit_id):
835 cache_on, context_uid, repo_id = self._cache_on(wire)
848 cache_on, context_uid, repo_id = self._cache_on(wire)
836 @self.region.conditional_cache_on_arguments(condition=cache_on)
849 @self.region.conditional_cache_on_arguments(condition=cache_on)
837 def _message(_repo_id, _commit_id):
850 def _message(_repo_id, _commit_id):
838 repo_init = self._factory.repo_libgit2(wire)
851 repo_init = self._factory.repo_libgit2(wire)
839 with repo_init as repo:
852 with repo_init as repo:
840 commit = repo[commit_id]
853 commit = repo[commit_id]
841 return commit.message
854 return commit.message
842 return _message(repo_id, commit_id)
855 return _message(repo_id, commit_id)
843
856
844 @reraise_safe_exceptions
857 @reraise_safe_exceptions
845 def parents(self, wire, commit_id):
858 def parents(self, wire, commit_id):
846 cache_on, context_uid, repo_id = self._cache_on(wire)
859 cache_on, context_uid, repo_id = self._cache_on(wire)
847 @self.region.conditional_cache_on_arguments(condition=cache_on)
860 @self.region.conditional_cache_on_arguments(condition=cache_on)
848 def _parents(_repo_id, _commit_id):
861 def _parents(_repo_id, _commit_id):
849 repo_init = self._factory.repo_libgit2(wire)
862 repo_init = self._factory.repo_libgit2(wire)
850 with repo_init as repo:
863 with repo_init as repo:
851 commit = repo[commit_id]
864 commit = repo[commit_id]
852 if hasattr(commit, 'parent_ids'):
865 if hasattr(commit, 'parent_ids'):
853 parent_ids = commit.parent_ids
866 parent_ids = commit.parent_ids
854 else:
867 else:
855 parent_ids = commit.get_object().parent_ids
868 parent_ids = commit.get_object().parent_ids
856
869
857 return [x.hex for x in parent_ids]
870 return [x.hex for x in parent_ids]
858 return _parents(repo_id, commit_id)
871 return _parents(repo_id, commit_id)
859
872
860 @reraise_safe_exceptions
873 @reraise_safe_exceptions
861 def children(self, wire, commit_id):
874 def children(self, wire, commit_id):
862 cache_on, context_uid, repo_id = self._cache_on(wire)
875 cache_on, context_uid, repo_id = self._cache_on(wire)
863 @self.region.conditional_cache_on_arguments(condition=cache_on)
876 @self.region.conditional_cache_on_arguments(condition=cache_on)
864 def _children(_repo_id, _commit_id):
877 def _children(_repo_id, _commit_id):
865 output, __ = self.run_git_command(
878 output, __ = self.run_git_command(
866 wire, ['rev-list', '--all', '--children'])
879 wire, ['rev-list', '--all', '--children'])
867
880
868 child_ids = []
881 child_ids = []
869 pat = re.compile(r'^%s' % commit_id)
882 pat = re.compile(r'^%s' % commit_id)
870 for l in output.splitlines():
883 for l in output.splitlines():
871 if pat.match(l):
884 if pat.match(l):
872 found_ids = l.split(' ')[1:]
885 found_ids = l.split(' ')[1:]
873 child_ids.extend(found_ids)
886 child_ids.extend(found_ids)
874
887
875 return child_ids
888 return child_ids
876 return _children(repo_id, commit_id)
889 return _children(repo_id, commit_id)
877
890
878 @reraise_safe_exceptions
891 @reraise_safe_exceptions
879 def set_refs(self, wire, key, value):
892 def set_refs(self, wire, key, value):
880 repo_init = self._factory.repo_libgit2(wire)
893 repo_init = self._factory.repo_libgit2(wire)
881 with repo_init as repo:
894 with repo_init as repo:
882 repo.references.create(key, value, force=True)
895 repo.references.create(key, value, force=True)
883
896
884 @reraise_safe_exceptions
897 @reraise_safe_exceptions
885 def create_branch(self, wire, branch_name, commit_id, force=False):
898 def create_branch(self, wire, branch_name, commit_id, force=False):
886 repo_init = self._factory.repo_libgit2(wire)
899 repo_init = self._factory.repo_libgit2(wire)
887 with repo_init as repo:
900 with repo_init as repo:
888 commit = repo[commit_id]
901 commit = repo[commit_id]
889
902
890 if force:
903 if force:
891 repo.branches.local.create(branch_name, commit, force=force)
904 repo.branches.local.create(branch_name, commit, force=force)
892 elif not repo.branches.get(branch_name):
905 elif not repo.branches.get(branch_name):
893 # create only if that branch isn't existing
906 # create only if that branch isn't existing
894 repo.branches.local.create(branch_name, commit, force=force)
907 repo.branches.local.create(branch_name, commit, force=force)
895
908
896 @reraise_safe_exceptions
909 @reraise_safe_exceptions
897 def remove_ref(self, wire, key):
910 def remove_ref(self, wire, key):
898 repo_init = self._factory.repo_libgit2(wire)
911 repo_init = self._factory.repo_libgit2(wire)
899 with repo_init as repo:
912 with repo_init as repo:
900 repo.references.delete(key)
913 repo.references.delete(key)
901
914
902 @reraise_safe_exceptions
915 @reraise_safe_exceptions
903 def tag_remove(self, wire, tag_name):
916 def tag_remove(self, wire, tag_name):
904 repo_init = self._factory.repo_libgit2(wire)
917 repo_init = self._factory.repo_libgit2(wire)
905 with repo_init as repo:
918 with repo_init as repo:
906 key = 'refs/tags/{}'.format(tag_name)
919 key = 'refs/tags/{}'.format(tag_name)
907 repo.references.delete(key)
920 repo.references.delete(key)
908
921
909 @reraise_safe_exceptions
922 @reraise_safe_exceptions
910 def tree_changes(self, wire, source_id, target_id):
923 def tree_changes(self, wire, source_id, target_id):
911 # TODO(marcink): remove this seems it's only used by tests
924 # TODO(marcink): remove this seems it's only used by tests
912 repo = self._factory.repo(wire)
925 repo = self._factory.repo(wire)
913 source = repo[source_id].tree if source_id else None
926 source = repo[source_id].tree if source_id else None
914 target = repo[target_id].tree
927 target = repo[target_id].tree
915 result = repo.object_store.tree_changes(source, target)
928 result = repo.object_store.tree_changes(source, target)
916 return list(result)
929 return list(result)
917
930
918 @reraise_safe_exceptions
931 @reraise_safe_exceptions
919 def tree_and_type_for_path(self, wire, commit_id, path):
932 def tree_and_type_for_path(self, wire, commit_id, path):
920
933
921 cache_on, context_uid, repo_id = self._cache_on(wire)
934 cache_on, context_uid, repo_id = self._cache_on(wire)
922 @self.region.conditional_cache_on_arguments(condition=cache_on)
935 @self.region.conditional_cache_on_arguments(condition=cache_on)
923 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
936 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
924 repo_init = self._factory.repo_libgit2(wire)
937 repo_init = self._factory.repo_libgit2(wire)
925
938
926 with repo_init as repo:
939 with repo_init as repo:
927 commit = repo[commit_id]
940 commit = repo[commit_id]
928 try:
941 try:
929 tree = commit.tree[path]
942 tree = commit.tree[path]
930 except KeyError:
943 except KeyError:
931 return None, None, None
944 return None, None, None
932
945
933 return tree.id.hex, tree.type, tree.filemode
946 return tree.id.hex, tree.type, tree.filemode
934 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
947 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
935
948
936 @reraise_safe_exceptions
949 @reraise_safe_exceptions
937 def tree_items(self, wire, tree_id):
950 def tree_items(self, wire, tree_id):
938 cache_on, context_uid, repo_id = self._cache_on(wire)
951 cache_on, context_uid, repo_id = self._cache_on(wire)
939 @self.region.conditional_cache_on_arguments(condition=cache_on)
952 @self.region.conditional_cache_on_arguments(condition=cache_on)
940 def _tree_items(_repo_id, _tree_id):
953 def _tree_items(_repo_id, _tree_id):
941
954
942 repo_init = self._factory.repo_libgit2(wire)
955 repo_init = self._factory.repo_libgit2(wire)
943 with repo_init as repo:
956 with repo_init as repo:
944 try:
957 try:
945 tree = repo[tree_id]
958 tree = repo[tree_id]
946 except KeyError:
959 except KeyError:
947 raise ObjectMissing('No tree with id: {}'.format(tree_id))
960 raise ObjectMissing('No tree with id: {}'.format(tree_id))
948
961
949 result = []
962 result = []
950 for item in tree:
963 for item in tree:
951 item_sha = item.hex
964 item_sha = item.hex
952 item_mode = item.filemode
965 item_mode = item.filemode
953 item_type = item.type
966 item_type = item.type
954
967
955 if item_type == 'commit':
968 if item_type == 'commit':
956 # NOTE(marcink): submodules we translate to 'link' for backward compat
969 # NOTE(marcink): submodules we translate to 'link' for backward compat
957 item_type = 'link'
970 item_type = 'link'
958
971
959 result.append((item.name, item_mode, item_sha, item_type))
972 result.append((item.name, item_mode, item_sha, item_type))
960 return result
973 return result
961 return _tree_items(repo_id, tree_id)
974 return _tree_items(repo_id, tree_id)
962
975
963 @reraise_safe_exceptions
976 @reraise_safe_exceptions
964 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
977 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
965 """
978 """
966 Old version that uses subprocess to call diff
979 Old version that uses subprocess to call diff
967 """
980 """
968
981
969 flags = [
982 flags = [
970 '-U%s' % context, '--patch',
983 '-U%s' % context, '--patch',
971 '--binary',
984 '--binary',
972 '--find-renames',
985 '--find-renames',
973 '--no-indent-heuristic',
986 '--no-indent-heuristic',
974 # '--indent-heuristic',
987 # '--indent-heuristic',
975 #'--full-index',
988 #'--full-index',
976 #'--abbrev=40'
989 #'--abbrev=40'
977 ]
990 ]
978
991
979 if opt_ignorews:
992 if opt_ignorews:
980 flags.append('--ignore-all-space')
993 flags.append('--ignore-all-space')
981
994
982 if commit_id_1 == self.EMPTY_COMMIT:
995 if commit_id_1 == self.EMPTY_COMMIT:
983 cmd = ['show'] + flags + [commit_id_2]
996 cmd = ['show'] + flags + [commit_id_2]
984 else:
997 else:
985 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
998 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
986
999
987 if file_filter:
1000 if file_filter:
988 cmd.extend(['--', file_filter])
1001 cmd.extend(['--', file_filter])
989
1002
990 diff, __ = self.run_git_command(wire, cmd)
1003 diff, __ = self.run_git_command(wire, cmd)
991 # If we used 'show' command, strip first few lines (until actual diff
1004 # If we used 'show' command, strip first few lines (until actual diff
992 # starts)
1005 # starts)
993 if commit_id_1 == self.EMPTY_COMMIT:
1006 if commit_id_1 == self.EMPTY_COMMIT:
994 lines = diff.splitlines()
1007 lines = diff.splitlines()
995 x = 0
1008 x = 0
996 for line in lines:
1009 for line in lines:
997 if line.startswith('diff'):
1010 if line.startswith('diff'):
998 break
1011 break
999 x += 1
1012 x += 1
1000 # Append new line just like 'diff' command do
1013 # Append new line just like 'diff' command do
1001 diff = '\n'.join(lines[x:]) + '\n'
1014 diff = '\n'.join(lines[x:]) + '\n'
1002 return diff
1015 return diff
1003
1016
1004 @reraise_safe_exceptions
1017 @reraise_safe_exceptions
1005 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1018 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1006 repo_init = self._factory.repo_libgit2(wire)
1019 repo_init = self._factory.repo_libgit2(wire)
1007 with repo_init as repo:
1020 with repo_init as repo:
1008 swap = True
1021 swap = True
1009 flags = 0
1022 flags = 0
1010 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1023 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1011
1024
1012 if opt_ignorews:
1025 if opt_ignorews:
1013 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1026 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1014
1027
1015 if commit_id_1 == self.EMPTY_COMMIT:
1028 if commit_id_1 == self.EMPTY_COMMIT:
1016 comm1 = repo[commit_id_2]
1029 comm1 = repo[commit_id_2]
1017 diff_obj = comm1.tree.diff_to_tree(
1030 diff_obj = comm1.tree.diff_to_tree(
1018 flags=flags, context_lines=context, swap=swap)
1031 flags=flags, context_lines=context, swap=swap)
1019
1032
1020 else:
1033 else:
1021 comm1 = repo[commit_id_2]
1034 comm1 = repo[commit_id_2]
1022 comm2 = repo[commit_id_1]
1035 comm2 = repo[commit_id_1]
1023 diff_obj = comm1.tree.diff_to_tree(
1036 diff_obj = comm1.tree.diff_to_tree(
1024 comm2.tree, flags=flags, context_lines=context, swap=swap)
1037 comm2.tree, flags=flags, context_lines=context, swap=swap)
1025 similar_flags = 0
1038 similar_flags = 0
1026 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1039 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1027 diff_obj.find_similar(flags=similar_flags)
1040 diff_obj.find_similar(flags=similar_flags)
1028
1041
1029 if file_filter:
1042 if file_filter:
1030 for p in diff_obj:
1043 for p in diff_obj:
1031 if p.delta.old_file.path == file_filter:
1044 if p.delta.old_file.path == file_filter:
1032 return p.patch or ''
1045 return p.patch or ''
1033 # fo matching path == no diff
1046 # fo matching path == no diff
1034 return ''
1047 return ''
1035 return diff_obj.patch or ''
1048 return diff_obj.patch or ''
1036
1049
1037 @reraise_safe_exceptions
1050 @reraise_safe_exceptions
1038 def node_history(self, wire, commit_id, path, limit):
1051 def node_history(self, wire, commit_id, path, limit):
1039 cache_on, context_uid, repo_id = self._cache_on(wire)
1052 cache_on, context_uid, repo_id = self._cache_on(wire)
1040 @self.region.conditional_cache_on_arguments(condition=cache_on)
1053 @self.region.conditional_cache_on_arguments(condition=cache_on)
1041 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1054 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1042 # optimize for n==1, rev-list is much faster for that use-case
1055 # optimize for n==1, rev-list is much faster for that use-case
1043 if limit == 1:
1056 if limit == 1:
1044 cmd = ['rev-list', '-1', commit_id, '--', path]
1057 cmd = ['rev-list', '-1', commit_id, '--', path]
1045 else:
1058 else:
1046 cmd = ['log']
1059 cmd = ['log']
1047 if limit:
1060 if limit:
1048 cmd.extend(['-n', str(safe_int(limit, 0))])
1061 cmd.extend(['-n', str(safe_int(limit, 0))])
1049 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1062 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1050
1063
1051 output, __ = self.run_git_command(wire, cmd)
1064 output, __ = self.run_git_command(wire, cmd)
1052 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1065 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1053
1066
1054 return [x for x in commit_ids]
1067 return [x for x in commit_ids]
1055 return _node_history(context_uid, repo_id, commit_id, path, limit)
1068 return _node_history(context_uid, repo_id, commit_id, path, limit)
1056
1069
1057 @reraise_safe_exceptions
1070 @reraise_safe_exceptions
1058 def node_annotate(self, wire, commit_id, path):
1071 def node_annotate(self, wire, commit_id, path):
1059
1072
1060 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1073 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1061 # -l ==> outputs long shas (and we need all 40 characters)
1074 # -l ==> outputs long shas (and we need all 40 characters)
1062 # --root ==> doesn't put '^' character for boundaries
1075 # --root ==> doesn't put '^' character for boundaries
1063 # -r commit_id ==> blames for the given commit
1076 # -r commit_id ==> blames for the given commit
1064 output, __ = self.run_git_command(wire, cmd)
1077 output, __ = self.run_git_command(wire, cmd)
1065
1078
1066 result = []
1079 result = []
1067 for i, blame_line in enumerate(output.split('\n')[:-1]):
1080 for i, blame_line in enumerate(output.split('\n')[:-1]):
1068 line_no = i + 1
1081 line_no = i + 1
1069 commit_id, line = re.split(r' ', blame_line, 1)
1082 commit_id, line = re.split(r' ', blame_line, 1)
1070 result.append((line_no, commit_id, line))
1083 result.append((line_no, commit_id, line))
1071 return result
1084 return result
1072
1085
1073 @reraise_safe_exceptions
1086 @reraise_safe_exceptions
1074 def update_server_info(self, wire):
1087 def update_server_info(self, wire):
1075 repo = self._factory.repo(wire)
1088 repo = self._factory.repo(wire)
1076 update_server_info(repo)
1089 update_server_info(repo)
1077
1090
1078 @reraise_safe_exceptions
1091 @reraise_safe_exceptions
1079 def get_all_commit_ids(self, wire):
1092 def get_all_commit_ids(self, wire):
1080
1093
1081 cache_on, context_uid, repo_id = self._cache_on(wire)
1094 cache_on, context_uid, repo_id = self._cache_on(wire)
1082 @self.region.conditional_cache_on_arguments(condition=cache_on)
1095 @self.region.conditional_cache_on_arguments(condition=cache_on)
1083 def _get_all_commit_ids(_context_uid, _repo_id):
1096 def _get_all_commit_ids(_context_uid, _repo_id):
1084
1097
1085 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1098 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1086 try:
1099 try:
1087 output, __ = self.run_git_command(wire, cmd)
1100 output, __ = self.run_git_command(wire, cmd)
1088 return output.splitlines()
1101 return output.splitlines()
1089 except Exception:
1102 except Exception:
1090 # Can be raised for empty repositories
1103 # Can be raised for empty repositories
1091 return []
1104 return []
1092 return _get_all_commit_ids(context_uid, repo_id)
1105 return _get_all_commit_ids(context_uid, repo_id)
1093
1106
1094 @reraise_safe_exceptions
1107 @reraise_safe_exceptions
1095 def run_git_command(self, wire, cmd, **opts):
1108 def run_git_command(self, wire, cmd, **opts):
1096 path = wire.get('path', None)
1109 path = wire.get('path', None)
1097
1110
1098 if path and os.path.isdir(path):
1111 if path and os.path.isdir(path):
1099 opts['cwd'] = path
1112 opts['cwd'] = path
1100
1113
1101 if '_bare' in opts:
1114 if '_bare' in opts:
1102 _copts = []
1115 _copts = []
1103 del opts['_bare']
1116 del opts['_bare']
1104 else:
1117 else:
1105 _copts = ['-c', 'core.quotepath=false', ]
1118 _copts = ['-c', 'core.quotepath=false', ]
1106 safe_call = False
1119 safe_call = False
1107 if '_safe' in opts:
1120 if '_safe' in opts:
1108 # no exc on failure
1121 # no exc on failure
1109 del opts['_safe']
1122 del opts['_safe']
1110 safe_call = True
1123 safe_call = True
1111
1124
1112 if '_copts' in opts:
1125 if '_copts' in opts:
1113 _copts.extend(opts['_copts'] or [])
1126 _copts.extend(opts['_copts'] or [])
1114 del opts['_copts']
1127 del opts['_copts']
1115
1128
1116 gitenv = os.environ.copy()
1129 gitenv = os.environ.copy()
1117 gitenv.update(opts.pop('extra_env', {}))
1130 gitenv.update(opts.pop('extra_env', {}))
1118 # need to clean fix GIT_DIR !
1131 # need to clean fix GIT_DIR !
1119 if 'GIT_DIR' in gitenv:
1132 if 'GIT_DIR' in gitenv:
1120 del gitenv['GIT_DIR']
1133 del gitenv['GIT_DIR']
1121 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1134 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1122 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1135 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1123
1136
1124 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1137 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1125 _opts = {'env': gitenv, 'shell': False}
1138 _opts = {'env': gitenv, 'shell': False}
1126
1139
1127 try:
1140 try:
1128 _opts.update(opts)
1141 _opts.update(opts)
1129 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
1142 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
1130
1143
1131 return ''.join(p), ''.join(p.error)
1144 return ''.join(p), ''.join(p.error)
1132 except (EnvironmentError, OSError) as err:
1145 except (EnvironmentError, OSError) as err:
1133 cmd = ' '.join(cmd) # human friendly CMD
1146 cmd = ' '.join(cmd) # human friendly CMD
1134 tb_err = ("Couldn't run git command (%s).\n"
1147 tb_err = ("Couldn't run git command (%s).\n"
1135 "Original error was:%s\n"
1148 "Original error was:%s\n"
1136 "Call options:%s\n"
1149 "Call options:%s\n"
1137 % (cmd, err, _opts))
1150 % (cmd, err, _opts))
1138 log.exception(tb_err)
1151 log.exception(tb_err)
1139 if safe_call:
1152 if safe_call:
1140 return '', err
1153 return '', err
1141 else:
1154 else:
1142 raise exceptions.VcsException()(tb_err)
1155 raise exceptions.VcsException()(tb_err)
1143
1156
1144 @reraise_safe_exceptions
1157 @reraise_safe_exceptions
1145 def install_hooks(self, wire, force=False):
1158 def install_hooks(self, wire, force=False):
1146 from vcsserver.hook_utils import install_git_hooks
1159 from vcsserver.hook_utils import install_git_hooks
1147 bare = self.bare(wire)
1160 bare = self.bare(wire)
1148 path = wire['path']
1161 path = wire['path']
1149 return install_git_hooks(path, bare, force_create=force)
1162 return install_git_hooks(path, bare, force_create=force)
1150
1163
1151 @reraise_safe_exceptions
1164 @reraise_safe_exceptions
1152 def get_hooks_info(self, wire):
1165 def get_hooks_info(self, wire):
1153 from vcsserver.hook_utils import (
1166 from vcsserver.hook_utils import (
1154 get_git_pre_hook_version, get_git_post_hook_version)
1167 get_git_pre_hook_version, get_git_post_hook_version)
1155 bare = self.bare(wire)
1168 bare = self.bare(wire)
1156 path = wire['path']
1169 path = wire['path']
1157 return {
1170 return {
1158 'pre_version': get_git_pre_hook_version(path, bare),
1171 'pre_version': get_git_pre_hook_version(path, bare),
1159 'post_version': get_git_post_hook_version(path, bare),
1172 'post_version': get_git_post_hook_version(path, bare),
1160 }
1173 }
@@ -1,939 +1,952 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23 import traceback
23 import traceback
24
24
25 from hgext import largefiles, rebase
25 from hgext import largefiles, rebase
26 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29 from mercurial import verify
29 from mercurial import verify
30
30
31 import vcsserver
31 import vcsserver
32 from vcsserver import exceptions
32 from vcsserver import exceptions
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 from vcsserver.hgcompat import (
34 from vcsserver.hgcompat import (
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 RepoLookupError, InterventionRequired, RequirementError)
39 RepoLookupError, InterventionRequired, RequirementError)
40 from vcsserver.vcs_base import RemoteBase
40 from vcsserver.vcs_base import RemoteBase
41
41
42 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
43
43
44
44
def make_ui_from_config(repo_config):
    """
    Build a mercurial ui object configured from ``repo_config``.

    ``repo_config`` is an iterable of ``(section, option, value)`` tuples.
    The returned ui mirrors all mercurial output into this module's logger,
    runs quietly and single-threaded, and disables the largefiles extension
    unless it is explicitly configured.
    """

    class LoggingUI(ui.ui):
        # Route every ui channel into our logger so mercurial output shows
        # up in the vcsserver logs.
        def status(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).status(*msg, **opts)

        def warn(self, *msg, **opts):
            # Logger.warn is a deprecated alias -- use warning() instead
            log.warning(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).warn(*msg, **opts)

        def error(self, *msg, **opts):
            log.error(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).error(*msg, **opts)

        def note(self, *msg, **opts):
            log.info(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).note(*msg, **opts)

        def debug(self, *msg, **opts):
            log.debug(' '.join(msg).rstrip('\n'))
            super(LoggingUI, self).debug(*msg, **opts)

    baseui = LoggingUI()

    # clean the baseui object
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        baseui.setconfig(section, option, value)

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig('ui', 'quiet', 'true')

    baseui.setconfig('ui', 'paginate', 'never')
    # for better Error reporting of Mercurial
    baseui.setconfig('ui', 'message-output', 'stderr')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig('worker', 'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from repositories hgrc file. Recent
    # mercurial versions enable largefiles in hgrc on clone from largefile
    # repo.
    if not baseui.hasconfig('extensions', 'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig('extensions', 'largefiles', '!')

    return baseui
98
98
99
99
def reraise_safe_exceptions(func):
    """
    Decorator for converting mercurial exceptions to something neutral.

    Maps mercurial's exception hierarchy onto vcsserver's transport-safe
    exceptions. Except-clause order matters: RepoLookupError must be tried
    before its base class RepoError. Exceptions already tagged with a
    ``_vcs_kind`` attribute are re-raised untouched.
    """
    import functools

    @functools.wraps(func)  # keep name/doc of the wrapped remote method
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (Abort, InterventionRequired) as e:
            raise_from_original(exceptions.AbortException(e))
        except RepoLookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except RequirementError as e:
            raise_from_original(exceptions.RequirementException(e))
        except RepoError as e:
            raise_from_original(exceptions.VcsException(e))
        except LookupError as e:
            raise_from_original(exceptions.LookupException(e))
        except Exception as e:
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException(e))

            raise
    return wrapper
123
123
124
124
class MercurialFactory(RepoFactory):
    """Factory producing mercurial repository instances from wire configs."""

    repo_type = 'hg'

    def _create_config(self, config, hooks=True):
        """
        Build a mercurial ui from ``config``; with ``hooks=False`` strip
        RhodeCode's own hook entries so internal operations don't fire them.
        """
        if not hooks:
            blocked_hooks = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            config = [
                (section, option, value)
                for section, option, value in config
                if not (section == 'hooks' and option in blocked_hooks)]

        return make_ui_from_config(config)

    def _create_repo(self, wire, create):
        baseui = self._create_config(wire["config"])
        return instance(baseui, wire["path"], create)

    def repo(self, wire, create=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create)
152
152
153
153
154 class HgRemote(RemoteBase):
154 class HgRemote(RemoteBase):
155
155
156 def __init__(self, factory):
156 def __init__(self, factory):
157 self._factory = factory
157 self._factory = factory
158 self._bulk_methods = {
158 self._bulk_methods = {
159 "affected_files": self.ctx_files,
159 "affected_files": self.ctx_files,
160 "author": self.ctx_user,
160 "author": self.ctx_user,
161 "branch": self.ctx_branch,
161 "branch": self.ctx_branch,
162 "children": self.ctx_children,
162 "children": self.ctx_children,
163 "date": self.ctx_date,
163 "date": self.ctx_date,
164 "message": self.ctx_description,
164 "message": self.ctx_description,
165 "parents": self.ctx_parents,
165 "parents": self.ctx_parents,
166 "status": self.ctx_status,
166 "status": self.ctx_status,
167 "obsolete": self.ctx_obsolete,
167 "obsolete": self.ctx_obsolete,
168 "phase": self.ctx_phase,
168 "phase": self.ctx_phase,
169 "hidden": self.ctx_hidden,
169 "hidden": self.ctx_hidden,
170 "_file_paths": self.ctx_list,
170 "_file_paths": self.ctx_list,
171 }
171 }
172
172
173 def _get_ctx(self, repo, ref):
173 def _get_ctx(self, repo, ref):
174 return get_ctx(repo, ref)
174 return get_ctx(repo, ref)
175
175
176 @reraise_safe_exceptions
176 @reraise_safe_exceptions
177 def discover_hg_version(self):
177 def discover_hg_version(self):
178 from mercurial import util
178 from mercurial import util
179 return util.version()
179 return util.version()
180
180
181 @reraise_safe_exceptions
181 @reraise_safe_exceptions
182 def is_empty(self, wire):
182 def is_empty(self, wire):
183 repo = self._factory.repo(wire)
183 repo = self._factory.repo(wire)
184
184
185 try:
185 try:
186 return len(repo) == 0
186 return len(repo) == 0
187 except Exception:
187 except Exception:
188 log.exception("failed to read object_store")
188 log.exception("failed to read object_store")
189 return False
189 return False
190
190
191 @reraise_safe_exceptions
191 @reraise_safe_exceptions
192 def archive_repo(self, archive_path, mtime, file_info, kind):
192 def archive_repo(self, archive_path, mtime, file_info, kind):
193 if kind == "tgz":
193 if kind == "tgz":
194 archiver = archival.tarit(archive_path, mtime, "gz")
194 archiver = archival.tarit(archive_path, mtime, "gz")
195 elif kind == "tbz2":
195 elif kind == "tbz2":
196 archiver = archival.tarit(archive_path, mtime, "bz2")
196 archiver = archival.tarit(archive_path, mtime, "bz2")
197 elif kind == 'zip':
197 elif kind == 'zip':
198 archiver = archival.zipit(archive_path, mtime)
198 archiver = archival.zipit(archive_path, mtime)
199 else:
199 else:
200 raise exceptions.ArchiveException()(
200 raise exceptions.ArchiveException()(
201 'Remote does not support: "%s".' % kind)
201 'Remote does not support: "%s".' % kind)
202
202
203 for f_path, f_mode, f_is_link, f_content in file_info:
203 for f_path, f_mode, f_is_link, f_content in file_info:
204 archiver.addfile(f_path, f_mode, f_is_link, f_content)
204 archiver.addfile(f_path, f_mode, f_is_link, f_content)
205 archiver.done()
205 archiver.done()
206
206
207 @reraise_safe_exceptions
207 @reraise_safe_exceptions
208 def bookmarks(self, wire):
208 def bookmarks(self, wire):
209 cache_on, context_uid, repo_id = self._cache_on(wire)
209 cache_on, context_uid, repo_id = self._cache_on(wire)
210 @self.region.conditional_cache_on_arguments(condition=cache_on)
210 @self.region.conditional_cache_on_arguments(condition=cache_on)
211 def _bookmarks(_context_uid, _repo_id):
211 def _bookmarks(_context_uid, _repo_id):
212 repo = self._factory.repo(wire)
212 repo = self._factory.repo(wire)
213 return dict(repo._bookmarks)
213 return dict(repo._bookmarks)
214
214
215 return _bookmarks(context_uid, repo_id)
215 return _bookmarks(context_uid, repo_id)
216
216
217 @reraise_safe_exceptions
217 @reraise_safe_exceptions
218 def branches(self, wire, normal, closed):
218 def branches(self, wire, normal, closed):
219 cache_on, context_uid, repo_id = self._cache_on(wire)
219 cache_on, context_uid, repo_id = self._cache_on(wire)
220 @self.region.conditional_cache_on_arguments(condition=cache_on)
220 @self.region.conditional_cache_on_arguments(condition=cache_on)
221 def _branches(_context_uid, _repo_id, _normal, _closed):
221 def _branches(_context_uid, _repo_id, _normal, _closed):
222 repo = self._factory.repo(wire)
222 repo = self._factory.repo(wire)
223 iter_branches = repo.branchmap().iterbranches()
223 iter_branches = repo.branchmap().iterbranches()
224 bt = {}
224 bt = {}
225 for branch_name, _heads, tip, is_closed in iter_branches:
225 for branch_name, _heads, tip, is_closed in iter_branches:
226 if normal and not is_closed:
226 if normal and not is_closed:
227 bt[branch_name] = tip
227 bt[branch_name] = tip
228 if closed and is_closed:
228 if closed and is_closed:
229 bt[branch_name] = tip
229 bt[branch_name] = tip
230
230
231 return bt
231 return bt
232
232
233 return _branches(context_uid, repo_id, normal, closed)
233 return _branches(context_uid, repo_id, normal, closed)
234
234
235 @reraise_safe_exceptions
235 @reraise_safe_exceptions
236 def bulk_request(self, wire, commit_id, pre_load):
236 def bulk_request(self, wire, commit_id, pre_load):
237 cache_on, context_uid, repo_id = self._cache_on(wire)
237 cache_on, context_uid, repo_id = self._cache_on(wire)
238 @self.region.conditional_cache_on_arguments(condition=cache_on)
238 @self.region.conditional_cache_on_arguments(condition=cache_on)
239 def _bulk_request(_repo_id, _commit_id, _pre_load):
239 def _bulk_request(_repo_id, _commit_id, _pre_load):
240 result = {}
240 result = {}
241 for attr in pre_load:
241 for attr in pre_load:
242 try:
242 try:
243 method = self._bulk_methods[attr]
243 method = self._bulk_methods[attr]
244 result[attr] = method(wire, commit_id)
244 result[attr] = method(wire, commit_id)
245 except KeyError as e:
245 except KeyError as e:
246 raise exceptions.VcsException(e)(
246 raise exceptions.VcsException(e)(
247 'Unknown bulk attribute: "%s"' % attr)
247 'Unknown bulk attribute: "%s"' % attr)
248 return result
248 return result
249
249
250 return _bulk_request(repo_id, commit_id, sorted(pre_load))
250 return _bulk_request(repo_id, commit_id, sorted(pre_load))
251
251
252 @reraise_safe_exceptions
252 @reraise_safe_exceptions
253 def ctx_branch(self, wire, commit_id):
253 def ctx_branch(self, wire, commit_id):
254 cache_on, context_uid, repo_id = self._cache_on(wire)
254 cache_on, context_uid, repo_id = self._cache_on(wire)
255 @self.region.conditional_cache_on_arguments(condition=cache_on)
255 @self.region.conditional_cache_on_arguments(condition=cache_on)
256 def _ctx_branch(_repo_id, _commit_id):
256 def _ctx_branch(_repo_id, _commit_id):
257 repo = self._factory.repo(wire)
257 repo = self._factory.repo(wire)
258 ctx = self._get_ctx(repo, commit_id)
258 ctx = self._get_ctx(repo, commit_id)
259 return ctx.branch()
259 return ctx.branch()
260 return _ctx_branch(repo_id, commit_id)
260 return _ctx_branch(repo_id, commit_id)
261
261
262 @reraise_safe_exceptions
262 @reraise_safe_exceptions
263 def ctx_date(self, wire, commit_id):
263 def ctx_date(self, wire, commit_id):
264 cache_on, context_uid, repo_id = self._cache_on(wire)
264 cache_on, context_uid, repo_id = self._cache_on(wire)
265 @self.region.conditional_cache_on_arguments(condition=cache_on)
265 @self.region.conditional_cache_on_arguments(condition=cache_on)
266 def _ctx_date(_repo_id, _commit_id):
266 def _ctx_date(_repo_id, _commit_id):
267 repo = self._factory.repo(wire)
267 repo = self._factory.repo(wire)
268 ctx = self._get_ctx(repo, commit_id)
268 ctx = self._get_ctx(repo, commit_id)
269 return ctx.date()
269 return ctx.date()
270 return _ctx_date(repo_id, commit_id)
270 return _ctx_date(repo_id, commit_id)
271
271
272 @reraise_safe_exceptions
272 @reraise_safe_exceptions
273 def ctx_description(self, wire, revision):
273 def ctx_description(self, wire, revision):
274 repo = self._factory.repo(wire)
274 repo = self._factory.repo(wire)
275 ctx = self._get_ctx(repo, revision)
275 ctx = self._get_ctx(repo, revision)
276 return ctx.description()
276 return ctx.description()
277
277
278 @reraise_safe_exceptions
278 @reraise_safe_exceptions
279 def ctx_files(self, wire, commit_id):
279 def ctx_files(self, wire, commit_id):
280 cache_on, context_uid, repo_id = self._cache_on(wire)
280 cache_on, context_uid, repo_id = self._cache_on(wire)
281 @self.region.conditional_cache_on_arguments(condition=cache_on)
281 @self.region.conditional_cache_on_arguments(condition=cache_on)
282 def _ctx_files(_repo_id, _commit_id):
282 def _ctx_files(_repo_id, _commit_id):
283 repo = self._factory.repo(wire)
283 repo = self._factory.repo(wire)
284 ctx = self._get_ctx(repo, commit_id)
284 ctx = self._get_ctx(repo, commit_id)
285 return ctx.files()
285 return ctx.files()
286
286
287 return _ctx_files(repo_id, commit_id)
287 return _ctx_files(repo_id, commit_id)
288
288
289 @reraise_safe_exceptions
289 @reraise_safe_exceptions
290 def ctx_list(self, path, revision):
290 def ctx_list(self, path, revision):
291 repo = self._factory.repo(path)
291 repo = self._factory.repo(path)
292 ctx = self._get_ctx(repo, revision)
292 ctx = self._get_ctx(repo, revision)
293 return list(ctx)
293 return list(ctx)
294
294
295 @reraise_safe_exceptions
295 @reraise_safe_exceptions
296 def ctx_parents(self, wire, commit_id):
296 def ctx_parents(self, wire, commit_id):
297 cache_on, context_uid, repo_id = self._cache_on(wire)
297 cache_on, context_uid, repo_id = self._cache_on(wire)
298 @self.region.conditional_cache_on_arguments(condition=cache_on)
298 @self.region.conditional_cache_on_arguments(condition=cache_on)
299 def _ctx_parents(_repo_id, _commit_id):
299 def _ctx_parents(_repo_id, _commit_id):
300 repo = self._factory.repo(wire)
300 repo = self._factory.repo(wire)
301 ctx = self._get_ctx(repo, commit_id)
301 ctx = self._get_ctx(repo, commit_id)
302 return [parent.rev() for parent in ctx.parents()
302 return [parent.rev() for parent in ctx.parents()
303 if not (parent.hidden() or parent.obsolete())]
303 if not (parent.hidden() or parent.obsolete())]
304
304
305 return _ctx_parents(repo_id, commit_id)
305 return _ctx_parents(repo_id, commit_id)
306
306
307 @reraise_safe_exceptions
307 @reraise_safe_exceptions
308 def ctx_children(self, wire, commit_id):
308 def ctx_children(self, wire, commit_id):
309 cache_on, context_uid, repo_id = self._cache_on(wire)
309 cache_on, context_uid, repo_id = self._cache_on(wire)
310 @self.region.conditional_cache_on_arguments(condition=cache_on)
310 @self.region.conditional_cache_on_arguments(condition=cache_on)
311 def _ctx_children(_repo_id, _commit_id):
311 def _ctx_children(_repo_id, _commit_id):
312 repo = self._factory.repo(wire)
312 repo = self._factory.repo(wire)
313 ctx = self._get_ctx(repo, commit_id)
313 ctx = self._get_ctx(repo, commit_id)
314 return [child.rev() for child in ctx.children()
314 return [child.rev() for child in ctx.children()
315 if not (child.hidden() or child.obsolete())]
315 if not (child.hidden() or child.obsolete())]
316
316
317 return _ctx_children(repo_id, commit_id)
317 return _ctx_children(repo_id, commit_id)
318
318
319 @reraise_safe_exceptions
319 @reraise_safe_exceptions
320 def ctx_phase(self, wire, commit_id):
320 def ctx_phase(self, wire, commit_id):
321 cache_on, context_uid, repo_id = self._cache_on(wire)
321 cache_on, context_uid, repo_id = self._cache_on(wire)
322 @self.region.conditional_cache_on_arguments(condition=cache_on)
322 @self.region.conditional_cache_on_arguments(condition=cache_on)
323 def _ctx_phase(_context_uid, _repo_id, _commit_id):
323 def _ctx_phase(_context_uid, _repo_id, _commit_id):
324 repo = self._factory.repo(wire)
324 repo = self._factory.repo(wire)
325 ctx = self._get_ctx(repo, commit_id)
325 ctx = self._get_ctx(repo, commit_id)
326 # public=0, draft=1, secret=3
326 # public=0, draft=1, secret=3
327 return ctx.phase()
327 return ctx.phase()
328 return _ctx_phase(context_uid, repo_id, commit_id)
328 return _ctx_phase(context_uid, repo_id, commit_id)
329
329
330 @reraise_safe_exceptions
330 @reraise_safe_exceptions
331 def ctx_obsolete(self, wire, commit_id):
331 def ctx_obsolete(self, wire, commit_id):
332 cache_on, context_uid, repo_id = self._cache_on(wire)
332 cache_on, context_uid, repo_id = self._cache_on(wire)
333 @self.region.conditional_cache_on_arguments(condition=cache_on)
333 @self.region.conditional_cache_on_arguments(condition=cache_on)
334 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
334 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
335 repo = self._factory.repo(wire)
335 repo = self._factory.repo(wire)
336 ctx = self._get_ctx(repo, commit_id)
336 ctx = self._get_ctx(repo, commit_id)
337 return ctx.obsolete()
337 return ctx.obsolete()
338 return _ctx_obsolete(context_uid, repo_id, commit_id)
338 return _ctx_obsolete(context_uid, repo_id, commit_id)
339
339
340 @reraise_safe_exceptions
340 @reraise_safe_exceptions
341 def ctx_hidden(self, wire, commit_id):
341 def ctx_hidden(self, wire, commit_id):
342 cache_on, context_uid, repo_id = self._cache_on(wire)
342 cache_on, context_uid, repo_id = self._cache_on(wire)
343 @self.region.conditional_cache_on_arguments(condition=cache_on)
343 @self.region.conditional_cache_on_arguments(condition=cache_on)
344 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
344 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
345 repo = self._factory.repo(wire)
345 repo = self._factory.repo(wire)
346 ctx = self._get_ctx(repo, commit_id)
346 ctx = self._get_ctx(repo, commit_id)
347 return ctx.hidden()
347 return ctx.hidden()
348 return _ctx_hidden(context_uid, repo_id, commit_id)
348 return _ctx_hidden(context_uid, repo_id, commit_id)
349
349
350 @reraise_safe_exceptions
350 @reraise_safe_exceptions
351 def ctx_substate(self, wire, revision):
351 def ctx_substate(self, wire, revision):
352 repo = self._factory.repo(wire)
352 repo = self._factory.repo(wire)
353 ctx = self._get_ctx(repo, revision)
353 ctx = self._get_ctx(repo, revision)
354 return ctx.substate
354 return ctx.substate
355
355
356 @reraise_safe_exceptions
356 @reraise_safe_exceptions
357 def ctx_status(self, wire, revision):
357 def ctx_status(self, wire, revision):
358 repo = self._factory.repo(wire)
358 repo = self._factory.repo(wire)
359 ctx = self._get_ctx(repo, revision)
359 ctx = self._get_ctx(repo, revision)
360 status = repo[ctx.p1().node()].status(other=ctx.node())
360 status = repo[ctx.p1().node()].status(other=ctx.node())
361 # object of status (odd, custom named tuple in mercurial) is not
361 # object of status (odd, custom named tuple in mercurial) is not
362 # correctly serializable, we make it a list, as the underling
362 # correctly serializable, we make it a list, as the underling
363 # API expects this to be a list
363 # API expects this to be a list
364 return list(status)
364 return list(status)
365
365
366 @reraise_safe_exceptions
366 @reraise_safe_exceptions
367 def ctx_user(self, wire, revision):
367 def ctx_user(self, wire, revision):
368 repo = self._factory.repo(wire)
368 repo = self._factory.repo(wire)
369 ctx = self._get_ctx(repo, revision)
369 ctx = self._get_ctx(repo, revision)
370 return ctx.user()
370 return ctx.user()
371
371
372 @reraise_safe_exceptions
372 @reraise_safe_exceptions
373 def check_url(self, url, config):
373 def check_url(self, url, config):
374 _proto = None
374 _proto = None
375 if '+' in url[:url.find('://')]:
375 if '+' in url[:url.find('://')]:
376 _proto = url[0:url.find('+')]
376 _proto = url[0:url.find('+')]
377 url = url[url.find('+') + 1:]
377 url = url[url.find('+') + 1:]
378 handlers = []
378 handlers = []
379 url_obj = url_parser(url)
379 url_obj = url_parser(url)
380 test_uri, authinfo = url_obj.authinfo()
380 test_uri, authinfo = url_obj.authinfo()
381 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
381 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
382 url_obj.query = obfuscate_qs(url_obj.query)
382 url_obj.query = obfuscate_qs(url_obj.query)
383
383
384 cleaned_uri = str(url_obj)
384 cleaned_uri = str(url_obj)
385 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
385 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
386
386
387 if authinfo:
387 if authinfo:
388 # create a password manager
388 # create a password manager
389 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
389 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
390 passmgr.add_password(*authinfo)
390 passmgr.add_password(*authinfo)
391
391
392 handlers.extend((httpbasicauthhandler(passmgr),
392 handlers.extend((httpbasicauthhandler(passmgr),
393 httpdigestauthhandler(passmgr)))
393 httpdigestauthhandler(passmgr)))
394
394
395 o = urllib2.build_opener(*handlers)
395 o = urllib2.build_opener(*handlers)
396 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
396 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
397 ('Accept', 'application/mercurial-0.1')]
397 ('Accept', 'application/mercurial-0.1')]
398
398
399 q = {"cmd": 'between'}
399 q = {"cmd": 'between'}
400 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
400 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
401 qs = '?%s' % urllib.urlencode(q)
401 qs = '?%s' % urllib.urlencode(q)
402 cu = "%s%s" % (test_uri, qs)
402 cu = "%s%s" % (test_uri, qs)
403 req = urllib2.Request(cu, None, {})
403 req = urllib2.Request(cu, None, {})
404
404
405 try:
405 try:
406 log.debug("Trying to open URL %s", cleaned_uri)
406 log.debug("Trying to open URL %s", cleaned_uri)
407 resp = o.open(req)
407 resp = o.open(req)
408 if resp.code != 200:
408 if resp.code != 200:
409 raise exceptions.URLError()('Return Code is not 200')
409 raise exceptions.URLError()('Return Code is not 200')
410 except Exception as e:
410 except Exception as e:
411 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
411 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
412 # means it cannot be cloned
412 # means it cannot be cloned
413 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
413 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
414
414
415 # now check if it's a proper hg repo, but don't do it for svn
415 # now check if it's a proper hg repo, but don't do it for svn
416 try:
416 try:
417 if _proto == 'svn':
417 if _proto == 'svn':
418 pass
418 pass
419 else:
419 else:
420 # check for pure hg repos
420 # check for pure hg repos
421 log.debug(
421 log.debug(
422 "Verifying if URL is a Mercurial repository: %s",
422 "Verifying if URL is a Mercurial repository: %s",
423 cleaned_uri)
423 cleaned_uri)
424 ui = make_ui_from_config(config)
424 ui = make_ui_from_config(config)
425 peer_checker = makepeer(ui, url)
425 peer_checker = makepeer(ui, url)
426 peer_checker.lookup('tip')
426 peer_checker.lookup('tip')
427 except Exception as e:
427 except Exception as e:
428 log.warning("URL is not a valid Mercurial repository: %s",
428 log.warning("URL is not a valid Mercurial repository: %s",
429 cleaned_uri)
429 cleaned_uri)
430 raise exceptions.URLError(e)(
430 raise exceptions.URLError(e)(
431 "url [%s] does not look like an hg repo org_exc: %s"
431 "url [%s] does not look like an hg repo org_exc: %s"
432 % (cleaned_uri, e))
432 % (cleaned_uri, e))
433
433
434 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
434 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
435 return True
435 return True
436
436
437 @reraise_safe_exceptions
437 @reraise_safe_exceptions
438 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
438 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
439 repo = self._factory.repo(wire)
439 repo = self._factory.repo(wire)
440
440
441 if file_filter:
441 if file_filter:
442 match_filter = match(file_filter[0], '', [file_filter[1]])
442 match_filter = match(file_filter[0], '', [file_filter[1]])
443 else:
443 else:
444 match_filter = file_filter
444 match_filter = file_filter
445 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
445 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
446
446
447 try:
447 try:
448 return "".join(patch.diff(
448 return "".join(patch.diff(
449 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
449 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
450 except RepoLookupError as e:
450 except RepoLookupError as e:
451 raise exceptions.LookupException(e)()
451 raise exceptions.LookupException(e)()
452
452
453 @reraise_safe_exceptions
453 @reraise_safe_exceptions
454 def node_history(self, wire, revision, path, limit):
454 def node_history(self, wire, revision, path, limit):
455 cache_on, context_uid, repo_id = self._cache_on(wire)
455 cache_on, context_uid, repo_id = self._cache_on(wire)
456 @self.region.conditional_cache_on_arguments(condition=cache_on)
456 @self.region.conditional_cache_on_arguments(condition=cache_on)
457 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
457 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
458 repo = self._factory.repo(wire)
458 repo = self._factory.repo(wire)
459
459
460 ctx = self._get_ctx(repo, revision)
460 ctx = self._get_ctx(repo, revision)
461 fctx = ctx.filectx(path)
461 fctx = ctx.filectx(path)
462
462
463 def history_iter():
463 def history_iter():
464 limit_rev = fctx.rev()
464 limit_rev = fctx.rev()
465 for obj in reversed(list(fctx.filelog())):
465 for obj in reversed(list(fctx.filelog())):
466 obj = fctx.filectx(obj)
466 obj = fctx.filectx(obj)
467 ctx = obj.changectx()
467 ctx = obj.changectx()
468 if ctx.hidden() or ctx.obsolete():
468 if ctx.hidden() or ctx.obsolete():
469 continue
469 continue
470
470
471 if limit_rev >= obj.rev():
471 if limit_rev >= obj.rev():
472 yield obj
472 yield obj
473
473
474 history = []
474 history = []
475 for cnt, obj in enumerate(history_iter()):
475 for cnt, obj in enumerate(history_iter()):
476 if limit and cnt >= limit:
476 if limit and cnt >= limit:
477 break
477 break
478 history.append(hex(obj.node()))
478 history.append(hex(obj.node()))
479
479
480 return [x for x in history]
480 return [x for x in history]
481 return _node_history(context_uid, repo_id, revision, path, limit)
481 return _node_history(context_uid, repo_id, revision, path, limit)
482
482
483 @reraise_safe_exceptions
483 @reraise_safe_exceptions
484 def node_history_untill(self, wire, revision, path, limit):
484 def node_history_untill(self, wire, revision, path, limit):
485 cache_on, context_uid, repo_id = self._cache_on(wire)
485 cache_on, context_uid, repo_id = self._cache_on(wire)
486 @self.region.conditional_cache_on_arguments(condition=cache_on)
486 @self.region.conditional_cache_on_arguments(condition=cache_on)
487 def _node_history_until(_context_uid, _repo_id):
487 def _node_history_until(_context_uid, _repo_id):
488 repo = self._factory.repo(wire)
488 repo = self._factory.repo(wire)
489 ctx = self._get_ctx(repo, revision)
489 ctx = self._get_ctx(repo, revision)
490 fctx = ctx.filectx(path)
490 fctx = ctx.filectx(path)
491
491
492 file_log = list(fctx.filelog())
492 file_log = list(fctx.filelog())
493 if limit:
493 if limit:
494 # Limit to the last n items
494 # Limit to the last n items
495 file_log = file_log[-limit:]
495 file_log = file_log[-limit:]
496
496
497 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
497 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
498 return _node_history_until(context_uid, repo_id, revision, path, limit)
498 return _node_history_until(context_uid, repo_id, revision, path, limit)
499
499
500 @reraise_safe_exceptions
500 @reraise_safe_exceptions
501 def fctx_annotate(self, wire, revision, path):
501 def fctx_annotate(self, wire, revision, path):
502 repo = self._factory.repo(wire)
502 repo = self._factory.repo(wire)
503 ctx = self._get_ctx(repo, revision)
503 ctx = self._get_ctx(repo, revision)
504 fctx = ctx.filectx(path)
504 fctx = ctx.filectx(path)
505
505
506 result = []
506 result = []
507 for i, annotate_obj in enumerate(fctx.annotate(), 1):
507 for i, annotate_obj in enumerate(fctx.annotate(), 1):
508 ln_no = i
508 ln_no = i
509 sha = hex(annotate_obj.fctx.node())
509 sha = hex(annotate_obj.fctx.node())
510 content = annotate_obj.text
510 content = annotate_obj.text
511 result.append((ln_no, sha, content))
511 result.append((ln_no, sha, content))
512 return result
512 return result
513
513
514 @reraise_safe_exceptions
514 @reraise_safe_exceptions
515 def fctx_node_data(self, wire, revision, path):
515 def fctx_node_data(self, wire, revision, path):
516 repo = self._factory.repo(wire)
516 repo = self._factory.repo(wire)
517 ctx = self._get_ctx(repo, revision)
517 ctx = self._get_ctx(repo, revision)
518 fctx = ctx.filectx(path)
518 fctx = ctx.filectx(path)
519 return fctx.data()
519 return fctx.data()
520
520
521 @reraise_safe_exceptions
521 @reraise_safe_exceptions
522 def fctx_flags(self, wire, commit_id, path):
522 def fctx_flags(self, wire, commit_id, path):
523 cache_on, context_uid, repo_id = self._cache_on(wire)
523 cache_on, context_uid, repo_id = self._cache_on(wire)
524 @self.region.conditional_cache_on_arguments(condition=cache_on)
524 @self.region.conditional_cache_on_arguments(condition=cache_on)
525 def _fctx_flags(_repo_id, _commit_id, _path):
525 def _fctx_flags(_repo_id, _commit_id, _path):
526 repo = self._factory.repo(wire)
526 repo = self._factory.repo(wire)
527 ctx = self._get_ctx(repo, commit_id)
527 ctx = self._get_ctx(repo, commit_id)
528 fctx = ctx.filectx(path)
528 fctx = ctx.filectx(path)
529 return fctx.flags()
529 return fctx.flags()
530
530
531 return _fctx_flags(repo_id, commit_id, path)
531 return _fctx_flags(repo_id, commit_id, path)
532
532
533 @reraise_safe_exceptions
533 @reraise_safe_exceptions
534 def fctx_size(self, wire, commit_id, path):
534 def fctx_size(self, wire, commit_id, path):
535 cache_on, context_uid, repo_id = self._cache_on(wire)
535 cache_on, context_uid, repo_id = self._cache_on(wire)
536 @self.region.conditional_cache_on_arguments(condition=cache_on)
536 @self.region.conditional_cache_on_arguments(condition=cache_on)
537 def _fctx_size(_repo_id, _revision, _path):
537 def _fctx_size(_repo_id, _revision, _path):
538 repo = self._factory.repo(wire)
538 repo = self._factory.repo(wire)
539 ctx = self._get_ctx(repo, commit_id)
539 ctx = self._get_ctx(repo, commit_id)
540 fctx = ctx.filectx(path)
540 fctx = ctx.filectx(path)
541 return fctx.size()
541 return fctx.size()
542 return _fctx_size(repo_id, commit_id, path)
542 return _fctx_size(repo_id, commit_id, path)
543
543
544 @reraise_safe_exceptions
544 @reraise_safe_exceptions
545 def get_all_commit_ids(self, wire, name):
545 def get_all_commit_ids(self, wire, name):
546 cache_on, context_uid, repo_id = self._cache_on(wire)
546 cache_on, context_uid, repo_id = self._cache_on(wire)
547 @self.region.conditional_cache_on_arguments(condition=cache_on)
547 @self.region.conditional_cache_on_arguments(condition=cache_on)
548 def _get_all_commit_ids(_context_uid, _repo_id, _name):
548 def _get_all_commit_ids(_context_uid, _repo_id, _name):
549 repo = self._factory.repo(wire)
549 repo = self._factory.repo(wire)
550 repo = repo.filtered(name)
550 repo = repo.filtered(name)
551 revs = map(lambda x: hex(x[7]), repo.changelog.index)
551 revs = map(lambda x: hex(x[7]), repo.changelog.index)
552 return revs
552 return revs
553 return _get_all_commit_ids(context_uid, repo_id, name)
553 return _get_all_commit_ids(context_uid, repo_id, name)
554
554
555 @reraise_safe_exceptions
555 @reraise_safe_exceptions
556 def get_config_value(self, wire, section, name, untrusted=False):
556 def get_config_value(self, wire, section, name, untrusted=False):
557 repo = self._factory.repo(wire)
557 repo = self._factory.repo(wire)
558 return repo.ui.config(section, name, untrusted=untrusted)
558 return repo.ui.config(section, name, untrusted=untrusted)
559
559
560 @reraise_safe_exceptions
560 @reraise_safe_exceptions
561 def is_large_file(self, wire, path):
561 def is_large_file(self, wire, path):
562 cache_on, context_uid, repo_id = self._cache_on(wire)
562 cache_on, context_uid, repo_id = self._cache_on(wire)
563 @self.region.conditional_cache_on_arguments(condition=cache_on)
563 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 def _is_large_file(_context_uid, _repo_id, _path):
564 def _is_large_file(_context_uid, _repo_id, _path):
565 return largefiles.lfutil.isstandin(path)
565 return largefiles.lfutil.isstandin(path)
566
566
567 return _is_large_file(context_uid, repo_id, path)
567 return _is_large_file(context_uid, repo_id, path)
568
568
569 @reraise_safe_exceptions
569 @reraise_safe_exceptions
570 def is_binary(self, wire, revision, path):
571 cache_on, context_uid, repo_id = self._cache_on(wire)
572
573 @self.region.conditional_cache_on_arguments(condition=cache_on)
574 def _is_binary(_repo_id, _sha, _path):
575 repo = self._factory.repo(wire)
576 ctx = self._get_ctx(repo, revision)
577 fctx = ctx.filectx(path)
578 return fctx.isbinary()
579
580 return _is_binary(repo_id, revision, path)
581
582 @reraise_safe_exceptions
570 def in_largefiles_store(self, wire, sha):
583 def in_largefiles_store(self, wire, sha):
571 repo = self._factory.repo(wire)
584 repo = self._factory.repo(wire)
572 return largefiles.lfutil.instore(repo, sha)
585 return largefiles.lfutil.instore(repo, sha)
573
586
574 @reraise_safe_exceptions
587 @reraise_safe_exceptions
575 def in_user_cache(self, wire, sha):
588 def in_user_cache(self, wire, sha):
576 repo = self._factory.repo(wire)
589 repo = self._factory.repo(wire)
577 return largefiles.lfutil.inusercache(repo.ui, sha)
590 return largefiles.lfutil.inusercache(repo.ui, sha)
578
591
579 @reraise_safe_exceptions
592 @reraise_safe_exceptions
580 def store_path(self, wire, sha):
593 def store_path(self, wire, sha):
581 repo = self._factory.repo(wire)
594 repo = self._factory.repo(wire)
582 return largefiles.lfutil.storepath(repo, sha)
595 return largefiles.lfutil.storepath(repo, sha)
583
596
584 @reraise_safe_exceptions
597 @reraise_safe_exceptions
585 def link(self, wire, sha, path):
598 def link(self, wire, sha, path):
586 repo = self._factory.repo(wire)
599 repo = self._factory.repo(wire)
587 largefiles.lfutil.link(
600 largefiles.lfutil.link(
588 largefiles.lfutil.usercachepath(repo.ui, sha), path)
601 largefiles.lfutil.usercachepath(repo.ui, sha), path)
589
602
590 @reraise_safe_exceptions
603 @reraise_safe_exceptions
591 def localrepository(self, wire, create=False):
604 def localrepository(self, wire, create=False):
592 self._factory.repo(wire, create=create)
605 self._factory.repo(wire, create=create)
593
606
594 @reraise_safe_exceptions
607 @reraise_safe_exceptions
595 def lookup(self, wire, revision, both):
608 def lookup(self, wire, revision, both):
596 cache_on, context_uid, repo_id = self._cache_on(wire)
609 cache_on, context_uid, repo_id = self._cache_on(wire)
597 @self.region.conditional_cache_on_arguments(condition=cache_on)
610 @self.region.conditional_cache_on_arguments(condition=cache_on)
598 def _lookup(_context_uid, _repo_id, _revision, _both):
611 def _lookup(_context_uid, _repo_id, _revision, _both):
599
612
600 repo = self._factory.repo(wire)
613 repo = self._factory.repo(wire)
601 rev = _revision
614 rev = _revision
602 if isinstance(rev, int):
615 if isinstance(rev, int):
603 # NOTE(marcink):
616 # NOTE(marcink):
604 # since Mercurial doesn't support negative indexes properly
617 # since Mercurial doesn't support negative indexes properly
605 # we need to shift accordingly by one to get proper index, e.g
618 # we need to shift accordingly by one to get proper index, e.g
606 # repo[-1] => repo[-2]
619 # repo[-1] => repo[-2]
607 # repo[0] => repo[-1]
620 # repo[0] => repo[-1]
608 if rev <= 0:
621 if rev <= 0:
609 rev = rev + -1
622 rev = rev + -1
610 try:
623 try:
611 ctx = self._get_ctx(repo, rev)
624 ctx = self._get_ctx(repo, rev)
612 except (TypeError, RepoLookupError) as e:
625 except (TypeError, RepoLookupError) as e:
613 e._org_exc_tb = traceback.format_exc()
626 e._org_exc_tb = traceback.format_exc()
614 raise exceptions.LookupException(e)(rev)
627 raise exceptions.LookupException(e)(rev)
615 except LookupError as e:
628 except LookupError as e:
616 e._org_exc_tb = traceback.format_exc()
629 e._org_exc_tb = traceback.format_exc()
617 raise exceptions.LookupException(e)(e.name)
630 raise exceptions.LookupException(e)(e.name)
618
631
619 if not both:
632 if not both:
620 return ctx.hex()
633 return ctx.hex()
621
634
622 ctx = repo[ctx.hex()]
635 ctx = repo[ctx.hex()]
623 return ctx.hex(), ctx.rev()
636 return ctx.hex(), ctx.rev()
624
637
625 return _lookup(context_uid, repo_id, revision, both)
638 return _lookup(context_uid, repo_id, revision, both)
626
639
627 @reraise_safe_exceptions
640 @reraise_safe_exceptions
628 def sync_push(self, wire, url):
641 def sync_push(self, wire, url):
629 if not self.check_url(url, wire['config']):
642 if not self.check_url(url, wire['config']):
630 return
643 return
631
644
632 repo = self._factory.repo(wire)
645 repo = self._factory.repo(wire)
633
646
634 # Disable any prompts for this repo
647 # Disable any prompts for this repo
635 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
648 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
636
649
637 bookmarks = dict(repo._bookmarks).keys()
650 bookmarks = dict(repo._bookmarks).keys()
638 remote = peer(repo, {}, url)
651 remote = peer(repo, {}, url)
639 # Disable any prompts for this remote
652 # Disable any prompts for this remote
640 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
653 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
641
654
642 return exchange.push(
655 return exchange.push(
643 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
656 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
644
657
645 @reraise_safe_exceptions
658 @reraise_safe_exceptions
646 def revision(self, wire, rev):
659 def revision(self, wire, rev):
647 repo = self._factory.repo(wire)
660 repo = self._factory.repo(wire)
648 ctx = self._get_ctx(repo, rev)
661 ctx = self._get_ctx(repo, rev)
649 return ctx.rev()
662 return ctx.rev()
650
663
651 @reraise_safe_exceptions
664 @reraise_safe_exceptions
652 def rev_range(self, wire, commit_filter):
665 def rev_range(self, wire, commit_filter):
653 cache_on, context_uid, repo_id = self._cache_on(wire)
666 cache_on, context_uid, repo_id = self._cache_on(wire)
654
667
655 @self.region.conditional_cache_on_arguments(condition=cache_on)
668 @self.region.conditional_cache_on_arguments(condition=cache_on)
656 def _rev_range(_context_uid, _repo_id, _filter):
669 def _rev_range(_context_uid, _repo_id, _filter):
657 repo = self._factory.repo(wire)
670 repo = self._factory.repo(wire)
658 revisions = [rev for rev in revrange(repo, commit_filter)]
671 revisions = [rev for rev in revrange(repo, commit_filter)]
659 return revisions
672 return revisions
660
673
661 return _rev_range(context_uid, repo_id, sorted(commit_filter))
674 return _rev_range(context_uid, repo_id, sorted(commit_filter))
662
675
663 @reraise_safe_exceptions
676 @reraise_safe_exceptions
664 def rev_range_hash(self, wire, node):
677 def rev_range_hash(self, wire, node):
665 repo = self._factory.repo(wire)
678 repo = self._factory.repo(wire)
666
679
667 def get_revs(repo, rev_opt):
680 def get_revs(repo, rev_opt):
668 if rev_opt:
681 if rev_opt:
669 revs = revrange(repo, rev_opt)
682 revs = revrange(repo, rev_opt)
670 if len(revs) == 0:
683 if len(revs) == 0:
671 return (nullrev, nullrev)
684 return (nullrev, nullrev)
672 return max(revs), min(revs)
685 return max(revs), min(revs)
673 else:
686 else:
674 return len(repo) - 1, 0
687 return len(repo) - 1, 0
675
688
676 stop, start = get_revs(repo, [node + ':'])
689 stop, start = get_revs(repo, [node + ':'])
677 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
690 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
678 return revs
691 return revs
679
692
680 @reraise_safe_exceptions
693 @reraise_safe_exceptions
681 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
694 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
682 other_path = kwargs.pop('other_path', None)
695 other_path = kwargs.pop('other_path', None)
683
696
684 # case when we want to compare two independent repositories
697 # case when we want to compare two independent repositories
685 if other_path and other_path != wire["path"]:
698 if other_path and other_path != wire["path"]:
686 baseui = self._factory._create_config(wire["config"])
699 baseui = self._factory._create_config(wire["config"])
687 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
700 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
688 else:
701 else:
689 repo = self._factory.repo(wire)
702 repo = self._factory.repo(wire)
690 return list(repo.revs(rev_spec, *args))
703 return list(repo.revs(rev_spec, *args))
691
704
692 @reraise_safe_exceptions
705 @reraise_safe_exceptions
693 def verify(self, wire,):
706 def verify(self, wire,):
694 repo = self._factory.repo(wire)
707 repo = self._factory.repo(wire)
695 baseui = self._factory._create_config(wire['config'])
708 baseui = self._factory._create_config(wire['config'])
696 baseui.setconfig('ui', 'quiet', 'false')
709 baseui.setconfig('ui', 'quiet', 'false')
697 output = io.BytesIO()
710 output = io.BytesIO()
698
711
699 def write(data, **unused_kwargs):
712 def write(data, **unused_kwargs):
700 output.write(data)
713 output.write(data)
701 baseui.write = write
714 baseui.write = write
702
715
703 repo.ui = baseui
716 repo.ui = baseui
704 verify.verify(repo)
717 verify.verify(repo)
705 return output.getvalue()
718 return output.getvalue()
706
719
707 @reraise_safe_exceptions
720 @reraise_safe_exceptions
708 def tags(self, wire):
721 def tags(self, wire):
709 cache_on, context_uid, repo_id = self._cache_on(wire)
722 cache_on, context_uid, repo_id = self._cache_on(wire)
710 @self.region.conditional_cache_on_arguments(condition=cache_on)
723 @self.region.conditional_cache_on_arguments(condition=cache_on)
711 def _tags(_context_uid, _repo_id):
724 def _tags(_context_uid, _repo_id):
712 repo = self._factory.repo(wire)
725 repo = self._factory.repo(wire)
713 return repo.tags()
726 return repo.tags()
714
727
715 return _tags(context_uid, repo_id)
728 return _tags(context_uid, repo_id)
716
729
717 @reraise_safe_exceptions
730 @reraise_safe_exceptions
718 def update(self, wire, node=None, clean=False):
731 def update(self, wire, node=None, clean=False):
719 repo = self._factory.repo(wire)
732 repo = self._factory.repo(wire)
720 baseui = self._factory._create_config(wire['config'])
733 baseui = self._factory._create_config(wire['config'])
721 commands.update(baseui, repo, node=node, clean=clean)
734 commands.update(baseui, repo, node=node, clean=clean)
722
735
723 @reraise_safe_exceptions
736 @reraise_safe_exceptions
724 def identify(self, wire):
737 def identify(self, wire):
725 repo = self._factory.repo(wire)
738 repo = self._factory.repo(wire)
726 baseui = self._factory._create_config(wire['config'])
739 baseui = self._factory._create_config(wire['config'])
727 output = io.BytesIO()
740 output = io.BytesIO()
728 baseui.write = output.write
741 baseui.write = output.write
729 # This is required to get a full node id
742 # This is required to get a full node id
730 baseui.debugflag = True
743 baseui.debugflag = True
731 commands.identify(baseui, repo, id=True)
744 commands.identify(baseui, repo, id=True)
732
745
733 return output.getvalue()
746 return output.getvalue()
734
747
735 @reraise_safe_exceptions
748 @reraise_safe_exceptions
736 def heads(self, wire, branch=None):
749 def heads(self, wire, branch=None):
737 repo = self._factory.repo(wire)
750 repo = self._factory.repo(wire)
738 baseui = self._factory._create_config(wire['config'])
751 baseui = self._factory._create_config(wire['config'])
739 output = io.BytesIO()
752 output = io.BytesIO()
740
753
741 def write(data, **unused_kwargs):
754 def write(data, **unused_kwargs):
742 output.write(data)
755 output.write(data)
743
756
744 baseui.write = write
757 baseui.write = write
745 if branch:
758 if branch:
746 args = [branch]
759 args = [branch]
747 else:
760 else:
748 args = []
761 args = []
749 commands.heads(baseui, repo, template='{node} ', *args)
762 commands.heads(baseui, repo, template='{node} ', *args)
750
763
751 return output.getvalue()
764 return output.getvalue()
752
765
753 @reraise_safe_exceptions
766 @reraise_safe_exceptions
754 def ancestor(self, wire, revision1, revision2):
767 def ancestor(self, wire, revision1, revision2):
755 repo = self._factory.repo(wire)
768 repo = self._factory.repo(wire)
756 changelog = repo.changelog
769 changelog = repo.changelog
757 lookup = repo.lookup
770 lookup = repo.lookup
758 a = changelog.ancestor(lookup(revision1), lookup(revision2))
771 a = changelog.ancestor(lookup(revision1), lookup(revision2))
759 return hex(a)
772 return hex(a)
760
773
761 @reraise_safe_exceptions
774 @reraise_safe_exceptions
762 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
775 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
763 baseui = self._factory._create_config(wire["config"], hooks=hooks)
776 baseui = self._factory._create_config(wire["config"], hooks=hooks)
764 clone(baseui, source, dest, noupdate=not update_after_clone)
777 clone(baseui, source, dest, noupdate=not update_after_clone)
765
778
766 @reraise_safe_exceptions
779 @reraise_safe_exceptions
767 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
780 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
768
781
769 repo = self._factory.repo(wire)
782 repo = self._factory.repo(wire)
770 baseui = self._factory._create_config(wire['config'])
783 baseui = self._factory._create_config(wire['config'])
771 publishing = baseui.configbool('phases', 'publish')
784 publishing = baseui.configbool('phases', 'publish')
772 if publishing:
785 if publishing:
773 new_commit = 'public'
786 new_commit = 'public'
774 else:
787 else:
775 new_commit = 'draft'
788 new_commit = 'draft'
776
789
777 def _filectxfn(_repo, ctx, path):
790 def _filectxfn(_repo, ctx, path):
778 """
791 """
779 Marks given path as added/changed/removed in a given _repo. This is
792 Marks given path as added/changed/removed in a given _repo. This is
780 for internal mercurial commit function.
793 for internal mercurial commit function.
781 """
794 """
782
795
783 # check if this path is removed
796 # check if this path is removed
784 if path in removed:
797 if path in removed:
785 # returning None is a way to mark node for removal
798 # returning None is a way to mark node for removal
786 return None
799 return None
787
800
788 # check if this path is added
801 # check if this path is added
789 for node in updated:
802 for node in updated:
790 if node['path'] == path:
803 if node['path'] == path:
791 return memfilectx(
804 return memfilectx(
792 _repo,
805 _repo,
793 changectx=ctx,
806 changectx=ctx,
794 path=node['path'],
807 path=node['path'],
795 data=node['content'],
808 data=node['content'],
796 islink=False,
809 islink=False,
797 isexec=bool(node['mode'] & stat.S_IXUSR),
810 isexec=bool(node['mode'] & stat.S_IXUSR),
798 copysource=False)
811 copysource=False)
799
812
800 raise exceptions.AbortException()(
813 raise exceptions.AbortException()(
801 "Given path haven't been marked as added, "
814 "Given path haven't been marked as added, "
802 "changed or removed (%s)" % path)
815 "changed or removed (%s)" % path)
803
816
804 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
817 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
805
818
806 commit_ctx = memctx(
819 commit_ctx = memctx(
807 repo=repo,
820 repo=repo,
808 parents=parents,
821 parents=parents,
809 text=message,
822 text=message,
810 files=files,
823 files=files,
811 filectxfn=_filectxfn,
824 filectxfn=_filectxfn,
812 user=user,
825 user=user,
813 date=(commit_time, commit_timezone),
826 date=(commit_time, commit_timezone),
814 extra=extra)
827 extra=extra)
815
828
816 n = repo.commitctx(commit_ctx)
829 n = repo.commitctx(commit_ctx)
817 new_id = hex(n)
830 new_id = hex(n)
818
831
819 return new_id
832 return new_id
820
833
821 @reraise_safe_exceptions
834 @reraise_safe_exceptions
822 def pull(self, wire, url, commit_ids=None):
835 def pull(self, wire, url, commit_ids=None):
823 repo = self._factory.repo(wire)
836 repo = self._factory.repo(wire)
824 # Disable any prompts for this repo
837 # Disable any prompts for this repo
825 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
838 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
826
839
827 remote = peer(repo, {}, url)
840 remote = peer(repo, {}, url)
828 # Disable any prompts for this remote
841 # Disable any prompts for this remote
829 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
842 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
830
843
831 if commit_ids:
844 if commit_ids:
832 commit_ids = [bin(commit_id) for commit_id in commit_ids]
845 commit_ids = [bin(commit_id) for commit_id in commit_ids]
833
846
834 return exchange.pull(
847 return exchange.pull(
835 repo, remote, heads=commit_ids, force=None).cgresult
848 repo, remote, heads=commit_ids, force=None).cgresult
836
849
837 @reraise_safe_exceptions
850 @reraise_safe_exceptions
838 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
851 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
839 repo = self._factory.repo(wire)
852 repo = self._factory.repo(wire)
840 baseui = self._factory._create_config(wire['config'], hooks=hooks)
853 baseui = self._factory._create_config(wire['config'], hooks=hooks)
841
854
842 # Mercurial internally has a lot of logic that checks ONLY if
855 # Mercurial internally has a lot of logic that checks ONLY if
843 # option is defined, we just pass those if they are defined then
856 # option is defined, we just pass those if they are defined then
844 opts = {}
857 opts = {}
845 if bookmark:
858 if bookmark:
846 opts['bookmark'] = bookmark
859 opts['bookmark'] = bookmark
847 if branch:
860 if branch:
848 opts['branch'] = branch
861 opts['branch'] = branch
849 if revision:
862 if revision:
850 opts['rev'] = revision
863 opts['rev'] = revision
851
864
852 commands.pull(baseui, repo, source, **opts)
865 commands.pull(baseui, repo, source, **opts)
853
866
854 @reraise_safe_exceptions
867 @reraise_safe_exceptions
855 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
868 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
856 repo = self._factory.repo(wire)
869 repo = self._factory.repo(wire)
857 baseui = self._factory._create_config(wire['config'], hooks=hooks)
870 baseui = self._factory._create_config(wire['config'], hooks=hooks)
858 commands.push(baseui, repo, dest=dest_path, rev=revisions,
871 commands.push(baseui, repo, dest=dest_path, rev=revisions,
859 new_branch=push_branches)
872 new_branch=push_branches)
860
873
861 @reraise_safe_exceptions
874 @reraise_safe_exceptions
862 def strip(self, wire, revision, update, backup):
875 def strip(self, wire, revision, update, backup):
863 repo = self._factory.repo(wire)
876 repo = self._factory.repo(wire)
864 ctx = self._get_ctx(repo, revision)
877 ctx = self._get_ctx(repo, revision)
865 hgext_strip(
878 hgext_strip(
866 repo.baseui, repo, ctx.node(), update=update, backup=backup)
879 repo.baseui, repo, ctx.node(), update=update, backup=backup)
867
880
868 @reraise_safe_exceptions
881 @reraise_safe_exceptions
869 def merge(self, wire, revision):
882 def merge(self, wire, revision):
870 repo = self._factory.repo(wire)
883 repo = self._factory.repo(wire)
871 baseui = self._factory._create_config(wire['config'])
884 baseui = self._factory._create_config(wire['config'])
872 repo.ui.setconfig('ui', 'merge', 'internal:dump')
885 repo.ui.setconfig('ui', 'merge', 'internal:dump')
873
886
874 # In case of sub repositories are used mercurial prompts the user in
887 # In case of sub repositories are used mercurial prompts the user in
875 # case of merge conflicts or different sub repository sources. By
888 # case of merge conflicts or different sub repository sources. By
876 # setting the interactive flag to `False` mercurial doesn't prompt the
889 # setting the interactive flag to `False` mercurial doesn't prompt the
877 # used but instead uses a default value.
890 # used but instead uses a default value.
878 repo.ui.setconfig('ui', 'interactive', False)
891 repo.ui.setconfig('ui', 'interactive', False)
879 commands.merge(baseui, repo, rev=revision)
892 commands.merge(baseui, repo, rev=revision)
880
893
881 @reraise_safe_exceptions
894 @reraise_safe_exceptions
882 def merge_state(self, wire):
895 def merge_state(self, wire):
883 repo = self._factory.repo(wire)
896 repo = self._factory.repo(wire)
884 repo.ui.setconfig('ui', 'merge', 'internal:dump')
897 repo.ui.setconfig('ui', 'merge', 'internal:dump')
885
898
886 # In case of sub repositories are used mercurial prompts the user in
899 # In case of sub repositories are used mercurial prompts the user in
887 # case of merge conflicts or different sub repository sources. By
900 # case of merge conflicts or different sub repository sources. By
888 # setting the interactive flag to `False` mercurial doesn't prompt the
901 # setting the interactive flag to `False` mercurial doesn't prompt the
889 # used but instead uses a default value.
902 # used but instead uses a default value.
890 repo.ui.setconfig('ui', 'interactive', False)
903 repo.ui.setconfig('ui', 'interactive', False)
891 ms = hg_merge.mergestate(repo)
904 ms = hg_merge.mergestate(repo)
892 return [x for x in ms.unresolved()]
905 return [x for x in ms.unresolved()]
893
906
894 @reraise_safe_exceptions
907 @reraise_safe_exceptions
895 def commit(self, wire, message, username, close_branch=False):
908 def commit(self, wire, message, username, close_branch=False):
896 repo = self._factory.repo(wire)
909 repo = self._factory.repo(wire)
897 baseui = self._factory._create_config(wire['config'])
910 baseui = self._factory._create_config(wire['config'])
898 repo.ui.setconfig('ui', 'username', username)
911 repo.ui.setconfig('ui', 'username', username)
899 commands.commit(baseui, repo, message=message, close_branch=close_branch)
912 commands.commit(baseui, repo, message=message, close_branch=close_branch)
900
913
901 @reraise_safe_exceptions
914 @reraise_safe_exceptions
902 def rebase(self, wire, source=None, dest=None, abort=False):
915 def rebase(self, wire, source=None, dest=None, abort=False):
903 repo = self._factory.repo(wire)
916 repo = self._factory.repo(wire)
904 baseui = self._factory._create_config(wire['config'])
917 baseui = self._factory._create_config(wire['config'])
905 repo.ui.setconfig('ui', 'merge', 'internal:dump')
918 repo.ui.setconfig('ui', 'merge', 'internal:dump')
906 rebase.rebase(
919 rebase.rebase(
907 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
920 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
908
921
909 @reraise_safe_exceptions
922 @reraise_safe_exceptions
910 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
923 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
911 repo = self._factory.repo(wire)
924 repo = self._factory.repo(wire)
912 ctx = self._get_ctx(repo, revision)
925 ctx = self._get_ctx(repo, revision)
913 node = ctx.node()
926 node = ctx.node()
914
927
915 date = (tag_time, tag_timezone)
928 date = (tag_time, tag_timezone)
916 try:
929 try:
917 hg_tag.tag(repo, name, node, message, local, user, date)
930 hg_tag.tag(repo, name, node, message, local, user, date)
918 except Abort as e:
931 except Abort as e:
919 log.exception("Tag operation aborted")
932 log.exception("Tag operation aborted")
920 # Exception can contain unicode which we convert
933 # Exception can contain unicode which we convert
921 raise exceptions.AbortException(e)(repr(e))
934 raise exceptions.AbortException(e)(repr(e))
922
935
923 @reraise_safe_exceptions
936 @reraise_safe_exceptions
924 def bookmark(self, wire, bookmark, revision=None):
937 def bookmark(self, wire, bookmark, revision=None):
925 repo = self._factory.repo(wire)
938 repo = self._factory.repo(wire)
926 baseui = self._factory._create_config(wire['config'])
939 baseui = self._factory._create_config(wire['config'])
927 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
940 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
928
941
929 @reraise_safe_exceptions
942 @reraise_safe_exceptions
930 def install_hooks(self, wire, force=False):
943 def install_hooks(self, wire, force=False):
931 # we don't need any special hooks for Mercurial
944 # we don't need any special hooks for Mercurial
932 pass
945 pass
933
946
934 @reraise_safe_exceptions
947 @reraise_safe_exceptions
935 def get_hooks_info(self, wire):
948 def get_hooks_info(self, wire):
936 return {
949 return {
937 'pre_version': vcsserver.__version__,
950 'pre_version': vcsserver.__version__,
938 'post_version': vcsserver.__version__,
951 'post_version': vcsserver.__version__,
939 }
952 }
@@ -1,788 +1,799 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 from urllib2 import URLError
22 from urllib2 import URLError
23 import urlparse
23 import urlparse
24 import logging
24 import logging
25 import posixpath as vcspath
25 import posixpath as vcspath
26 import StringIO
26 import StringIO
27 import urllib
27 import urllib
28 import traceback
28 import traceback
29
29
30 import svn.client
30 import svn.client
31 import svn.core
31 import svn.core
32 import svn.delta
32 import svn.delta
33 import svn.diff
33 import svn.diff
34 import svn.fs
34 import svn.fs
35 import svn.repos
35 import svn.repos
36
36
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver.base import RepoFactory, raise_from_original
38 from vcsserver.base import RepoFactory, raise_from_original
39 from vcsserver.vcs_base import RemoteBase
39 from vcsserver.vcs_base import RemoteBase
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 # Set of svn compatible version flags.
44 # Set of svn compatible version flags.
45 # Compare with subversion/svnadmin/svnadmin.c
45 # Compare with subversion/svnadmin/svnadmin.c
46 svn_compatible_versions = {
46 svn_compatible_versions = {
47 'pre-1.4-compatible',
47 'pre-1.4-compatible',
48 'pre-1.5-compatible',
48 'pre-1.5-compatible',
49 'pre-1.6-compatible',
49 'pre-1.6-compatible',
50 'pre-1.8-compatible',
50 'pre-1.8-compatible',
51 'pre-1.9-compatible'
51 'pre-1.9-compatible'
52 }
52 }
53
53
54 svn_compatible_versions_map = {
54 svn_compatible_versions_map = {
55 'pre-1.4-compatible': '1.3',
55 'pre-1.4-compatible': '1.3',
56 'pre-1.5-compatible': '1.4',
56 'pre-1.5-compatible': '1.4',
57 'pre-1.6-compatible': '1.5',
57 'pre-1.6-compatible': '1.5',
58 'pre-1.8-compatible': '1.7',
58 'pre-1.8-compatible': '1.7',
59 'pre-1.9-compatible': '1.8',
59 'pre-1.9-compatible': '1.8',
60 }
60 }
61
61
62
62
63 def reraise_safe_exceptions(func):
63 def reraise_safe_exceptions(func):
64 """Decorator for converting svn exceptions to something neutral."""
64 """Decorator for converting svn exceptions to something neutral."""
65 def wrapper(*args, **kwargs):
65 def wrapper(*args, **kwargs):
66 try:
66 try:
67 return func(*args, **kwargs)
67 return func(*args, **kwargs)
68 except Exception as e:
68 except Exception as e:
69 if not hasattr(e, '_vcs_kind'):
69 if not hasattr(e, '_vcs_kind'):
70 log.exception("Unhandled exception in svn remote call")
70 log.exception("Unhandled exception in svn remote call")
71 raise_from_original(exceptions.UnhandledException(e))
71 raise_from_original(exceptions.UnhandledException(e))
72 raise
72 raise
73 return wrapper
73 return wrapper
74
74
75
75
76 class SubversionFactory(RepoFactory):
76 class SubversionFactory(RepoFactory):
77 repo_type = 'svn'
77 repo_type = 'svn'
78
78
79 def _create_repo(self, wire, create, compatible_version):
79 def _create_repo(self, wire, create, compatible_version):
80 path = svn.core.svn_path_canonicalize(wire['path'])
80 path = svn.core.svn_path_canonicalize(wire['path'])
81 if create:
81 if create:
82 fs_config = {'compatible-version': '1.9'}
82 fs_config = {'compatible-version': '1.9'}
83 if compatible_version:
83 if compatible_version:
84 if compatible_version not in svn_compatible_versions:
84 if compatible_version not in svn_compatible_versions:
85 raise Exception('Unknown SVN compatible version "{}"'
85 raise Exception('Unknown SVN compatible version "{}"'
86 .format(compatible_version))
86 .format(compatible_version))
87 fs_config['compatible-version'] = \
87 fs_config['compatible-version'] = \
88 svn_compatible_versions_map[compatible_version]
88 svn_compatible_versions_map[compatible_version]
89
89
90 log.debug('Create SVN repo with config "%s"', fs_config)
90 log.debug('Create SVN repo with config "%s"', fs_config)
91 repo = svn.repos.create(path, "", "", None, fs_config)
91 repo = svn.repos.create(path, "", "", None, fs_config)
92 else:
92 else:
93 repo = svn.repos.open(path)
93 repo = svn.repos.open(path)
94
94
95 log.debug('Got SVN object: %s', repo)
95 log.debug('Got SVN object: %s', repo)
96 return repo
96 return repo
97
97
98 def repo(self, wire, create=False, compatible_version=None):
98 def repo(self, wire, create=False, compatible_version=None):
99 """
99 """
100 Get a repository instance for the given path.
100 Get a repository instance for the given path.
101 """
101 """
102 return self._create_repo(wire, create, compatible_version)
102 return self._create_repo(wire, create, compatible_version)
103
103
104
104
105 NODE_TYPE_MAPPING = {
105 NODE_TYPE_MAPPING = {
106 svn.core.svn_node_file: 'file',
106 svn.core.svn_node_file: 'file',
107 svn.core.svn_node_dir: 'dir',
107 svn.core.svn_node_dir: 'dir',
108 }
108 }
109
109
110
110
111 class SvnRemote(RemoteBase):
111 class SvnRemote(RemoteBase):
112
112
113 def __init__(self, factory, hg_factory=None):
113 def __init__(self, factory, hg_factory=None):
114 self._factory = factory
114 self._factory = factory
115 # TODO: Remove once we do not use internal Mercurial objects anymore
115 # TODO: Remove once we do not use internal Mercurial objects anymore
116 # for subversion
116 # for subversion
117 self._hg_factory = hg_factory
117 self._hg_factory = hg_factory
118
118
119 @reraise_safe_exceptions
119 @reraise_safe_exceptions
120 def discover_svn_version(self):
120 def discover_svn_version(self):
121 try:
121 try:
122 import svn.core
122 import svn.core
123 svn_ver = svn.core.SVN_VERSION
123 svn_ver = svn.core.SVN_VERSION
124 except ImportError:
124 except ImportError:
125 svn_ver = None
125 svn_ver = None
126 return svn_ver
126 return svn_ver
127
127
128 @reraise_safe_exceptions
128 @reraise_safe_exceptions
129 def is_empty(self, wire):
129 def is_empty(self, wire):
130
130
131 try:
131 try:
132 return self.lookup(wire, -1) == 0
132 return self.lookup(wire, -1) == 0
133 except Exception:
133 except Exception:
134 log.exception("failed to read object_store")
134 log.exception("failed to read object_store")
135 return False
135 return False
136
136
137 def check_url(self, url, config_items):
137 def check_url(self, url, config_items):
138 # this can throw exception if not installed, but we detect this
138 # this can throw exception if not installed, but we detect this
139 from hgsubversion import svnrepo
139 from hgsubversion import svnrepo
140
140
141 baseui = self._hg_factory._create_config(config_items)
141 baseui = self._hg_factory._create_config(config_items)
142 # uuid function get's only valid UUID from proper repo, else
142 # uuid function get's only valid UUID from proper repo, else
143 # throws exception
143 # throws exception
144 try:
144 try:
145 svnrepo.svnremoterepo(baseui, url).svn.uuid
145 svnrepo.svnremoterepo(baseui, url).svn.uuid
146 except Exception:
146 except Exception:
147 tb = traceback.format_exc()
147 tb = traceback.format_exc()
148 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
148 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
149 raise URLError(
149 raise URLError(
150 '"%s" is not a valid Subversion source url.' % (url, ))
150 '"%s" is not a valid Subversion source url.' % (url, ))
151 return True
151 return True
152
152
153 def is_path_valid_repository(self, wire, path):
153 def is_path_valid_repository(self, wire, path):
154
154
155 # NOTE(marcink): short circuit the check for SVN repo
155 # NOTE(marcink): short circuit the check for SVN repo
156 # the repos.open might be expensive to check, but we have one cheap
156 # the repos.open might be expensive to check, but we have one cheap
157 # pre condition that we can use, to check for 'format' file
157 # pre condition that we can use, to check for 'format' file
158
158
159 if not os.path.isfile(os.path.join(path, 'format')):
159 if not os.path.isfile(os.path.join(path, 'format')):
160 return False
160 return False
161
161
162 try:
162 try:
163 svn.repos.open(path)
163 svn.repos.open(path)
164 except svn.core.SubversionException:
164 except svn.core.SubversionException:
165 tb = traceback.format_exc()
165 tb = traceback.format_exc()
166 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
166 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
167 return False
167 return False
168 return True
168 return True
169
169
170 @reraise_safe_exceptions
170 @reraise_safe_exceptions
171 def verify(self, wire,):
171 def verify(self, wire,):
172 repo_path = wire['path']
172 repo_path = wire['path']
173 if not self.is_path_valid_repository(wire, repo_path):
173 if not self.is_path_valid_repository(wire, repo_path):
174 raise Exception(
174 raise Exception(
175 "Path %s is not a valid Subversion repository." % repo_path)
175 "Path %s is not a valid Subversion repository." % repo_path)
176
176
177 cmd = ['svnadmin', 'info', repo_path]
177 cmd = ['svnadmin', 'info', repo_path]
178 stdout, stderr = subprocessio.run_command(cmd)
178 stdout, stderr = subprocessio.run_command(cmd)
179 return stdout
179 return stdout
180
180
181 def lookup(self, wire, revision):
181 def lookup(self, wire, revision):
182 if revision not in [-1, None, 'HEAD']:
182 if revision not in [-1, None, 'HEAD']:
183 raise NotImplementedError
183 raise NotImplementedError
184 repo = self._factory.repo(wire)
184 repo = self._factory.repo(wire)
185 fs_ptr = svn.repos.fs(repo)
185 fs_ptr = svn.repos.fs(repo)
186 head = svn.fs.youngest_rev(fs_ptr)
186 head = svn.fs.youngest_rev(fs_ptr)
187 return head
187 return head
188
188
189 def lookup_interval(self, wire, start_ts, end_ts):
189 def lookup_interval(self, wire, start_ts, end_ts):
190 repo = self._factory.repo(wire)
190 repo = self._factory.repo(wire)
191 fsobj = svn.repos.fs(repo)
191 fsobj = svn.repos.fs(repo)
192 start_rev = None
192 start_rev = None
193 end_rev = None
193 end_rev = None
194 if start_ts:
194 if start_ts:
195 start_ts_svn = apr_time_t(start_ts)
195 start_ts_svn = apr_time_t(start_ts)
196 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
196 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
197 else:
197 else:
198 start_rev = 1
198 start_rev = 1
199 if end_ts:
199 if end_ts:
200 end_ts_svn = apr_time_t(end_ts)
200 end_ts_svn = apr_time_t(end_ts)
201 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
201 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
202 else:
202 else:
203 end_rev = svn.fs.youngest_rev(fsobj)
203 end_rev = svn.fs.youngest_rev(fsobj)
204 return start_rev, end_rev
204 return start_rev, end_rev
205
205
206 def revision_properties(self, wire, revision):
206 def revision_properties(self, wire, revision):
207
207
208 cache_on, context_uid, repo_id = self._cache_on(wire)
208 cache_on, context_uid, repo_id = self._cache_on(wire)
209 @self.region.conditional_cache_on_arguments(condition=cache_on)
209 @self.region.conditional_cache_on_arguments(condition=cache_on)
210 def _revision_properties(_repo_id, _revision):
210 def _revision_properties(_repo_id, _revision):
211 repo = self._factory.repo(wire)
211 repo = self._factory.repo(wire)
212 fs_ptr = svn.repos.fs(repo)
212 fs_ptr = svn.repos.fs(repo)
213 return svn.fs.revision_proplist(fs_ptr, revision)
213 return svn.fs.revision_proplist(fs_ptr, revision)
214 return _revision_properties(repo_id, revision)
214 return _revision_properties(repo_id, revision)
215
215
216 def revision_changes(self, wire, revision):
216 def revision_changes(self, wire, revision):
217
217
218 repo = self._factory.repo(wire)
218 repo = self._factory.repo(wire)
219 fsobj = svn.repos.fs(repo)
219 fsobj = svn.repos.fs(repo)
220 rev_root = svn.fs.revision_root(fsobj, revision)
220 rev_root = svn.fs.revision_root(fsobj, revision)
221
221
222 editor = svn.repos.ChangeCollector(fsobj, rev_root)
222 editor = svn.repos.ChangeCollector(fsobj, rev_root)
223 editor_ptr, editor_baton = svn.delta.make_editor(editor)
223 editor_ptr, editor_baton = svn.delta.make_editor(editor)
224 base_dir = ""
224 base_dir = ""
225 send_deltas = False
225 send_deltas = False
226 svn.repos.replay2(
226 svn.repos.replay2(
227 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
227 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
228 editor_ptr, editor_baton, None)
228 editor_ptr, editor_baton, None)
229
229
230 added = []
230 added = []
231 changed = []
231 changed = []
232 removed = []
232 removed = []
233
233
234 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
234 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
235 for path, change in editor.changes.iteritems():
235 for path, change in editor.changes.iteritems():
236 # TODO: Decide what to do with directory nodes. Subversion can add
236 # TODO: Decide what to do with directory nodes. Subversion can add
237 # empty directories.
237 # empty directories.
238
238
239 if change.item_kind == svn.core.svn_node_dir:
239 if change.item_kind == svn.core.svn_node_dir:
240 continue
240 continue
241 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
241 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
242 added.append(path)
242 added.append(path)
243 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
243 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
244 svn.repos.CHANGE_ACTION_REPLACE]:
244 svn.repos.CHANGE_ACTION_REPLACE]:
245 changed.append(path)
245 changed.append(path)
246 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
246 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
247 removed.append(path)
247 removed.append(path)
248 else:
248 else:
249 raise NotImplementedError(
249 raise NotImplementedError(
250 "Action %s not supported on path %s" % (
250 "Action %s not supported on path %s" % (
251 change.action, path))
251 change.action, path))
252
252
253 changes = {
253 changes = {
254 'added': added,
254 'added': added,
255 'changed': changed,
255 'changed': changed,
256 'removed': removed,
256 'removed': removed,
257 }
257 }
258 return changes
258 return changes
259
259
260 @reraise_safe_exceptions
260 @reraise_safe_exceptions
261 def node_history(self, wire, path, revision, limit):
261 def node_history(self, wire, path, revision, limit):
262 cache_on, context_uid, repo_id = self._cache_on(wire)
262 cache_on, context_uid, repo_id = self._cache_on(wire)
263 @self.region.conditional_cache_on_arguments(condition=cache_on)
263 @self.region.conditional_cache_on_arguments(condition=cache_on)
264 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
264 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
265 cross_copies = False
265 cross_copies = False
266 repo = self._factory.repo(wire)
266 repo = self._factory.repo(wire)
267 fsobj = svn.repos.fs(repo)
267 fsobj = svn.repos.fs(repo)
268 rev_root = svn.fs.revision_root(fsobj, revision)
268 rev_root = svn.fs.revision_root(fsobj, revision)
269
269
270 history_revisions = []
270 history_revisions = []
271 history = svn.fs.node_history(rev_root, path)
271 history = svn.fs.node_history(rev_root, path)
272 history = svn.fs.history_prev(history, cross_copies)
272 history = svn.fs.history_prev(history, cross_copies)
273 while history:
273 while history:
274 __, node_revision = svn.fs.history_location(history)
274 __, node_revision = svn.fs.history_location(history)
275 history_revisions.append(node_revision)
275 history_revisions.append(node_revision)
276 if limit and len(history_revisions) >= limit:
276 if limit and len(history_revisions) >= limit:
277 break
277 break
278 history = svn.fs.history_prev(history, cross_copies)
278 history = svn.fs.history_prev(history, cross_copies)
279 return history_revisions
279 return history_revisions
280 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
280 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
281
281
282 def node_properties(self, wire, path, revision):
282 def node_properties(self, wire, path, revision):
283 cache_on, context_uid, repo_id = self._cache_on(wire)
283 cache_on, context_uid, repo_id = self._cache_on(wire)
284 @self.region.conditional_cache_on_arguments(condition=cache_on)
284 @self.region.conditional_cache_on_arguments(condition=cache_on)
285 def _node_properties(_repo_id, _path, _revision):
285 def _node_properties(_repo_id, _path, _revision):
286 repo = self._factory.repo(wire)
286 repo = self._factory.repo(wire)
287 fsobj = svn.repos.fs(repo)
287 fsobj = svn.repos.fs(repo)
288 rev_root = svn.fs.revision_root(fsobj, revision)
288 rev_root = svn.fs.revision_root(fsobj, revision)
289 return svn.fs.node_proplist(rev_root, path)
289 return svn.fs.node_proplist(rev_root, path)
290 return _node_properties(repo_id, path, revision)
290 return _node_properties(repo_id, path, revision)
291
291
292 def file_annotate(self, wire, path, revision):
292 def file_annotate(self, wire, path, revision):
293 abs_path = 'file://' + urllib.pathname2url(
293 abs_path = 'file://' + urllib.pathname2url(
294 vcspath.join(wire['path'], path))
294 vcspath.join(wire['path'], path))
295 file_uri = svn.core.svn_path_canonicalize(abs_path)
295 file_uri = svn.core.svn_path_canonicalize(abs_path)
296
296
297 start_rev = svn_opt_revision_value_t(0)
297 start_rev = svn_opt_revision_value_t(0)
298 peg_rev = svn_opt_revision_value_t(revision)
298 peg_rev = svn_opt_revision_value_t(revision)
299 end_rev = peg_rev
299 end_rev = peg_rev
300
300
301 annotations = []
301 annotations = []
302
302
303 def receiver(line_no, revision, author, date, line, pool):
303 def receiver(line_no, revision, author, date, line, pool):
304 annotations.append((line_no, revision, line))
304 annotations.append((line_no, revision, line))
305
305
306 # TODO: Cannot use blame5, missing typemap function in the swig code
306 # TODO: Cannot use blame5, missing typemap function in the swig code
307 try:
307 try:
308 svn.client.blame2(
308 svn.client.blame2(
309 file_uri, peg_rev, start_rev, end_rev,
309 file_uri, peg_rev, start_rev, end_rev,
310 receiver, svn.client.create_context())
310 receiver, svn.client.create_context())
311 except svn.core.SubversionException as exc:
311 except svn.core.SubversionException as exc:
312 log.exception("Error during blame operation.")
312 log.exception("Error during blame operation.")
313 raise Exception(
313 raise Exception(
314 "Blame not supported or file does not exist at path %s. "
314 "Blame not supported or file does not exist at path %s. "
315 "Error %s." % (path, exc))
315 "Error %s." % (path, exc))
316
316
317 return annotations
317 return annotations
318
318
319 def get_node_type(self, wire, path, revision=None):
319 def get_node_type(self, wire, path, revision=None):
320
320
321 cache_on, context_uid, repo_id = self._cache_on(wire)
321 cache_on, context_uid, repo_id = self._cache_on(wire)
322 @self.region.conditional_cache_on_arguments(condition=cache_on)
322 @self.region.conditional_cache_on_arguments(condition=cache_on)
323 def _get_node_type(_repo_id, _path, _revision):
323 def _get_node_type(_repo_id, _path, _revision):
324 repo = self._factory.repo(wire)
324 repo = self._factory.repo(wire)
325 fs_ptr = svn.repos.fs(repo)
325 fs_ptr = svn.repos.fs(repo)
326 if _revision is None:
326 if _revision is None:
327 _revision = svn.fs.youngest_rev(fs_ptr)
327 _revision = svn.fs.youngest_rev(fs_ptr)
328 root = svn.fs.revision_root(fs_ptr, _revision)
328 root = svn.fs.revision_root(fs_ptr, _revision)
329 node = svn.fs.check_path(root, path)
329 node = svn.fs.check_path(root, path)
330 return NODE_TYPE_MAPPING.get(node, None)
330 return NODE_TYPE_MAPPING.get(node, None)
331 return _get_node_type(repo_id, path, revision)
331 return _get_node_type(repo_id, path, revision)
332
332
333 def get_nodes(self, wire, path, revision=None):
333 def get_nodes(self, wire, path, revision=None):
334
334
335 cache_on, context_uid, repo_id = self._cache_on(wire)
335 cache_on, context_uid, repo_id = self._cache_on(wire)
336 @self.region.conditional_cache_on_arguments(condition=cache_on)
336 @self.region.conditional_cache_on_arguments(condition=cache_on)
337 def _get_nodes(_repo_id, _path, _revision):
337 def _get_nodes(_repo_id, _path, _revision):
338 repo = self._factory.repo(wire)
338 repo = self._factory.repo(wire)
339 fsobj = svn.repos.fs(repo)
339 fsobj = svn.repos.fs(repo)
340 if _revision is None:
340 if _revision is None:
341 _revision = svn.fs.youngest_rev(fsobj)
341 _revision = svn.fs.youngest_rev(fsobj)
342 root = svn.fs.revision_root(fsobj, _revision)
342 root = svn.fs.revision_root(fsobj, _revision)
343 entries = svn.fs.dir_entries(root, path)
343 entries = svn.fs.dir_entries(root, path)
344 result = []
344 result = []
345 for entry_path, entry_info in entries.iteritems():
345 for entry_path, entry_info in entries.iteritems():
346 result.append(
346 result.append(
347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
348 return result
348 return result
349 return _get_nodes(repo_id, path, revision)
349 return _get_nodes(repo_id, path, revision)
350
350
351 def get_file_content(self, wire, path, rev=None):
351 def get_file_content(self, wire, path, rev=None):
352 repo = self._factory.repo(wire)
352 repo = self._factory.repo(wire)
353 fsobj = svn.repos.fs(repo)
353 fsobj = svn.repos.fs(repo)
354 if rev is None:
354 if rev is None:
355 rev = svn.fs.youngest_revision(fsobj)
355 rev = svn.fs.youngest_revision(fsobj)
356 root = svn.fs.revision_root(fsobj, rev)
356 root = svn.fs.revision_root(fsobj, rev)
357 content = svn.core.Stream(svn.fs.file_contents(root, path))
357 content = svn.core.Stream(svn.fs.file_contents(root, path))
358 return content.read()
358 return content.read()
359
359
360 def get_file_size(self, wire, path, revision=None):
360 def get_file_size(self, wire, path, revision=None):
361
361
362 cache_on, context_uid, repo_id = self._cache_on(wire)
362 cache_on, context_uid, repo_id = self._cache_on(wire)
363 @self.region.conditional_cache_on_arguments(condition=cache_on)
363 @self.region.conditional_cache_on_arguments(condition=cache_on)
364 def _get_file_size(_repo_id, _path, _revision):
364 def _get_file_size(_repo_id, _path, _revision):
365 repo = self._factory.repo(wire)
365 repo = self._factory.repo(wire)
366 fsobj = svn.repos.fs(repo)
366 fsobj = svn.repos.fs(repo)
367 if _revision is None:
367 if _revision is None:
368 _revision = svn.fs.youngest_revision(fsobj)
368 _revision = svn.fs.youngest_revision(fsobj)
369 root = svn.fs.revision_root(fsobj, _revision)
369 root = svn.fs.revision_root(fsobj, _revision)
370 size = svn.fs.file_length(root, path)
370 size = svn.fs.file_length(root, path)
371 return size
371 return size
372 return _get_file_size(repo_id, path, revision)
372 return _get_file_size(repo_id, path, revision)
373
373
374 def create_repository(self, wire, compatible_version=None):
374 def create_repository(self, wire, compatible_version=None):
375 log.info('Creating Subversion repository in path "%s"', wire['path'])
375 log.info('Creating Subversion repository in path "%s"', wire['path'])
376 self._factory.repo(wire, create=True,
376 self._factory.repo(wire, create=True,
377 compatible_version=compatible_version)
377 compatible_version=compatible_version)
378
378
379 def get_url_and_credentials(self, src_url):
379 def get_url_and_credentials(self, src_url):
380 obj = urlparse.urlparse(src_url)
380 obj = urlparse.urlparse(src_url)
381 username = obj.username or None
381 username = obj.username or None
382 password = obj.password or None
382 password = obj.password or None
383 return username, password, src_url
383 return username, password, src_url
384
384
385 def import_remote_repository(self, wire, src_url):
385 def import_remote_repository(self, wire, src_url):
386 repo_path = wire['path']
386 repo_path = wire['path']
387 if not self.is_path_valid_repository(wire, repo_path):
387 if not self.is_path_valid_repository(wire, repo_path):
388 raise Exception(
388 raise Exception(
389 "Path %s is not a valid Subversion repository." % repo_path)
389 "Path %s is not a valid Subversion repository." % repo_path)
390
390
391 username, password, src_url = self.get_url_and_credentials(src_url)
391 username, password, src_url = self.get_url_and_credentials(src_url)
392 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
392 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
393 '--trust-server-cert-failures=unknown-ca']
393 '--trust-server-cert-failures=unknown-ca']
394 if username and password:
394 if username and password:
395 rdump_cmd += ['--username', username, '--password', password]
395 rdump_cmd += ['--username', username, '--password', password]
396 rdump_cmd += [src_url]
396 rdump_cmd += [src_url]
397
397
398 rdump = subprocess.Popen(
398 rdump = subprocess.Popen(
399 rdump_cmd,
399 rdump_cmd,
400 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
400 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
401 load = subprocess.Popen(
401 load = subprocess.Popen(
402 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
402 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
403
403
404 # TODO: johbo: This can be a very long operation, might be better
404 # TODO: johbo: This can be a very long operation, might be better
405 # to track some kind of status and provide an api to check if the
405 # to track some kind of status and provide an api to check if the
406 # import is done.
406 # import is done.
407 rdump.wait()
407 rdump.wait()
408 load.wait()
408 load.wait()
409
409
410 log.debug('Return process ended with code: %s', rdump.returncode)
410 log.debug('Return process ended with code: %s', rdump.returncode)
411 if rdump.returncode != 0:
411 if rdump.returncode != 0:
412 errors = rdump.stderr.read()
412 errors = rdump.stderr.read()
413 log.error('svnrdump dump failed: statuscode %s: message: %s',
413 log.error('svnrdump dump failed: statuscode %s: message: %s',
414 rdump.returncode, errors)
414 rdump.returncode, errors)
415 reason = 'UNKNOWN'
415 reason = 'UNKNOWN'
416 if 'svnrdump: E230001:' in errors:
416 if 'svnrdump: E230001:' in errors:
417 reason = 'INVALID_CERTIFICATE'
417 reason = 'INVALID_CERTIFICATE'
418
418
419 if reason == 'UNKNOWN':
419 if reason == 'UNKNOWN':
420 reason = 'UNKNOWN:{}'.format(errors)
420 reason = 'UNKNOWN:{}'.format(errors)
421 raise Exception(
421 raise Exception(
422 'Failed to dump the remote repository from %s. Reason:%s' % (
422 'Failed to dump the remote repository from %s. Reason:%s' % (
423 src_url, reason))
423 src_url, reason))
424 if load.returncode != 0:
424 if load.returncode != 0:
425 raise Exception(
425 raise Exception(
426 'Failed to load the dump of remote repository from %s.' %
426 'Failed to load the dump of remote repository from %s.' %
427 (src_url, ))
427 (src_url, ))
428
428
429 def commit(self, wire, message, author, timestamp, updated, removed):
429 def commit(self, wire, message, author, timestamp, updated, removed):
430 assert isinstance(message, str)
430 assert isinstance(message, str)
431 assert isinstance(author, str)
431 assert isinstance(author, str)
432
432
433 repo = self._factory.repo(wire)
433 repo = self._factory.repo(wire)
434 fsobj = svn.repos.fs(repo)
434 fsobj = svn.repos.fs(repo)
435
435
436 rev = svn.fs.youngest_rev(fsobj)
436 rev = svn.fs.youngest_rev(fsobj)
437 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
437 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
438 txn_root = svn.fs.txn_root(txn)
438 txn_root = svn.fs.txn_root(txn)
439
439
440 for node in updated:
440 for node in updated:
441 TxnNodeProcessor(node, txn_root).update()
441 TxnNodeProcessor(node, txn_root).update()
442 for node in removed:
442 for node in removed:
443 TxnNodeProcessor(node, txn_root).remove()
443 TxnNodeProcessor(node, txn_root).remove()
444
444
445 commit_id = svn.repos.fs_commit_txn(repo, txn)
445 commit_id = svn.repos.fs_commit_txn(repo, txn)
446
446
447 if timestamp:
447 if timestamp:
448 apr_time = apr_time_t(timestamp)
448 apr_time = apr_time_t(timestamp)
449 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
449 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
450 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
450 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
451
451
452 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
452 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
453 return commit_id
453 return commit_id
454
454
455 def diff(self, wire, rev1, rev2, path1=None, path2=None,
455 def diff(self, wire, rev1, rev2, path1=None, path2=None,
456 ignore_whitespace=False, context=3):
456 ignore_whitespace=False, context=3):
457
457
458 wire.update(cache=False)
458 wire.update(cache=False)
459 repo = self._factory.repo(wire)
459 repo = self._factory.repo(wire)
460 diff_creator = SvnDiffer(
460 diff_creator = SvnDiffer(
461 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
461 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
462 try:
462 try:
463 return diff_creator.generate_diff()
463 return diff_creator.generate_diff()
464 except svn.core.SubversionException as e:
464 except svn.core.SubversionException as e:
465 log.exception(
465 log.exception(
466 "Error during diff operation operation. "
466 "Error during diff operation operation. "
467 "Path might not exist %s, %s" % (path1, path2))
467 "Path might not exist %s, %s" % (path1, path2))
468 return ""
468 return ""
469
469
470 @reraise_safe_exceptions
470 @reraise_safe_exceptions
471 def is_large_file(self, wire, path):
471 def is_large_file(self, wire, path):
472 return False
472 return False
473
473
474 @reraise_safe_exceptions
474 @reraise_safe_exceptions
475 def is_binary(self, wire, rev, path):
476 cache_on, context_uid, repo_id = self._cache_on(wire)
477
478 @self.region.conditional_cache_on_arguments(condition=cache_on)
479 def _is_binary(_repo_id, _rev, _path):
480 raw_bytes = self.get_file_content(wire, path, rev)
481 return raw_bytes and '\0' in raw_bytes
482
483 return _is_binary(repo_id, rev, path)
484
485 @reraise_safe_exceptions
475 def run_svn_command(self, wire, cmd, **opts):
486 def run_svn_command(self, wire, cmd, **opts):
476 path = wire.get('path', None)
487 path = wire.get('path', None)
477
488
478 if path and os.path.isdir(path):
489 if path and os.path.isdir(path):
479 opts['cwd'] = path
490 opts['cwd'] = path
480
491
481 safe_call = False
492 safe_call = False
482 if '_safe' in opts:
493 if '_safe' in opts:
483 safe_call = True
494 safe_call = True
484
495
485 svnenv = os.environ.copy()
496 svnenv = os.environ.copy()
486 svnenv.update(opts.pop('extra_env', {}))
497 svnenv.update(opts.pop('extra_env', {}))
487
498
488 _opts = {'env': svnenv, 'shell': False}
499 _opts = {'env': svnenv, 'shell': False}
489
500
490 try:
501 try:
491 _opts.update(opts)
502 _opts.update(opts)
492 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
503 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
493
504
494 return ''.join(p), ''.join(p.error)
505 return ''.join(p), ''.join(p.error)
495 except (EnvironmentError, OSError) as err:
506 except (EnvironmentError, OSError) as err:
496 cmd = ' '.join(cmd) # human friendly CMD
507 cmd = ' '.join(cmd) # human friendly CMD
497 tb_err = ("Couldn't run svn command (%s).\n"
508 tb_err = ("Couldn't run svn command (%s).\n"
498 "Original error was:%s\n"
509 "Original error was:%s\n"
499 "Call options:%s\n"
510 "Call options:%s\n"
500 % (cmd, err, _opts))
511 % (cmd, err, _opts))
501 log.exception(tb_err)
512 log.exception(tb_err)
502 if safe_call:
513 if safe_call:
503 return '', err
514 return '', err
504 else:
515 else:
505 raise exceptions.VcsException()(tb_err)
516 raise exceptions.VcsException()(tb_err)
506
517
507 @reraise_safe_exceptions
518 @reraise_safe_exceptions
508 def install_hooks(self, wire, force=False):
519 def install_hooks(self, wire, force=False):
509 from vcsserver.hook_utils import install_svn_hooks
520 from vcsserver.hook_utils import install_svn_hooks
510 repo_path = wire['path']
521 repo_path = wire['path']
511 binary_dir = settings.BINARY_DIR
522 binary_dir = settings.BINARY_DIR
512 executable = None
523 executable = None
513 if binary_dir:
524 if binary_dir:
514 executable = os.path.join(binary_dir, 'python')
525 executable = os.path.join(binary_dir, 'python')
515 return install_svn_hooks(
526 return install_svn_hooks(
516 repo_path, executable=executable, force_create=force)
527 repo_path, executable=executable, force_create=force)
517
528
518 @reraise_safe_exceptions
529 @reraise_safe_exceptions
519 def get_hooks_info(self, wire):
530 def get_hooks_info(self, wire):
520 from vcsserver.hook_utils import (
531 from vcsserver.hook_utils import (
521 get_svn_pre_hook_version, get_svn_post_hook_version)
532 get_svn_pre_hook_version, get_svn_post_hook_version)
522 repo_path = wire['path']
533 repo_path = wire['path']
523 return {
534 return {
524 'pre_version': get_svn_pre_hook_version(repo_path),
535 'pre_version': get_svn_pre_hook_version(repo_path),
525 'post_version': get_svn_post_hook_version(repo_path),
536 'post_version': get_svn_post_hook_version(repo_path),
526 }
537 }
527
538
528
539
529 class SvnDiffer(object):
540 class SvnDiffer(object):
530 """
541 """
531 Utility to create diffs based on difflib and the Subversion api
542 Utility to create diffs based on difflib and the Subversion api
532 """
543 """
533
544
534 binary_content = False
545 binary_content = False
535
546
536 def __init__(
547 def __init__(
537 self, repo, src_rev, src_path, tgt_rev, tgt_path,
548 self, repo, src_rev, src_path, tgt_rev, tgt_path,
538 ignore_whitespace, context):
549 ignore_whitespace, context):
539 self.repo = repo
550 self.repo = repo
540 self.ignore_whitespace = ignore_whitespace
551 self.ignore_whitespace = ignore_whitespace
541 self.context = context
552 self.context = context
542
553
543 fsobj = svn.repos.fs(repo)
554 fsobj = svn.repos.fs(repo)
544
555
545 self.tgt_rev = tgt_rev
556 self.tgt_rev = tgt_rev
546 self.tgt_path = tgt_path or ''
557 self.tgt_path = tgt_path or ''
547 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
558 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
548 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
559 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
549
560
550 self.src_rev = src_rev
561 self.src_rev = src_rev
551 self.src_path = src_path or self.tgt_path
562 self.src_path = src_path or self.tgt_path
552 self.src_root = svn.fs.revision_root(fsobj, src_rev)
563 self.src_root = svn.fs.revision_root(fsobj, src_rev)
553 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
564 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
554
565
555 self._validate()
566 self._validate()
556
567
557 def _validate(self):
568 def _validate(self):
558 if (self.tgt_kind != svn.core.svn_node_none and
569 if (self.tgt_kind != svn.core.svn_node_none and
559 self.src_kind != svn.core.svn_node_none and
570 self.src_kind != svn.core.svn_node_none and
560 self.src_kind != self.tgt_kind):
571 self.src_kind != self.tgt_kind):
561 # TODO: johbo: proper error handling
572 # TODO: johbo: proper error handling
562 raise Exception(
573 raise Exception(
563 "Source and target are not compatible for diff generation. "
574 "Source and target are not compatible for diff generation. "
564 "Source type: %s, target type: %s" %
575 "Source type: %s, target type: %s" %
565 (self.src_kind, self.tgt_kind))
576 (self.src_kind, self.tgt_kind))
566
577
567 def generate_diff(self):
578 def generate_diff(self):
568 buf = StringIO.StringIO()
579 buf = StringIO.StringIO()
569 if self.tgt_kind == svn.core.svn_node_dir:
580 if self.tgt_kind == svn.core.svn_node_dir:
570 self._generate_dir_diff(buf)
581 self._generate_dir_diff(buf)
571 else:
582 else:
572 self._generate_file_diff(buf)
583 self._generate_file_diff(buf)
573 return buf.getvalue()
584 return buf.getvalue()
574
585
575 def _generate_dir_diff(self, buf):
586 def _generate_dir_diff(self, buf):
576 editor = DiffChangeEditor()
587 editor = DiffChangeEditor()
577 editor_ptr, editor_baton = svn.delta.make_editor(editor)
588 editor_ptr, editor_baton = svn.delta.make_editor(editor)
578 svn.repos.dir_delta2(
589 svn.repos.dir_delta2(
579 self.src_root,
590 self.src_root,
580 self.src_path,
591 self.src_path,
581 '', # src_entry
592 '', # src_entry
582 self.tgt_root,
593 self.tgt_root,
583 self.tgt_path,
594 self.tgt_path,
584 editor_ptr, editor_baton,
595 editor_ptr, editor_baton,
585 authorization_callback_allow_all,
596 authorization_callback_allow_all,
586 False, # text_deltas
597 False, # text_deltas
587 svn.core.svn_depth_infinity, # depth
598 svn.core.svn_depth_infinity, # depth
588 False, # entry_props
599 False, # entry_props
589 False, # ignore_ancestry
600 False, # ignore_ancestry
590 )
601 )
591
602
592 for path, __, change in sorted(editor.changes):
603 for path, __, change in sorted(editor.changes):
593 self._generate_node_diff(
604 self._generate_node_diff(
594 buf, change, path, self.tgt_path, path, self.src_path)
605 buf, change, path, self.tgt_path, path, self.src_path)
595
606
596 def _generate_file_diff(self, buf):
607 def _generate_file_diff(self, buf):
597 change = None
608 change = None
598 if self.src_kind == svn.core.svn_node_none:
609 if self.src_kind == svn.core.svn_node_none:
599 change = "add"
610 change = "add"
600 elif self.tgt_kind == svn.core.svn_node_none:
611 elif self.tgt_kind == svn.core.svn_node_none:
601 change = "delete"
612 change = "delete"
602 tgt_base, tgt_path = vcspath.split(self.tgt_path)
613 tgt_base, tgt_path = vcspath.split(self.tgt_path)
603 src_base, src_path = vcspath.split(self.src_path)
614 src_base, src_path = vcspath.split(self.src_path)
604 self._generate_node_diff(
615 self._generate_node_diff(
605 buf, change, tgt_path, tgt_base, src_path, src_base)
616 buf, change, tgt_path, tgt_base, src_path, src_base)
606
617
607 def _generate_node_diff(
618 def _generate_node_diff(
608 self, buf, change, tgt_path, tgt_base, src_path, src_base):
619 self, buf, change, tgt_path, tgt_base, src_path, src_base):
609
620
610 if self.src_rev == self.tgt_rev and tgt_base == src_base:
621 if self.src_rev == self.tgt_rev and tgt_base == src_base:
611 # makes consistent behaviour with git/hg to return empty diff if
622 # makes consistent behaviour with git/hg to return empty diff if
612 # we compare same revisions
623 # we compare same revisions
613 return
624 return
614
625
615 tgt_full_path = vcspath.join(tgt_base, tgt_path)
626 tgt_full_path = vcspath.join(tgt_base, tgt_path)
616 src_full_path = vcspath.join(src_base, src_path)
627 src_full_path = vcspath.join(src_base, src_path)
617
628
618 self.binary_content = False
629 self.binary_content = False
619 mime_type = self._get_mime_type(tgt_full_path)
630 mime_type = self._get_mime_type(tgt_full_path)
620
631
621 if mime_type and not mime_type.startswith('text'):
632 if mime_type and not mime_type.startswith('text'):
622 self.binary_content = True
633 self.binary_content = True
623 buf.write("=" * 67 + '\n')
634 buf.write("=" * 67 + '\n')
624 buf.write("Cannot display: file marked as a binary type.\n")
635 buf.write("Cannot display: file marked as a binary type.\n")
625 buf.write("svn:mime-type = %s\n" % mime_type)
636 buf.write("svn:mime-type = %s\n" % mime_type)
626 buf.write("Index: %s\n" % (tgt_path, ))
637 buf.write("Index: %s\n" % (tgt_path, ))
627 buf.write("=" * 67 + '\n')
638 buf.write("=" * 67 + '\n')
628 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
639 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
629 'tgt_path': tgt_path})
640 'tgt_path': tgt_path})
630
641
631 if change == 'add':
642 if change == 'add':
632 # TODO: johbo: SVN is missing a zero here compared to git
643 # TODO: johbo: SVN is missing a zero here compared to git
633 buf.write("new file mode 10644\n")
644 buf.write("new file mode 10644\n")
634
645
635 #TODO(marcink): intro to binary detection of svn patches
646 #TODO(marcink): intro to binary detection of svn patches
636 # if self.binary_content:
647 # if self.binary_content:
637 # buf.write('GIT binary patch\n')
648 # buf.write('GIT binary patch\n')
638
649
639 buf.write("--- /dev/null\t(revision 0)\n")
650 buf.write("--- /dev/null\t(revision 0)\n")
640 src_lines = []
651 src_lines = []
641 else:
652 else:
642 if change == 'delete':
653 if change == 'delete':
643 buf.write("deleted file mode 10644\n")
654 buf.write("deleted file mode 10644\n")
644
655
645 #TODO(marcink): intro to binary detection of svn patches
656 #TODO(marcink): intro to binary detection of svn patches
646 # if self.binary_content:
657 # if self.binary_content:
647 # buf.write('GIT binary patch\n')
658 # buf.write('GIT binary patch\n')
648
659
649 buf.write("--- a/%s\t(revision %s)\n" % (
660 buf.write("--- a/%s\t(revision %s)\n" % (
650 src_path, self.src_rev))
661 src_path, self.src_rev))
651 src_lines = self._svn_readlines(self.src_root, src_full_path)
662 src_lines = self._svn_readlines(self.src_root, src_full_path)
652
663
653 if change == 'delete':
664 if change == 'delete':
654 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
665 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
655 tgt_lines = []
666 tgt_lines = []
656 else:
667 else:
657 buf.write("+++ b/%s\t(revision %s)\n" % (
668 buf.write("+++ b/%s\t(revision %s)\n" % (
658 tgt_path, self.tgt_rev))
669 tgt_path, self.tgt_rev))
659 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
670 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
660
671
661 if not self.binary_content:
672 if not self.binary_content:
662 udiff = svn_diff.unified_diff(
673 udiff = svn_diff.unified_diff(
663 src_lines, tgt_lines, context=self.context,
674 src_lines, tgt_lines, context=self.context,
664 ignore_blank_lines=self.ignore_whitespace,
675 ignore_blank_lines=self.ignore_whitespace,
665 ignore_case=False,
676 ignore_case=False,
666 ignore_space_changes=self.ignore_whitespace)
677 ignore_space_changes=self.ignore_whitespace)
667 buf.writelines(udiff)
678 buf.writelines(udiff)
668
679
669 def _get_mime_type(self, path):
680 def _get_mime_type(self, path):
670 try:
681 try:
671 mime_type = svn.fs.node_prop(
682 mime_type = svn.fs.node_prop(
672 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
683 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
673 except svn.core.SubversionException:
684 except svn.core.SubversionException:
674 mime_type = svn.fs.node_prop(
685 mime_type = svn.fs.node_prop(
675 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
686 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
676 return mime_type
687 return mime_type
677
688
678 def _svn_readlines(self, fs_root, node_path):
689 def _svn_readlines(self, fs_root, node_path):
679 if self.binary_content:
690 if self.binary_content:
680 return []
691 return []
681 node_kind = svn.fs.check_path(fs_root, node_path)
692 node_kind = svn.fs.check_path(fs_root, node_path)
682 if node_kind not in (
693 if node_kind not in (
683 svn.core.svn_node_file, svn.core.svn_node_symlink):
694 svn.core.svn_node_file, svn.core.svn_node_symlink):
684 return []
695 return []
685 content = svn.core.Stream(
696 content = svn.core.Stream(
686 svn.fs.file_contents(fs_root, node_path)).read()
697 svn.fs.file_contents(fs_root, node_path)).read()
687 return content.splitlines(True)
698 return content.splitlines(True)
688
699
689
700
690 class DiffChangeEditor(svn.delta.Editor):
701 class DiffChangeEditor(svn.delta.Editor):
691 """
702 """
692 Records changes between two given revisions
703 Records changes between two given revisions
693 """
704 """
694
705
695 def __init__(self):
706 def __init__(self):
696 self.changes = []
707 self.changes = []
697
708
698 def delete_entry(self, path, revision, parent_baton, pool=None):
709 def delete_entry(self, path, revision, parent_baton, pool=None):
699 self.changes.append((path, None, 'delete'))
710 self.changes.append((path, None, 'delete'))
700
711
701 def add_file(
712 def add_file(
702 self, path, parent_baton, copyfrom_path, copyfrom_revision,
713 self, path, parent_baton, copyfrom_path, copyfrom_revision,
703 file_pool=None):
714 file_pool=None):
704 self.changes.append((path, 'file', 'add'))
715 self.changes.append((path, 'file', 'add'))
705
716
706 def open_file(self, path, parent_baton, base_revision, file_pool=None):
717 def open_file(self, path, parent_baton, base_revision, file_pool=None):
707 self.changes.append((path, 'file', 'change'))
718 self.changes.append((path, 'file', 'change'))
708
719
709
720
710 def authorization_callback_allow_all(root, path, pool):
721 def authorization_callback_allow_all(root, path, pool):
711 return True
722 return True
712
723
713
724
714 class TxnNodeProcessor(object):
725 class TxnNodeProcessor(object):
715 """
726 """
716 Utility to process the change of one node within a transaction root.
727 Utility to process the change of one node within a transaction root.
717
728
718 It encapsulates the knowledge of how to add, update or remove
729 It encapsulates the knowledge of how to add, update or remove
719 a node for a given transaction root. The purpose is to support the method
730 a node for a given transaction root. The purpose is to support the method
720 `SvnRemote.commit`.
731 `SvnRemote.commit`.
721 """
732 """
722
733
723 def __init__(self, node, txn_root):
734 def __init__(self, node, txn_root):
724 assert isinstance(node['path'], str)
735 assert isinstance(node['path'], str)
725
736
726 self.node = node
737 self.node = node
727 self.txn_root = txn_root
738 self.txn_root = txn_root
728
739
729 def update(self):
740 def update(self):
730 self._ensure_parent_dirs()
741 self._ensure_parent_dirs()
731 self._add_file_if_node_does_not_exist()
742 self._add_file_if_node_does_not_exist()
732 self._update_file_content()
743 self._update_file_content()
733 self._update_file_properties()
744 self._update_file_properties()
734
745
735 def remove(self):
746 def remove(self):
736 svn.fs.delete(self.txn_root, self.node['path'])
747 svn.fs.delete(self.txn_root, self.node['path'])
737 # TODO: Clean up directory if empty
748 # TODO: Clean up directory if empty
738
749
739 def _ensure_parent_dirs(self):
750 def _ensure_parent_dirs(self):
740 curdir = vcspath.dirname(self.node['path'])
751 curdir = vcspath.dirname(self.node['path'])
741 dirs_to_create = []
752 dirs_to_create = []
742 while not self._svn_path_exists(curdir):
753 while not self._svn_path_exists(curdir):
743 dirs_to_create.append(curdir)
754 dirs_to_create.append(curdir)
744 curdir = vcspath.dirname(curdir)
755 curdir = vcspath.dirname(curdir)
745
756
746 for curdir in reversed(dirs_to_create):
757 for curdir in reversed(dirs_to_create):
747 log.debug('Creating missing directory "%s"', curdir)
758 log.debug('Creating missing directory "%s"', curdir)
748 svn.fs.make_dir(self.txn_root, curdir)
759 svn.fs.make_dir(self.txn_root, curdir)
749
760
750 def _svn_path_exists(self, path):
761 def _svn_path_exists(self, path):
751 path_status = svn.fs.check_path(self.txn_root, path)
762 path_status = svn.fs.check_path(self.txn_root, path)
752 return path_status != svn.core.svn_node_none
763 return path_status != svn.core.svn_node_none
753
764
754 def _add_file_if_node_does_not_exist(self):
765 def _add_file_if_node_does_not_exist(self):
755 kind = svn.fs.check_path(self.txn_root, self.node['path'])
766 kind = svn.fs.check_path(self.txn_root, self.node['path'])
756 if kind == svn.core.svn_node_none:
767 if kind == svn.core.svn_node_none:
757 svn.fs.make_file(self.txn_root, self.node['path'])
768 svn.fs.make_file(self.txn_root, self.node['path'])
758
769
759 def _update_file_content(self):
770 def _update_file_content(self):
760 assert isinstance(self.node['content'], str)
771 assert isinstance(self.node['content'], str)
761 handler, baton = svn.fs.apply_textdelta(
772 handler, baton = svn.fs.apply_textdelta(
762 self.txn_root, self.node['path'], None, None)
773 self.txn_root, self.node['path'], None, None)
763 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
774 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
764
775
765 def _update_file_properties(self):
776 def _update_file_properties(self):
766 properties = self.node.get('properties', {})
777 properties = self.node.get('properties', {})
767 for key, value in properties.iteritems():
778 for key, value in properties.iteritems():
768 svn.fs.change_node_prop(
779 svn.fs.change_node_prop(
769 self.txn_root, self.node['path'], key, value)
780 self.txn_root, self.node['path'], key, value)
770
781
771
782
772 def apr_time_t(timestamp):
783 def apr_time_t(timestamp):
773 """
784 """
774 Convert a Python timestamp into APR timestamp type apr_time_t
785 Convert a Python timestamp into APR timestamp type apr_time_t
775 """
786 """
776 return timestamp * 1E6
787 return timestamp * 1E6
777
788
778
789
779 def svn_opt_revision_value_t(num):
790 def svn_opt_revision_value_t(num):
780 """
791 """
781 Put `num` into a `svn_opt_revision_value_t` structure.
792 Put `num` into a `svn_opt_revision_value_t` structure.
782 """
793 """
783 value = svn.core.svn_opt_revision_value_t()
794 value = svn.core.svn_opt_revision_value_t()
784 value.number = num
795 value.number = num
785 revision = svn.core.svn_opt_revision_t()
796 revision = svn.core.svn_opt_revision_t()
786 revision.kind = svn.core.svn_opt_revision_number
797 revision.kind = svn.core.svn_opt_revision_number
787 revision.value = value
798 revision.value = value
788 return revision
799 return revision
General Comments 0
You need to be logged in to leave comments. Login now