##// END OF EJS Templates
vcs: use single base for shared functions of Remote objects
marcink -
r749:34388801 default
parent child Browse files
Show More
@@ -0,0 +1,28 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18
class RemoteBase(object):
    """Shared helpers for the per-VCS ``*Remote`` wrapper objects."""

    # The null SHA (40 zeros) used to denote an empty/initial commit.
    EMPTY_COMMIT = '0' * 40

    def _cache_on(self, wire):
        """Derive caching parameters from the ``wire`` call descriptor.

        Returns a ``(cache_on, context_uid, repo_id)`` tuple. Caching is
        considered "on" only when the wire carries a non-empty context AND
        caching has not been explicitly disabled via ``wire['cache']``.
        """
        ctx = wire.get('context', '')
        ctx_uid = '{}'.format(ctx)
        use_cache = wire.get('cache', True)
        return ctx and use_cache, ctx_uid, wire.get('repo_id', '')
@@ -1,1094 +1,1085 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib
25 import urllib
26 import urllib2
26 import urllib2
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from dulwich import index, objects
32 from dulwich import index, objects
33 from dulwich.client import HttpGitClient, LocalGitClient
33 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.errors import (
34 from dulwich.errors import (
35 NotGitRepository, ChecksumMismatch, WrongObjectException,
35 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 MissingCommitError, ObjectMissing, HangupException,
36 MissingCommitError, ObjectMissing, HangupException,
37 UnexpectedCommandError)
37 UnexpectedCommandError)
38 from dulwich.repo import Repo as DulwichRepo
38 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.server import update_server_info
39 from dulwich.server import update_server_info
40
40
41 from vcsserver import exceptions, settings, subprocessio
41 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver.utils import safe_str, safe_int
42 from vcsserver.utils import safe_str, safe_int
43 from vcsserver.base import RepoFactory, obfuscate_qs
43 from vcsserver.base import RepoFactory, obfuscate_qs
44 from vcsserver.hgcompat import (
44 from vcsserver.hgcompat import (
45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 from vcsserver.git_lfs.lib import LFSOidStore
46 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.vcs_base import RemoteBase
47
48
48 DIR_STAT = stat.S_IFDIR
49 DIR_STAT = stat.S_IFDIR
49 FILE_MODE = stat.S_IFMT
50 FILE_MODE = stat.S_IFMT
50 GIT_LINK = objects.S_IFGITLINK
51 GIT_LINK = objects.S_IFGITLINK
51 PEELED_REF_MARKER = '^{}'
52 PEELED_REF_MARKER = '^{}'
52
53
53
54
54 log = logging.getLogger(__name__)
55 log = logging.getLogger(__name__)
55
56
56
57
def str_to_dulwich(value):
    """Decode *value* into the text type dulwich expects.

    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    decoded = value.decode(settings.WIRE_ENCODING)
    return decoded
62
63
63
64
def reraise_safe_exceptions(func):
    """Decorator translating Dulwich exceptions into neutral vcsserver ones."""

    # Object-lookup failures are mapped to LookupException; protocol-level
    # failures to VcsException. Anything else propagates untouched.
    lookup_errors = (
        ChecksumMismatch, WrongObjectException, MissingCommitError,
        ObjectMissing)

    @wraps(func)
    def _safe_call(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except lookup_errors as e:
            raise exceptions.LookupException(org_exc=e)(safe_str(e))
        except (HangupException, UnexpectedCommandError) as e:
            raise exceptions.VcsException(org_exc=e)(safe_str(e))
        except Exception:
            # NOTE(marcink): because of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            #if not hasattr(e, '_vcs_kind'):
            #log.exception("Unhandled exception in git remote call")
            #raise_from_original(exceptions.UnhandledException)
            raise
    return _safe_call
86
87
87
88
class Repo(DulwichRepo):
    """
    Thin wrapper around the dulwich ``Repo`` class.

    Dulwich sometimes keeps ``.idx`` file descriptors open, which eventually
    leads to "Too many open files" errors. Closing every opened descriptor
    when the repo object is destroyed avoids that.
    """
    def __del__(self):
        # ``object_store`` can be missing when __init__ failed part-way;
        # only a fully constructed repo has anything to close.
        if not hasattr(self, 'object_store'):
            return
        self.close()
99
100
100
101
class Repository(LibGit2Repo):
    """Context-manager enabled variant of the libgit2 ``Repository``."""

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Release libgit2 native resources eagerly instead of waiting for GC.
        self.free()
108
109
109
110
class GitFactory(RepoFactory):
    """Factory producing git repository objects for a given wire descriptor."""

    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        # Two backends are supported: pygit2 (libgit2) and dulwich; the
        # caller chooses which implementation it needs.
        if use_libgit2:
            return Repository(wire['path'])
        repo_path = str_to_dulwich(wire['path'])
        return Repo(repo_path)

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        """Shortcut for a pygit2-backed repository instance."""
        return self.repo(wire, use_libgit2=True)
128
129
129
130
130 class GitRemote(object):
131 class GitRemote(RemoteBase):
131 EMPTY_COMMIT = '0' * 40
132
132
133 def __init__(self, factory):
133 def __init__(self, factory):
134 self._factory = factory
134 self._factory = factory
135 self._bulk_methods = {
135 self._bulk_methods = {
136 "date": self.date,
136 "date": self.date,
137 "author": self.author,
137 "author": self.author,
138 "branch": self.branch,
138 "branch": self.branch,
139 "message": self.message,
139 "message": self.message,
140 "parents": self.parents,
140 "parents": self.parents,
141 "_commit": self.revision,
141 "_commit": self.revision,
142 }
142 }
143 self.region = self._factory._cache_region
143 self.region = self._factory._cache_region
144
144
145 def _wire_to_config(self, wire):
145 def _wire_to_config(self, wire):
146 if 'config' in wire:
146 if 'config' in wire:
147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
148 return {}
148 return {}
149
149
150 def _remote_conf(self, config):
150 def _remote_conf(self, config):
151 params = [
151 params = [
152 '-c', 'core.askpass=""',
152 '-c', 'core.askpass=""',
153 ]
153 ]
154 ssl_cert_dir = config.get('vcs_ssl_dir')
154 ssl_cert_dir = config.get('vcs_ssl_dir')
155 if ssl_cert_dir:
155 if ssl_cert_dir:
156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
157 return params
157 return params
158
158
159 def _cache_on(self, wire):
160 context = wire.get('context', '')
161 context_uid = '{}'.format(context)
162 repo_id = wire.get('repo_id', '')
163 cache = wire.get('cache', True)
164 cache_on = context and cache
165 return cache_on, context_uid, repo_id
166
167 @reraise_safe_exceptions
159 @reraise_safe_exceptions
168 def discover_git_version(self):
160 def discover_git_version(self):
169 stdout, _ = self.run_git_command(
161 stdout, _ = self.run_git_command(
170 {}, ['--version'], _bare=True, _safe=True)
162 {}, ['--version'], _bare=True, _safe=True)
171 prefix = 'git version'
163 prefix = 'git version'
172 if stdout.startswith(prefix):
164 if stdout.startswith(prefix):
173 stdout = stdout[len(prefix):]
165 stdout = stdout[len(prefix):]
174 return stdout.strip()
166 return stdout.strip()
175
167
176 @reraise_safe_exceptions
168 @reraise_safe_exceptions
177 def is_empty(self, wire):
169 def is_empty(self, wire):
178 repo_init = self._factory.repo_libgit2(wire)
170 repo_init = self._factory.repo_libgit2(wire)
179 with repo_init as repo:
171 with repo_init as repo:
180
172
181 try:
173 try:
182 has_head = repo.head.name
174 has_head = repo.head.name
183 if has_head:
175 if has_head:
184 return False
176 return False
185
177
186 # NOTE(marcink): check again using more expensive method
178 # NOTE(marcink): check again using more expensive method
187 return repo.is_empty
179 return repo.is_empty
188 except Exception:
180 except Exception:
189 pass
181 pass
190
182
191 return True
183 return True
192
184
193 @reraise_safe_exceptions
185 @reraise_safe_exceptions
194 def assert_correct_path(self, wire):
186 def assert_correct_path(self, wire):
195 cache_on, context_uid, repo_id = self._cache_on(wire)
187 cache_on, context_uid, repo_id = self._cache_on(wire)
196 @self.region.conditional_cache_on_arguments(condition=cache_on)
188 @self.region.conditional_cache_on_arguments(condition=cache_on)
197 def _assert_correct_path(_context_uid, _repo_id):
189 def _assert_correct_path(_context_uid, _repo_id):
198 try:
190 try:
199 repo_init = self._factory.repo_libgit2(wire)
191 repo_init = self._factory.repo_libgit2(wire)
200 with repo_init as repo:
192 with repo_init as repo:
201 pass
193 pass
202 except pygit2.GitError:
194 except pygit2.GitError:
203 path = wire.get('path')
195 path = wire.get('path')
204 tb = traceback.format_exc()
196 tb = traceback.format_exc()
205 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
197 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
206 return False
198 return False
207
199
208 return True
200 return True
209 return _assert_correct_path(context_uid, repo_id)
201 return _assert_correct_path(context_uid, repo_id)
210
202
211 @reraise_safe_exceptions
203 @reraise_safe_exceptions
212 def bare(self, wire):
204 def bare(self, wire):
213 repo_init = self._factory.repo_libgit2(wire)
205 repo_init = self._factory.repo_libgit2(wire)
214 with repo_init as repo:
206 with repo_init as repo:
215 return repo.is_bare
207 return repo.is_bare
216
208
217 @reraise_safe_exceptions
209 @reraise_safe_exceptions
218 def blob_as_pretty_string(self, wire, sha):
210 def blob_as_pretty_string(self, wire, sha):
219 repo_init = self._factory.repo_libgit2(wire)
211 repo_init = self._factory.repo_libgit2(wire)
220 with repo_init as repo:
212 with repo_init as repo:
221 blob_obj = repo[sha]
213 blob_obj = repo[sha]
222 blob = blob_obj.data
214 blob = blob_obj.data
223 return blob
215 return blob
224
216
225 @reraise_safe_exceptions
217 @reraise_safe_exceptions
226 def blob_raw_length(self, wire, sha):
218 def blob_raw_length(self, wire, sha):
227 cache_on, context_uid, repo_id = self._cache_on(wire)
219 cache_on, context_uid, repo_id = self._cache_on(wire)
228 @self.region.conditional_cache_on_arguments(condition=cache_on)
220 @self.region.conditional_cache_on_arguments(condition=cache_on)
229 def _blob_raw_length(_repo_id, _sha):
221 def _blob_raw_length(_repo_id, _sha):
230
222
231 repo_init = self._factory.repo_libgit2(wire)
223 repo_init = self._factory.repo_libgit2(wire)
232 with repo_init as repo:
224 with repo_init as repo:
233 blob = repo[sha]
225 blob = repo[sha]
234 return blob.size
226 return blob.size
235
227
236 return _blob_raw_length(repo_id, sha)
228 return _blob_raw_length(repo_id, sha)
237
229
238 def _parse_lfs_pointer(self, raw_content):
230 def _parse_lfs_pointer(self, raw_content):
239
231
240 spec_string = 'version https://git-lfs.github.com/spec'
232 spec_string = 'version https://git-lfs.github.com/spec'
241 if raw_content and raw_content.startswith(spec_string):
233 if raw_content and raw_content.startswith(spec_string):
242 pattern = re.compile(r"""
234 pattern = re.compile(r"""
243 (?:\n)?
235 (?:\n)?
244 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
236 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
245 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
237 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
246 ^size[ ](?P<oid_size>[0-9]+)\n
238 ^size[ ](?P<oid_size>[0-9]+)\n
247 (?:\n)?
239 (?:\n)?
248 """, re.VERBOSE | re.MULTILINE)
240 """, re.VERBOSE | re.MULTILINE)
249 match = pattern.match(raw_content)
241 match = pattern.match(raw_content)
250 if match:
242 if match:
251 return match.groupdict()
243 return match.groupdict()
252
244
253 return {}
245 return {}
254
246
255 @reraise_safe_exceptions
247 @reraise_safe_exceptions
256 def is_large_file(self, wire, commit_id):
248 def is_large_file(self, wire, commit_id):
257
249
258 cache_on, context_uid, repo_id = self._cache_on(wire)
250 cache_on, context_uid, repo_id = self._cache_on(wire)
259 @self.region.conditional_cache_on_arguments(condition=cache_on)
251 @self.region.conditional_cache_on_arguments(condition=cache_on)
260 def _is_large_file(_repo_id, _sha):
252 def _is_large_file(_repo_id, _sha):
261 repo_init = self._factory.repo_libgit2(wire)
253 repo_init = self._factory.repo_libgit2(wire)
262 with repo_init as repo:
254 with repo_init as repo:
263 blob = repo[commit_id]
255 blob = repo[commit_id]
264 if blob.is_binary:
256 if blob.is_binary:
265 return {}
257 return {}
266
258
267 return self._parse_lfs_pointer(blob.data)
259 return self._parse_lfs_pointer(blob.data)
268
260
269 return _is_large_file(repo_id, commit_id)
261 return _is_large_file(repo_id, commit_id)
270
262
271 @reraise_safe_exceptions
263 @reraise_safe_exceptions
272 def in_largefiles_store(self, wire, oid):
264 def in_largefiles_store(self, wire, oid):
273 conf = self._wire_to_config(wire)
265 conf = self._wire_to_config(wire)
274 repo_init = self._factory.repo_libgit2(wire)
266 repo_init = self._factory.repo_libgit2(wire)
275 with repo_init as repo:
267 with repo_init as repo:
276 repo_name = repo.path
268 repo_name = repo.path
277
269
278 store_location = conf.get('vcs_git_lfs_store_location')
270 store_location = conf.get('vcs_git_lfs_store_location')
279 if store_location:
271 if store_location:
280
272
281 store = LFSOidStore(
273 store = LFSOidStore(
282 oid=oid, repo=repo_name, store_location=store_location)
274 oid=oid, repo=repo_name, store_location=store_location)
283 return store.has_oid()
275 return store.has_oid()
284
276
285 return False
277 return False
286
278
287 @reraise_safe_exceptions
279 @reraise_safe_exceptions
288 def store_path(self, wire, oid):
280 def store_path(self, wire, oid):
289 conf = self._wire_to_config(wire)
281 conf = self._wire_to_config(wire)
290 repo_init = self._factory.repo_libgit2(wire)
282 repo_init = self._factory.repo_libgit2(wire)
291 with repo_init as repo:
283 with repo_init as repo:
292 repo_name = repo.path
284 repo_name = repo.path
293
285
294 store_location = conf.get('vcs_git_lfs_store_location')
286 store_location = conf.get('vcs_git_lfs_store_location')
295 if store_location:
287 if store_location:
296 store = LFSOidStore(
288 store = LFSOidStore(
297 oid=oid, repo=repo_name, store_location=store_location)
289 oid=oid, repo=repo_name, store_location=store_location)
298 return store.oid_path
290 return store.oid_path
299 raise ValueError('Unable to fetch oid with path {}'.format(oid))
291 raise ValueError('Unable to fetch oid with path {}'.format(oid))
300
292
301 @reraise_safe_exceptions
293 @reraise_safe_exceptions
302 def bulk_request(self, wire, rev, pre_load):
294 def bulk_request(self, wire, rev, pre_load):
303 cache_on, context_uid, repo_id = self._cache_on(wire)
295 cache_on, context_uid, repo_id = self._cache_on(wire)
304 @self.region.conditional_cache_on_arguments(condition=cache_on)
296 @self.region.conditional_cache_on_arguments(condition=cache_on)
305 def _bulk_request(_repo_id, _rev, _pre_load):
297 def _bulk_request(_repo_id, _rev, _pre_load):
306 result = {}
298 result = {}
307 for attr in pre_load:
299 for attr in pre_load:
308 try:
300 try:
309 method = self._bulk_methods[attr]
301 method = self._bulk_methods[attr]
310 args = [wire, rev]
302 args = [wire, rev]
311 result[attr] = method(*args)
303 result[attr] = method(*args)
312 except KeyError as e:
304 except KeyError as e:
313 raise exceptions.VcsException(e)(
305 raise exceptions.VcsException(e)(
314 "Unknown bulk attribute: %s" % attr)
306 "Unknown bulk attribute: %s" % attr)
315 return result
307 return result
316
308
317 return _bulk_request(repo_id, rev, sorted(pre_load))
309 return _bulk_request(repo_id, rev, sorted(pre_load))
318
310
319 def _build_opener(self, url):
311 def _build_opener(self, url):
320 handlers = []
312 handlers = []
321 url_obj = url_parser(url)
313 url_obj = url_parser(url)
322 _, authinfo = url_obj.authinfo()
314 _, authinfo = url_obj.authinfo()
323
315
324 if authinfo:
316 if authinfo:
325 # create a password manager
317 # create a password manager
326 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
318 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
327 passmgr.add_password(*authinfo)
319 passmgr.add_password(*authinfo)
328
320
329 handlers.extend((httpbasicauthhandler(passmgr),
321 handlers.extend((httpbasicauthhandler(passmgr),
330 httpdigestauthhandler(passmgr)))
322 httpdigestauthhandler(passmgr)))
331
323
332 return urllib2.build_opener(*handlers)
324 return urllib2.build_opener(*handlers)
333
325
334 def _type_id_to_name(self, type_id):
326 def _type_id_to_name(self, type_id):
335 return {
327 return {
336 1: b'commit',
328 1: b'commit',
337 2: b'tree',
329 2: b'tree',
338 3: b'blob',
330 3: b'blob',
339 4: b'tag'
331 4: b'tag'
340 }[type_id]
332 }[type_id]
341
333
342 @reraise_safe_exceptions
334 @reraise_safe_exceptions
343 def check_url(self, url, config):
335 def check_url(self, url, config):
344 url_obj = url_parser(url)
336 url_obj = url_parser(url)
345 test_uri, _ = url_obj.authinfo()
337 test_uri, _ = url_obj.authinfo()
346 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
338 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
347 url_obj.query = obfuscate_qs(url_obj.query)
339 url_obj.query = obfuscate_qs(url_obj.query)
348 cleaned_uri = str(url_obj)
340 cleaned_uri = str(url_obj)
349 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
341 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
350
342
351 if not test_uri.endswith('info/refs'):
343 if not test_uri.endswith('info/refs'):
352 test_uri = test_uri.rstrip('/') + '/info/refs'
344 test_uri = test_uri.rstrip('/') + '/info/refs'
353
345
354 o = self._build_opener(url)
346 o = self._build_opener(url)
355 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
347 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
356
348
357 q = {"service": 'git-upload-pack'}
349 q = {"service": 'git-upload-pack'}
358 qs = '?%s' % urllib.urlencode(q)
350 qs = '?%s' % urllib.urlencode(q)
359 cu = "%s%s" % (test_uri, qs)
351 cu = "%s%s" % (test_uri, qs)
360 req = urllib2.Request(cu, None, {})
352 req = urllib2.Request(cu, None, {})
361
353
362 try:
354 try:
363 log.debug("Trying to open URL %s", cleaned_uri)
355 log.debug("Trying to open URL %s", cleaned_uri)
364 resp = o.open(req)
356 resp = o.open(req)
365 if resp.code != 200:
357 if resp.code != 200:
366 raise exceptions.URLError()('Return Code is not 200')
358 raise exceptions.URLError()('Return Code is not 200')
367 except Exception as e:
359 except Exception as e:
368 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
360 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
369 # means it cannot be cloned
361 # means it cannot be cloned
370 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
362 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
371
363
372 # now detect if it's proper git repo
364 # now detect if it's proper git repo
373 gitdata = resp.read()
365 gitdata = resp.read()
374 if 'service=git-upload-pack' in gitdata:
366 if 'service=git-upload-pack' in gitdata:
375 pass
367 pass
376 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
368 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
377 # old style git can return some other format !
369 # old style git can return some other format !
378 pass
370 pass
379 else:
371 else:
380 raise exceptions.URLError()(
372 raise exceptions.URLError()(
381 "url [%s] does not look like an git" % (cleaned_uri,))
373 "url [%s] does not look like an git" % (cleaned_uri,))
382
374
383 return True
375 return True
384
376
385 @reraise_safe_exceptions
377 @reraise_safe_exceptions
386 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
378 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
387 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
379 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
388 remote_refs = self.pull(wire, url, apply_refs=False)
380 remote_refs = self.pull(wire, url, apply_refs=False)
389 repo = self._factory.repo(wire)
381 repo = self._factory.repo(wire)
390 if isinstance(valid_refs, list):
382 if isinstance(valid_refs, list):
391 valid_refs = tuple(valid_refs)
383 valid_refs = tuple(valid_refs)
392
384
393 for k in remote_refs:
385 for k in remote_refs:
394 # only parse heads/tags and skip so called deferred tags
386 # only parse heads/tags and skip so called deferred tags
395 if k.startswith(valid_refs) and not k.endswith(deferred):
387 if k.startswith(valid_refs) and not k.endswith(deferred):
396 repo[k] = remote_refs[k]
388 repo[k] = remote_refs[k]
397
389
398 if update_after_clone:
390 if update_after_clone:
399 # we want to checkout HEAD
391 # we want to checkout HEAD
400 repo["HEAD"] = remote_refs["HEAD"]
392 repo["HEAD"] = remote_refs["HEAD"]
401 index.build_index_from_tree(repo.path, repo.index_path(),
393 index.build_index_from_tree(repo.path, repo.index_path(),
402 repo.object_store, repo["HEAD"].tree)
394 repo.object_store, repo["HEAD"].tree)
403
395
404 @reraise_safe_exceptions
396 @reraise_safe_exceptions
405 def branch(self, wire, commit_id):
397 def branch(self, wire, commit_id):
406 cache_on, context_uid, repo_id = self._cache_on(wire)
398 cache_on, context_uid, repo_id = self._cache_on(wire)
407 cache_on = False
408 @self.region.conditional_cache_on_arguments(condition=cache_on)
399 @self.region.conditional_cache_on_arguments(condition=cache_on)
409 def _branch(_context_uid, _repo_id, _commit_id):
400 def _branch(_context_uid, _repo_id, _commit_id):
410 regex = re.compile('^refs/heads')
401 regex = re.compile('^refs/heads')
411
402
412 def filter_with(ref):
403 def filter_with(ref):
413 return regex.match(ref[0]) and ref[1] == _commit_id
404 return regex.match(ref[0]) and ref[1] == _commit_id
414
405
415 branches = filter(filter_with, self.get_refs(wire).items())
406 branches = filter(filter_with, self.get_refs(wire).items())
416 return [x[0].split('refs/heads/')[-1] for x in branches]
407 return [x[0].split('refs/heads/')[-1] for x in branches]
417
408
418 return _branch(context_uid, repo_id, commit_id)
409 return _branch(context_uid, repo_id, commit_id)
419
410
420 @reraise_safe_exceptions
411 @reraise_safe_exceptions
421 def commit_branches(self, wire, commit_id):
412 def commit_branches(self, wire, commit_id):
422 cache_on, context_uid, repo_id = self._cache_on(wire)
413 cache_on, context_uid, repo_id = self._cache_on(wire)
423 @self.region.conditional_cache_on_arguments(condition=cache_on)
414 @self.region.conditional_cache_on_arguments(condition=cache_on)
424 def _commit_branches(_context_uid, _repo_id, _commit_id):
415 def _commit_branches(_context_uid, _repo_id, _commit_id):
425 repo_init = self._factory.repo_libgit2(wire)
416 repo_init = self._factory.repo_libgit2(wire)
426 with repo_init as repo:
417 with repo_init as repo:
427 branches = [x for x in repo.branches.with_commit(_commit_id)]
418 branches = [x for x in repo.branches.with_commit(_commit_id)]
428 return branches
419 return branches
429
420
430 return _commit_branches(context_uid, repo_id, commit_id)
421 return _commit_branches(context_uid, repo_id, commit_id)
431
422
432 @reraise_safe_exceptions
423 @reraise_safe_exceptions
433 def add_object(self, wire, content):
424 def add_object(self, wire, content):
434 repo_init = self._factory.repo_libgit2(wire)
425 repo_init = self._factory.repo_libgit2(wire)
435 with repo_init as repo:
426 with repo_init as repo:
436 blob = objects.Blob()
427 blob = objects.Blob()
437 blob.set_raw_string(content)
428 blob.set_raw_string(content)
438 repo.object_store.add_object(blob)
429 repo.object_store.add_object(blob)
439 return blob.id
430 return blob.id
440
431
441 # TODO: this is quite complex, check if that can be simplified
432 # TODO: this is quite complex, check if that can be simplified
442 @reraise_safe_exceptions
433 @reraise_safe_exceptions
443 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
434 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
444 repo = self._factory.repo(wire)
435 repo = self._factory.repo(wire)
445 object_store = repo.object_store
436 object_store = repo.object_store
446
437
447 # Create tree and populates it with blobs
438 # Create tree and populates it with blobs
448 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
439 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
449
440
450 for node in updated:
441 for node in updated:
451 # Compute subdirs if needed
442 # Compute subdirs if needed
452 dirpath, nodename = vcspath.split(node['path'])
443 dirpath, nodename = vcspath.split(node['path'])
453 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
444 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
454 parent = commit_tree
445 parent = commit_tree
455 ancestors = [('', parent)]
446 ancestors = [('', parent)]
456
447
457 # Tries to dig for the deepest existing tree
448 # Tries to dig for the deepest existing tree
458 while dirnames:
449 while dirnames:
459 curdir = dirnames.pop(0)
450 curdir = dirnames.pop(0)
460 try:
451 try:
461 dir_id = parent[curdir][1]
452 dir_id = parent[curdir][1]
462 except KeyError:
453 except KeyError:
463 # put curdir back into dirnames and stops
454 # put curdir back into dirnames and stops
464 dirnames.insert(0, curdir)
455 dirnames.insert(0, curdir)
465 break
456 break
466 else:
457 else:
467 # If found, updates parent
458 # If found, updates parent
468 parent = repo[dir_id]
459 parent = repo[dir_id]
469 ancestors.append((curdir, parent))
460 ancestors.append((curdir, parent))
470 # Now parent is deepest existing tree and we need to create
461 # Now parent is deepest existing tree and we need to create
471 # subtrees for dirnames (in reverse order)
462 # subtrees for dirnames (in reverse order)
472 # [this only applies for nodes from added]
463 # [this only applies for nodes from added]
473 new_trees = []
464 new_trees = []
474
465
475 blob = objects.Blob.from_string(node['content'])
466 blob = objects.Blob.from_string(node['content'])
476
467
477 if dirnames:
468 if dirnames:
478 # If there are trees which should be created we need to build
469 # If there are trees which should be created we need to build
479 # them now (in reverse order)
470 # them now (in reverse order)
480 reversed_dirnames = list(reversed(dirnames))
471 reversed_dirnames = list(reversed(dirnames))
481 curtree = objects.Tree()
472 curtree = objects.Tree()
482 curtree[node['node_path']] = node['mode'], blob.id
473 curtree[node['node_path']] = node['mode'], blob.id
483 new_trees.append(curtree)
474 new_trees.append(curtree)
484 for dirname in reversed_dirnames[:-1]:
475 for dirname in reversed_dirnames[:-1]:
485 newtree = objects.Tree()
476 newtree = objects.Tree()
486 newtree[dirname] = (DIR_STAT, curtree.id)
477 newtree[dirname] = (DIR_STAT, curtree.id)
487 new_trees.append(newtree)
478 new_trees.append(newtree)
488 curtree = newtree
479 curtree = newtree
489 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
480 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
490 else:
481 else:
491 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
482 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
492
483
493 new_trees.append(parent)
484 new_trees.append(parent)
494 # Update ancestors
485 # Update ancestors
495 reversed_ancestors = reversed(
486 reversed_ancestors = reversed(
496 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
487 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
497 for parent, tree, path in reversed_ancestors:
488 for parent, tree, path in reversed_ancestors:
498 parent[path] = (DIR_STAT, tree.id)
489 parent[path] = (DIR_STAT, tree.id)
499 object_store.add_object(tree)
490 object_store.add_object(tree)
500
491
501 object_store.add_object(blob)
492 object_store.add_object(blob)
502 for tree in new_trees:
493 for tree in new_trees:
503 object_store.add_object(tree)
494 object_store.add_object(tree)
504
495
505 for node_path in removed:
496 for node_path in removed:
506 paths = node_path.split('/')
497 paths = node_path.split('/')
507 tree = commit_tree
498 tree = commit_tree
508 trees = [tree]
499 trees = [tree]
509 # Traverse deep into the forest...
500 # Traverse deep into the forest...
510 for path in paths:
501 for path in paths:
511 try:
502 try:
512 obj = repo[tree[path][1]]
503 obj = repo[tree[path][1]]
513 if isinstance(obj, objects.Tree):
504 if isinstance(obj, objects.Tree):
514 trees.append(obj)
505 trees.append(obj)
515 tree = obj
506 tree = obj
516 except KeyError:
507 except KeyError:
517 break
508 break
518 # Cut down the blob and all rotten trees on the way back...
509 # Cut down the blob and all rotten trees on the way back...
519 for path, tree in reversed(zip(paths, trees)):
510 for path, tree in reversed(zip(paths, trees)):
520 del tree[path]
511 del tree[path]
521 if tree:
512 if tree:
522 # This tree still has elements - don't remove it or any
513 # This tree still has elements - don't remove it or any
523 # of it's parents
514 # of it's parents
524 break
515 break
525
516
526 object_store.add_object(commit_tree)
517 object_store.add_object(commit_tree)
527
518
528 # Create commit
519 # Create commit
529 commit = objects.Commit()
520 commit = objects.Commit()
530 commit.tree = commit_tree.id
521 commit.tree = commit_tree.id
531 for k, v in commit_data.iteritems():
522 for k, v in commit_data.iteritems():
532 setattr(commit, k, v)
523 setattr(commit, k, v)
533 object_store.add_object(commit)
524 object_store.add_object(commit)
534
525
535 self.create_branch(wire, branch, commit.id)
526 self.create_branch(wire, branch, commit.id)
536
527
537 # dulwich set-ref
528 # dulwich set-ref
538 ref = 'refs/heads/%s' % branch
529 ref = 'refs/heads/%s' % branch
539 repo.refs[ref] = commit.id
530 repo.refs[ref] = commit.id
540
531
541 return commit.id
532 return commit.id
542
533
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        """
        Fetch objects and refs from `url` into the repository described by `wire`.

        :param url: remote url; a plain filesystem path or 'default' selects a
            local dulwich client, anything with a scheme goes over HTTP
        :param apply_refs: when True, write the fetched refs into the local repo
        :param refs: optional list of ref names to restrict the fetch to
        :param update_after: when True, also set and checkout the remote HEAD
        :return: dict of remote refs (filtered by `refs` when given)
        """
        # Local paths (no scheme) use dulwich's LocalGitClient; remote urls use
        # an HTTP client with our auth-capable urllib opener.
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            # authinfo() strips credentials out of the url for the actual request
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

        if update_after:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
        return remote_refs
595
586
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None):
        """
        Fetch refs from remote `url` using the git CLI.

        Remote refs are first listed via `ls-remote`; when `refs` is given,
        only refs whose sha appears in `refs` are fetched, otherwise all refs
        except HEAD and peeled tag markers are fetched.

        :param refs: optional sha (or list of shas) to restrict the fetch to
        :return: OrderedDict of {ref_name: sha} as reported by ls-remote
        """
        repo = self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]
        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        output, __ = self.run_git_command(
            wire, ['ls-remote', url], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        # ls-remote lines look like: "<sha>\t<ref_name>"
        for ref_line in output.splitlines():
            sha, ref = ref_line.split('\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in ['HEAD']:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append('{}:{}'.format(ref, ref))
            elif not refs:
                fetch_refs.append('{}:{}'.format(ref, ref))
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
        if fetch_refs:
            # fetch in chunks so the command line length stays bounded
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                _out, _err = self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs
643
634
644 @reraise_safe_exceptions
635 @reraise_safe_exceptions
645 def sync_push(self, wire, url, refs=None):
636 def sync_push(self, wire, url, refs=None):
646 if not self.check_url(url, wire):
637 if not self.check_url(url, wire):
647 return
638 return
648 config = self._wire_to_config(wire)
639 config = self._wire_to_config(wire)
649 self._factory.repo(wire)
640 self._factory.repo(wire)
650 self.run_git_command(
641 self.run_git_command(
651 wire, ['push', url, '--mirror'], fail_on_stderr=False,
642 wire, ['push', url, '--mirror'], fail_on_stderr=False,
652 _copts=self._remote_conf(config),
643 _copts=self._remote_conf(config),
653 extra_env={'GIT_TERMINAL_PROMPT': '0'})
644 extra_env={'GIT_TERMINAL_PROMPT': '0'})
654
645
655 @reraise_safe_exceptions
646 @reraise_safe_exceptions
656 def get_remote_refs(self, wire, url):
647 def get_remote_refs(self, wire, url):
657 repo = Repo(url)
648 repo = Repo(url)
658 return repo.get_refs()
649 return repo.get_refs()
659
650
660 @reraise_safe_exceptions
651 @reraise_safe_exceptions
661 def get_description(self, wire):
652 def get_description(self, wire):
662 repo = self._factory.repo(wire)
653 repo = self._factory.repo(wire)
663 return repo.get_description()
654 return repo.get_description()
664
655
665 @reraise_safe_exceptions
656 @reraise_safe_exceptions
666 def get_missing_revs(self, wire, rev1, rev2, path2):
657 def get_missing_revs(self, wire, rev1, rev2, path2):
667 repo = self._factory.repo(wire)
658 repo = self._factory.repo(wire)
668 LocalGitClient(thin_packs=False).fetch(path2, repo)
659 LocalGitClient(thin_packs=False).fetch(path2, repo)
669
660
670 wire_remote = wire.copy()
661 wire_remote = wire.copy()
671 wire_remote['path'] = path2
662 wire_remote['path'] = path2
672 repo_remote = self._factory.repo(wire_remote)
663 repo_remote = self._factory.repo(wire_remote)
673 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
664 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
674
665
675 revs = [
666 revs = [
676 x.commit.id
667 x.commit.id
677 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
668 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
678 return revs
669 return revs
679
670
    @reraise_safe_exceptions
    def get_object(self, wire, sha):
        """
        Resolve `sha` (any rev-parse-able expression) to an object descriptor.

        Tags are peeled to the object they point at; non-tag objects must be
        reachable from at least one branch, otherwise they are treated as
        dangling and reported as missing.

        :return: dict with 'id', 'type', 'commit_id' and 'idx' keys
        :raises exceptions.LookupException: when the sha cannot be resolved
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except (KeyError, ValueError) as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    # peel annotated tags down to their target object
                    commit = repo.get(commit.target)
                    is_tag = True

                if not is_tag:
                    # check for dangling commit
                    branches = [x for x in repo.branches.with_commit(commit.hex)]
                    if not branches:
                        raise exceptions.LookupException(None)(missing_commit_err)

                commit_id = commit.hex
                type_id = commit.type

                return {
                    'id': commit_id,
                    'type': self._type_id_to_name(type_id),
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)
716
707
717 @reraise_safe_exceptions
708 @reraise_safe_exceptions
718 def get_refs(self, wire):
709 def get_refs(self, wire):
719 cache_on, context_uid, repo_id = self._cache_on(wire)
710 cache_on, context_uid, repo_id = self._cache_on(wire)
720 @self.region.conditional_cache_on_arguments(condition=cache_on)
711 @self.region.conditional_cache_on_arguments(condition=cache_on)
721 def _get_refs(_context_uid, _repo_id):
712 def _get_refs(_context_uid, _repo_id):
722
713
723 repo_init = self._factory.repo_libgit2(wire)
714 repo_init = self._factory.repo_libgit2(wire)
724 with repo_init as repo:
715 with repo_init as repo:
725 regex = re.compile('^refs/(heads|tags)/')
716 regex = re.compile('^refs/(heads|tags)/')
726 return {x.name: x.target.hex for x in
717 return {x.name: x.target.hex for x in
727 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
718 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
728
719
729 return _get_refs(context_uid, repo_id)
720 return _get_refs(context_uid, repo_id)
730
721
731 @reraise_safe_exceptions
722 @reraise_safe_exceptions
732 def get_branch_pointers(self, wire):
723 def get_branch_pointers(self, wire):
733 cache_on, context_uid, repo_id = self._cache_on(wire)
724 cache_on, context_uid, repo_id = self._cache_on(wire)
734 @self.region.conditional_cache_on_arguments(condition=cache_on)
725 @self.region.conditional_cache_on_arguments(condition=cache_on)
735 def _get_branch_pointers(_context_uid, _repo_id):
726 def _get_branch_pointers(_context_uid, _repo_id):
736
727
737 repo_init = self._factory.repo_libgit2(wire)
728 repo_init = self._factory.repo_libgit2(wire)
738 regex = re.compile('^refs/heads')
729 regex = re.compile('^refs/heads')
739 with repo_init as repo:
730 with repo_init as repo:
740 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
731 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
741 return {x.target.hex: x.shorthand for x in branches}
732 return {x.target.hex: x.shorthand for x in branches}
742
733
743 return _get_branch_pointers(context_uid, repo_id)
734 return _get_branch_pointers(context_uid, repo_id)
744
735
745 @reraise_safe_exceptions
736 @reraise_safe_exceptions
746 def head(self, wire, show_exc=True):
737 def head(self, wire, show_exc=True):
747 cache_on, context_uid, repo_id = self._cache_on(wire)
738 cache_on, context_uid, repo_id = self._cache_on(wire)
748 @self.region.conditional_cache_on_arguments(condition=cache_on)
739 @self.region.conditional_cache_on_arguments(condition=cache_on)
749 def _head(_context_uid, _repo_id, _show_exc):
740 def _head(_context_uid, _repo_id, _show_exc):
750 repo_init = self._factory.repo_libgit2(wire)
741 repo_init = self._factory.repo_libgit2(wire)
751 with repo_init as repo:
742 with repo_init as repo:
752 try:
743 try:
753 return repo.head.peel().hex
744 return repo.head.peel().hex
754 except Exception:
745 except Exception:
755 if show_exc:
746 if show_exc:
756 raise
747 raise
757 return _head(context_uid, repo_id, show_exc)
748 return _head(context_uid, repo_id, show_exc)
758
749
759 @reraise_safe_exceptions
750 @reraise_safe_exceptions
760 def init(self, wire):
751 def init(self, wire):
761 repo_path = str_to_dulwich(wire['path'])
752 repo_path = str_to_dulwich(wire['path'])
762 self.repo = Repo.init(repo_path)
753 self.repo = Repo.init(repo_path)
763
754
764 @reraise_safe_exceptions
755 @reraise_safe_exceptions
765 def init_bare(self, wire):
756 def init_bare(self, wire):
766 repo_path = str_to_dulwich(wire['path'])
757 repo_path = str_to_dulwich(wire['path'])
767 self.repo = Repo.init_bare(repo_path)
758 self.repo = Repo.init_bare(repo_path)
768
759
769 @reraise_safe_exceptions
760 @reraise_safe_exceptions
770 def revision(self, wire, rev):
761 def revision(self, wire, rev):
771
762
772 cache_on, context_uid, repo_id = self._cache_on(wire)
763 cache_on, context_uid, repo_id = self._cache_on(wire)
773 @self.region.conditional_cache_on_arguments(condition=cache_on)
764 @self.region.conditional_cache_on_arguments(condition=cache_on)
774 def _revision(_context_uid, _repo_id, _rev):
765 def _revision(_context_uid, _repo_id, _rev):
775 repo_init = self._factory.repo_libgit2(wire)
766 repo_init = self._factory.repo_libgit2(wire)
776 with repo_init as repo:
767 with repo_init as repo:
777 commit = repo[rev]
768 commit = repo[rev]
778 obj_data = {
769 obj_data = {
779 'id': commit.id.hex,
770 'id': commit.id.hex,
780 }
771 }
781 # tree objects itself don't have tree_id attribute
772 # tree objects itself don't have tree_id attribute
782 if hasattr(commit, 'tree_id'):
773 if hasattr(commit, 'tree_id'):
783 obj_data['tree'] = commit.tree_id.hex
774 obj_data['tree'] = commit.tree_id.hex
784
775
785 return obj_data
776 return obj_data
786 return _revision(context_uid, repo_id, rev)
777 return _revision(context_uid, repo_id, rev)
787
778
788 @reraise_safe_exceptions
779 @reraise_safe_exceptions
789 def date(self, wire, commit_id):
780 def date(self, wire, commit_id):
790 cache_on, context_uid, repo_id = self._cache_on(wire)
781 cache_on, context_uid, repo_id = self._cache_on(wire)
791 @self.region.conditional_cache_on_arguments(condition=cache_on)
782 @self.region.conditional_cache_on_arguments(condition=cache_on)
792 def _date(_repo_id, _commit_id):
783 def _date(_repo_id, _commit_id):
793 repo_init = self._factory.repo_libgit2(wire)
784 repo_init = self._factory.repo_libgit2(wire)
794 with repo_init as repo:
785 with repo_init as repo:
795 commit = repo[commit_id]
786 commit = repo[commit_id]
796 # TODO(marcink): check dulwich difference of offset vs timezone
787 # TODO(marcink): check dulwich difference of offset vs timezone
797 return [commit.commit_time, commit.commit_time_offset]
788 return [commit.commit_time, commit.commit_time_offset]
798 return _date(repo_id, commit_id)
789 return _date(repo_id, commit_id)
799
790
800 @reraise_safe_exceptions
791 @reraise_safe_exceptions
801 def author(self, wire, commit_id):
792 def author(self, wire, commit_id):
802 cache_on, context_uid, repo_id = self._cache_on(wire)
793 cache_on, context_uid, repo_id = self._cache_on(wire)
803 @self.region.conditional_cache_on_arguments(condition=cache_on)
794 @self.region.conditional_cache_on_arguments(condition=cache_on)
804 def _author(_repo_id, _commit_id):
795 def _author(_repo_id, _commit_id):
805 repo_init = self._factory.repo_libgit2(wire)
796 repo_init = self._factory.repo_libgit2(wire)
806 with repo_init as repo:
797 with repo_init as repo:
807 commit = repo[commit_id]
798 commit = repo[commit_id]
808 if commit.author.email:
799 if commit.author.email:
809 return u"{} <{}>".format(commit.author.name, commit.author.email)
800 return u"{} <{}>".format(commit.author.name, commit.author.email)
810
801
811 return u"{}".format(commit.author.raw_name)
802 return u"{}".format(commit.author.raw_name)
812 return _author(repo_id, commit_id)
803 return _author(repo_id, commit_id)
813
804
814 @reraise_safe_exceptions
805 @reraise_safe_exceptions
815 def message(self, wire, commit_id):
806 def message(self, wire, commit_id):
816 cache_on, context_uid, repo_id = self._cache_on(wire)
807 cache_on, context_uid, repo_id = self._cache_on(wire)
817 @self.region.conditional_cache_on_arguments(condition=cache_on)
808 @self.region.conditional_cache_on_arguments(condition=cache_on)
818 def _message(_repo_id, _commit_id):
809 def _message(_repo_id, _commit_id):
819 repo_init = self._factory.repo_libgit2(wire)
810 repo_init = self._factory.repo_libgit2(wire)
820 with repo_init as repo:
811 with repo_init as repo:
821 commit = repo[commit_id]
812 commit = repo[commit_id]
822 return commit.message
813 return commit.message
823 return _message(repo_id, commit_id)
814 return _message(repo_id, commit_id)
824
815
825 @reraise_safe_exceptions
816 @reraise_safe_exceptions
826 def parents(self, wire, commit_id):
817 def parents(self, wire, commit_id):
827 cache_on, context_uid, repo_id = self._cache_on(wire)
818 cache_on, context_uid, repo_id = self._cache_on(wire)
828 @self.region.conditional_cache_on_arguments(condition=cache_on)
819 @self.region.conditional_cache_on_arguments(condition=cache_on)
829 def _parents(_repo_id, _commit_id):
820 def _parents(_repo_id, _commit_id):
830 repo_init = self._factory.repo_libgit2(wire)
821 repo_init = self._factory.repo_libgit2(wire)
831 with repo_init as repo:
822 with repo_init as repo:
832 commit = repo[commit_id]
823 commit = repo[commit_id]
833 return [x.hex for x in commit.parent_ids]
824 return [x.hex for x in commit.parent_ids]
834 return _parents(repo_id, commit_id)
825 return _parents(repo_id, commit_id)
835
826
836 @reraise_safe_exceptions
827 @reraise_safe_exceptions
837 def children(self, wire, commit_id):
828 def children(self, wire, commit_id):
838 cache_on, context_uid, repo_id = self._cache_on(wire)
829 cache_on, context_uid, repo_id = self._cache_on(wire)
839 @self.region.conditional_cache_on_arguments(condition=cache_on)
830 @self.region.conditional_cache_on_arguments(condition=cache_on)
840 def _children(_repo_id, _commit_id):
831 def _children(_repo_id, _commit_id):
841 output, __ = self.run_git_command(
832 output, __ = self.run_git_command(
842 wire, ['rev-list', '--all', '--children'])
833 wire, ['rev-list', '--all', '--children'])
843
834
844 child_ids = []
835 child_ids = []
845 pat = re.compile(r'^%s' % commit_id)
836 pat = re.compile(r'^%s' % commit_id)
846 for l in output.splitlines():
837 for l in output.splitlines():
847 if pat.match(l):
838 if pat.match(l):
848 found_ids = l.split(' ')[1:]
839 found_ids = l.split(' ')[1:]
849 child_ids.extend(found_ids)
840 child_ids.extend(found_ids)
850
841
851 return child_ids
842 return child_ids
852 return _children(repo_id, commit_id)
843 return _children(repo_id, commit_id)
853
844
854 @reraise_safe_exceptions
845 @reraise_safe_exceptions
855 def set_refs(self, wire, key, value):
846 def set_refs(self, wire, key, value):
856 repo_init = self._factory.repo_libgit2(wire)
847 repo_init = self._factory.repo_libgit2(wire)
857 with repo_init as repo:
848 with repo_init as repo:
858 repo.references.create(key, value, force=True)
849 repo.references.create(key, value, force=True)
859
850
860 @reraise_safe_exceptions
851 @reraise_safe_exceptions
861 def create_branch(self, wire, branch_name, commit_id, force=False):
852 def create_branch(self, wire, branch_name, commit_id, force=False):
862 repo_init = self._factory.repo_libgit2(wire)
853 repo_init = self._factory.repo_libgit2(wire)
863 with repo_init as repo:
854 with repo_init as repo:
864 commit = repo[commit_id]
855 commit = repo[commit_id]
865
856
866 if force:
857 if force:
867 repo.branches.local.create(branch_name, commit, force=force)
858 repo.branches.local.create(branch_name, commit, force=force)
868 elif not repo.branches.get(branch_name):
859 elif not repo.branches.get(branch_name):
869 # create only if that branch isn't existing
860 # create only if that branch isn't existing
870 repo.branches.local.create(branch_name, commit, force=force)
861 repo.branches.local.create(branch_name, commit, force=force)
871
862
872 @reraise_safe_exceptions
863 @reraise_safe_exceptions
873 def remove_ref(self, wire, key):
864 def remove_ref(self, wire, key):
874 repo_init = self._factory.repo_libgit2(wire)
865 repo_init = self._factory.repo_libgit2(wire)
875 with repo_init as repo:
866 with repo_init as repo:
876 repo.references.delete(key)
867 repo.references.delete(key)
877
868
878 @reraise_safe_exceptions
869 @reraise_safe_exceptions
879 def tag_remove(self, wire, tag_name):
870 def tag_remove(self, wire, tag_name):
880 repo_init = self._factory.repo_libgit2(wire)
871 repo_init = self._factory.repo_libgit2(wire)
881 with repo_init as repo:
872 with repo_init as repo:
882 key = 'refs/tags/{}'.format(tag_name)
873 key = 'refs/tags/{}'.format(tag_name)
883 repo.references.delete(key)
874 repo.references.delete(key)
884
875
885 @reraise_safe_exceptions
876 @reraise_safe_exceptions
886 def tree_changes(self, wire, source_id, target_id):
877 def tree_changes(self, wire, source_id, target_id):
887 # TODO(marcink): remove this seems it's only used by tests
878 # TODO(marcink): remove this seems it's only used by tests
888 repo = self._factory.repo(wire)
879 repo = self._factory.repo(wire)
889 source = repo[source_id].tree if source_id else None
880 source = repo[source_id].tree if source_id else None
890 target = repo[target_id].tree
881 target = repo[target_id].tree
891 result = repo.object_store.tree_changes(source, target)
882 result = repo.object_store.tree_changes(source, target)
892 return list(result)
883 return list(result)
893
884
894 @reraise_safe_exceptions
885 @reraise_safe_exceptions
895 def tree_and_type_for_path(self, wire, commit_id, path):
886 def tree_and_type_for_path(self, wire, commit_id, path):
896
887
897 cache_on, context_uid, repo_id = self._cache_on(wire)
888 cache_on, context_uid, repo_id = self._cache_on(wire)
898 @self.region.conditional_cache_on_arguments(condition=cache_on)
889 @self.region.conditional_cache_on_arguments(condition=cache_on)
899 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
890 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
900 repo_init = self._factory.repo_libgit2(wire)
891 repo_init = self._factory.repo_libgit2(wire)
901
892
902 with repo_init as repo:
893 with repo_init as repo:
903 commit = repo[commit_id]
894 commit = repo[commit_id]
904 try:
895 try:
905 tree = commit.tree[path]
896 tree = commit.tree[path]
906 except KeyError:
897 except KeyError:
907 return None, None, None
898 return None, None, None
908
899
909 return tree.id.hex, tree.type, tree.filemode
900 return tree.id.hex, tree.type, tree.filemode
910 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
901 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
911
902
    @reraise_safe_exceptions
    def tree_items(self, wire, tree_id):
        """
        List the direct entries of the tree `tree_id`.

        :return: list of (name, filemode, sha, type) tuples; submodule
            entries (git type 'commit') are reported as type 'link'
        :raises ObjectMissing: when no tree with the given id exists
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _tree_items(_repo_id, _tree_id):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    tree = repo[tree_id]
                except KeyError:
                    raise ObjectMissing('No tree with id: {}'.format(tree_id))

                result = []
                for item in tree:
                    item_sha = item.hex
                    item_mode = item.filemode
                    item_type = item.type

                    if item_type == 'commit':
                        # NOTE(marcink): submodules we translate to 'link' for backward compat
                        item_type = 'link'

                    result.append((item.name, item_mode, item_sha, item_type))
                return result
        return _tree_items(repo_id, tree_id)
938
929
    @reraise_safe_exceptions
    def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
        """
        Return a textual git diff between two commits.

        :param file_filter: optional pathspec to restrict the diff to
        :param opt_ignorews: ignore whitespace changes when truthy
        :param context: number of context lines passed as -U<context>

        When `commit_id_1` is the EMPTY_COMMIT sentinel, `git show` is used on
        `commit_id_2` instead and the commit header is stripped so that only
        the diff body remains.
        """
        flags = [
            '-U%s' % context, '--full-index', '--binary', '-p',
            '-M', '--abbrev=40']

        if opt_ignorews:
            flags.append('-w')

        if commit_id_1 == self.EMPTY_COMMIT:
            cmd = ['show'] + flags + [commit_id_2]
        else:
            cmd = ['diff'] + flags + [commit_id_1, commit_id_2]

        if file_filter:
            cmd.extend(['--', file_filter])

        diff, __ = self.run_git_command(wire, cmd)
        # If we used 'show' command, strip first few lines (until actual diff
        # starts)
        if commit_id_1 == self.EMPTY_COMMIT:
            lines = diff.splitlines()
            x = 0
            for line in lines:
                if line.startswith('diff'):
                    break
                x += 1
            # Append new line just like 'diff' command do
            diff = '\n'.join(lines[x:]) + '\n'
        return diff
970
961
    @reraise_safe_exceptions
    def node_history(self, wire, commit_id, path, limit):
        """
        Return the ids of commits that touched `path`, starting at
        `commit_id`, optionally capped at `limit` entries.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
            # optimize for n==1, rev-list is much faster for that use-case
            if limit == 1:
                cmd = ['rev-list', '-1', commit_id, '--', path]
            else:
                cmd = ['log']
                if limit:
                    cmd.extend(['-n', str(safe_int(limit, 0))])
                cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])

            output, __ = self.run_git_command(wire, cmd)
            # pull the full 40-char shas out of the formatted log output
            commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)

            return [x for x in commit_ids]
        return _node_history(context_uid, repo_id, commit_id, path, limit)
990
981
991 @reraise_safe_exceptions
982 @reraise_safe_exceptions
992 def node_annotate(self, wire, commit_id, path):
983 def node_annotate(self, wire, commit_id, path):
993
984
994 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
985 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
995 # -l ==> outputs long shas (and we need all 40 characters)
986 # -l ==> outputs long shas (and we need all 40 characters)
996 # --root ==> doesn't put '^' character for boundaries
987 # --root ==> doesn't put '^' character for boundaries
997 # -r commit_id ==> blames for the given commit
988 # -r commit_id ==> blames for the given commit
998 output, __ = self.run_git_command(wire, cmd)
989 output, __ = self.run_git_command(wire, cmd)
999
990
1000 result = []
991 result = []
1001 for i, blame_line in enumerate(output.split('\n')[:-1]):
992 for i, blame_line in enumerate(output.split('\n')[:-1]):
1002 line_no = i + 1
993 line_no = i + 1
1003 commit_id, line = re.split(r' ', blame_line, 1)
994 commit_id, line = re.split(r' ', blame_line, 1)
1004 result.append((line_no, commit_id, line))
995 result.append((line_no, commit_id, line))
1005 return result
996 return result
1006
997
1007 @reraise_safe_exceptions
998 @reraise_safe_exceptions
1008 def update_server_info(self, wire):
999 def update_server_info(self, wire):
1009 repo = self._factory.repo(wire)
1000 repo = self._factory.repo(wire)
1010 update_server_info(repo)
1001 update_server_info(repo)
1011
1002
1012 @reraise_safe_exceptions
1003 @reraise_safe_exceptions
1013 def get_all_commit_ids(self, wire):
1004 def get_all_commit_ids(self, wire):
1014
1005
1015 cache_on, context_uid, repo_id = self._cache_on(wire)
1006 cache_on, context_uid, repo_id = self._cache_on(wire)
1016 @self.region.conditional_cache_on_arguments(condition=cache_on)
1007 @self.region.conditional_cache_on_arguments(condition=cache_on)
1017 def _get_all_commit_ids(_context_uid, _repo_id):
1008 def _get_all_commit_ids(_context_uid, _repo_id):
1018
1009
1019 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1010 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1020 try:
1011 try:
1021 output, __ = self.run_git_command(wire, cmd)
1012 output, __ = self.run_git_command(wire, cmd)
1022 return output.splitlines()
1013 return output.splitlines()
1023 except Exception:
1014 except Exception:
1024 # Can be raised for empty repositories
1015 # Can be raised for empty repositories
1025 return []
1016 return []
1026 return _get_all_commit_ids(context_uid, repo_id)
1017 return _get_all_commit_ids(context_uid, repo_id)
1027
1018
1028 @reraise_safe_exceptions
1019 @reraise_safe_exceptions
1029 def run_git_command(self, wire, cmd, **opts):
1020 def run_git_command(self, wire, cmd, **opts):
1030 path = wire.get('path', None)
1021 path = wire.get('path', None)
1031
1022
1032 if path and os.path.isdir(path):
1023 if path and os.path.isdir(path):
1033 opts['cwd'] = path
1024 opts['cwd'] = path
1034
1025
1035 if '_bare' in opts:
1026 if '_bare' in opts:
1036 _copts = []
1027 _copts = []
1037 del opts['_bare']
1028 del opts['_bare']
1038 else:
1029 else:
1039 _copts = ['-c', 'core.quotepath=false', ]
1030 _copts = ['-c', 'core.quotepath=false', ]
1040 safe_call = False
1031 safe_call = False
1041 if '_safe' in opts:
1032 if '_safe' in opts:
1042 # no exc on failure
1033 # no exc on failure
1043 del opts['_safe']
1034 del opts['_safe']
1044 safe_call = True
1035 safe_call = True
1045
1036
1046 if '_copts' in opts:
1037 if '_copts' in opts:
1047 _copts.extend(opts['_copts'] or [])
1038 _copts.extend(opts['_copts'] or [])
1048 del opts['_copts']
1039 del opts['_copts']
1049
1040
1050 gitenv = os.environ.copy()
1041 gitenv = os.environ.copy()
1051 gitenv.update(opts.pop('extra_env', {}))
1042 gitenv.update(opts.pop('extra_env', {}))
1052 # need to clean fix GIT_DIR !
1043 # need to clean fix GIT_DIR !
1053 if 'GIT_DIR' in gitenv:
1044 if 'GIT_DIR' in gitenv:
1054 del gitenv['GIT_DIR']
1045 del gitenv['GIT_DIR']
1055 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1046 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1056 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1047 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1057
1048
1058 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1049 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1059 _opts = {'env': gitenv, 'shell': False}
1050 _opts = {'env': gitenv, 'shell': False}
1060
1051
1061 try:
1052 try:
1062 _opts.update(opts)
1053 _opts.update(opts)
1063 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
1054 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
1064
1055
1065 return ''.join(p), ''.join(p.error)
1056 return ''.join(p), ''.join(p.error)
1066 except (EnvironmentError, OSError) as err:
1057 except (EnvironmentError, OSError) as err:
1067 cmd = ' '.join(cmd) # human friendly CMD
1058 cmd = ' '.join(cmd) # human friendly CMD
1068 tb_err = ("Couldn't run git command (%s).\n"
1059 tb_err = ("Couldn't run git command (%s).\n"
1069 "Original error was:%s\n"
1060 "Original error was:%s\n"
1070 "Call options:%s\n"
1061 "Call options:%s\n"
1071 % (cmd, err, _opts))
1062 % (cmd, err, _opts))
1072 log.exception(tb_err)
1063 log.exception(tb_err)
1073 if safe_call:
1064 if safe_call:
1074 return '', err
1065 return '', err
1075 else:
1066 else:
1076 raise exceptions.VcsException()(tb_err)
1067 raise exceptions.VcsException()(tb_err)
1077
1068
1078 @reraise_safe_exceptions
1069 @reraise_safe_exceptions
1079 def install_hooks(self, wire, force=False):
1070 def install_hooks(self, wire, force=False):
1080 from vcsserver.hook_utils import install_git_hooks
1071 from vcsserver.hook_utils import install_git_hooks
1081 bare = self.bare(wire)
1072 bare = self.bare(wire)
1082 path = wire['path']
1073 path = wire['path']
1083 return install_git_hooks(path, bare, force_create=force)
1074 return install_git_hooks(path, bare, force_create=force)
1084
1075
1085 @reraise_safe_exceptions
1076 @reraise_safe_exceptions
1086 def get_hooks_info(self, wire):
1077 def get_hooks_info(self, wire):
1087 from vcsserver.hook_utils import (
1078 from vcsserver.hook_utils import (
1088 get_git_pre_hook_version, get_git_post_hook_version)
1079 get_git_pre_hook_version, get_git_post_hook_version)
1089 bare = self.bare(wire)
1080 bare = self.bare(wire)
1090 path = wire['path']
1081 path = wire['path']
1091 return {
1082 return {
1092 'pre_version': get_git_pre_hook_version(path, bare),
1083 'pre_version': get_git_pre_hook_version(path, bare),
1093 'post_version': get_git_post_hook_version(path, bare),
1084 'post_version': get_git_post_hook_version(path, bare),
1094 }
1085 }
@@ -1,946 +1,939 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23 import traceback
23 import traceback
24
24
25 from hgext import largefiles, rebase
25 from hgext import largefiles, rebase
26 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29 from mercurial import verify
29 from mercurial import verify
30
30
31 import vcsserver
31 import vcsserver
32 from vcsserver import exceptions
32 from vcsserver import exceptions
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
33 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 from vcsserver.hgcompat import (
34 from vcsserver.hgcompat import (
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
35 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
36 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
37 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
38 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 RepoLookupError, InterventionRequired, RequirementError)
39 RepoLookupError, InterventionRequired, RequirementError)
40 from vcsserver.vcs_base import RemoteBase
40
41
41 log = logging.getLogger(__name__)
42 log = logging.getLogger(__name__)
42
43
43
44
44 def make_ui_from_config(repo_config):
45 def make_ui_from_config(repo_config):
45
46
46 class LoggingUI(ui.ui):
47 class LoggingUI(ui.ui):
47 def status(self, *msg, **opts):
48 def status(self, *msg, **opts):
48 log.info(' '.join(msg).rstrip('\n'))
49 log.info(' '.join(msg).rstrip('\n'))
49 super(LoggingUI, self).status(*msg, **opts)
50 super(LoggingUI, self).status(*msg, **opts)
50
51
51 def warn(self, *msg, **opts):
52 def warn(self, *msg, **opts):
52 log.warn(' '.join(msg).rstrip('\n'))
53 log.warn(' '.join(msg).rstrip('\n'))
53 super(LoggingUI, self).warn(*msg, **opts)
54 super(LoggingUI, self).warn(*msg, **opts)
54
55
55 def error(self, *msg, **opts):
56 def error(self, *msg, **opts):
56 log.error(' '.join(msg).rstrip('\n'))
57 log.error(' '.join(msg).rstrip('\n'))
57 super(LoggingUI, self).error(*msg, **opts)
58 super(LoggingUI, self).error(*msg, **opts)
58
59
59 def note(self, *msg, **opts):
60 def note(self, *msg, **opts):
60 log.info(' '.join(msg).rstrip('\n'))
61 log.info(' '.join(msg).rstrip('\n'))
61 super(LoggingUI, self).note(*msg, **opts)
62 super(LoggingUI, self).note(*msg, **opts)
62
63
63 def debug(self, *msg, **opts):
64 def debug(self, *msg, **opts):
64 log.debug(' '.join(msg).rstrip('\n'))
65 log.debug(' '.join(msg).rstrip('\n'))
65 super(LoggingUI, self).debug(*msg, **opts)
66 super(LoggingUI, self).debug(*msg, **opts)
66
67
67 baseui = LoggingUI()
68 baseui = LoggingUI()
68
69
69 # clean the baseui object
70 # clean the baseui object
70 baseui._ocfg = hgconfig.config()
71 baseui._ocfg = hgconfig.config()
71 baseui._ucfg = hgconfig.config()
72 baseui._ucfg = hgconfig.config()
72 baseui._tcfg = hgconfig.config()
73 baseui._tcfg = hgconfig.config()
73
74
74 for section, option, value in repo_config:
75 for section, option, value in repo_config:
75 baseui.setconfig(section, option, value)
76 baseui.setconfig(section, option, value)
76
77
77 # make our hgweb quiet so it doesn't print output
78 # make our hgweb quiet so it doesn't print output
78 baseui.setconfig('ui', 'quiet', 'true')
79 baseui.setconfig('ui', 'quiet', 'true')
79
80
80 baseui.setconfig('ui', 'paginate', 'never')
81 baseui.setconfig('ui', 'paginate', 'never')
81 # for better Error reporting of Mercurial
82 # for better Error reporting of Mercurial
82 baseui.setconfig('ui', 'message-output', 'stderr')
83 baseui.setconfig('ui', 'message-output', 'stderr')
83
84
84 # force mercurial to only use 1 thread, otherwise it may try to set a
85 # force mercurial to only use 1 thread, otherwise it may try to set a
85 # signal in a non-main thread, thus generating a ValueError.
86 # signal in a non-main thread, thus generating a ValueError.
86 baseui.setconfig('worker', 'numcpus', 1)
87 baseui.setconfig('worker', 'numcpus', 1)
87
88
88 # If there is no config for the largefiles extension, we explicitly disable
89 # If there is no config for the largefiles extension, we explicitly disable
89 # it here. This overrides settings from repositories hgrc file. Recent
90 # it here. This overrides settings from repositories hgrc file. Recent
90 # mercurial versions enable largefiles in hgrc on clone from largefile
91 # mercurial versions enable largefiles in hgrc on clone from largefile
91 # repo.
92 # repo.
92 if not baseui.hasconfig('extensions', 'largefiles'):
93 if not baseui.hasconfig('extensions', 'largefiles'):
93 log.debug('Explicitly disable largefiles extension for repo.')
94 log.debug('Explicitly disable largefiles extension for repo.')
94 baseui.setconfig('extensions', 'largefiles', '!')
95 baseui.setconfig('extensions', 'largefiles', '!')
95
96
96 return baseui
97 return baseui
97
98
98
99
99 def reraise_safe_exceptions(func):
100 def reraise_safe_exceptions(func):
100 """Decorator for converting mercurial exceptions to something neutral."""
101 """Decorator for converting mercurial exceptions to something neutral."""
101
102
102 def wrapper(*args, **kwargs):
103 def wrapper(*args, **kwargs):
103 try:
104 try:
104 return func(*args, **kwargs)
105 return func(*args, **kwargs)
105 except (Abort, InterventionRequired) as e:
106 except (Abort, InterventionRequired) as e:
106 raise_from_original(exceptions.AbortException(e))
107 raise_from_original(exceptions.AbortException(e))
107 except RepoLookupError as e:
108 except RepoLookupError as e:
108 raise_from_original(exceptions.LookupException(e))
109 raise_from_original(exceptions.LookupException(e))
109 except RequirementError as e:
110 except RequirementError as e:
110 raise_from_original(exceptions.RequirementException(e))
111 raise_from_original(exceptions.RequirementException(e))
111 except RepoError as e:
112 except RepoError as e:
112 raise_from_original(exceptions.VcsException(e))
113 raise_from_original(exceptions.VcsException(e))
113 except LookupError as e:
114 except LookupError as e:
114 raise_from_original(exceptions.LookupException(e))
115 raise_from_original(exceptions.LookupException(e))
115 except Exception as e:
116 except Exception as e:
116 if not hasattr(e, '_vcs_kind'):
117 if not hasattr(e, '_vcs_kind'):
117 log.exception("Unhandled exception in hg remote call")
118 log.exception("Unhandled exception in hg remote call")
118 raise_from_original(exceptions.UnhandledException(e))
119 raise_from_original(exceptions.UnhandledException(e))
119
120
120 raise
121 raise
121 return wrapper
122 return wrapper
122
123
123
124
124 class MercurialFactory(RepoFactory):
125 class MercurialFactory(RepoFactory):
125 repo_type = 'hg'
126 repo_type = 'hg'
126
127
127 def _create_config(self, config, hooks=True):
128 def _create_config(self, config, hooks=True):
128 if not hooks:
129 if not hooks:
129 hooks_to_clean = frozenset((
130 hooks_to_clean = frozenset((
130 'changegroup.repo_size', 'preoutgoing.pre_pull',
131 'changegroup.repo_size', 'preoutgoing.pre_pull',
131 'outgoing.pull_logger', 'prechangegroup.pre_push'))
132 'outgoing.pull_logger', 'prechangegroup.pre_push'))
132 new_config = []
133 new_config = []
133 for section, option, value in config:
134 for section, option, value in config:
134 if section == 'hooks' and option in hooks_to_clean:
135 if section == 'hooks' and option in hooks_to_clean:
135 continue
136 continue
136 new_config.append((section, option, value))
137 new_config.append((section, option, value))
137 config = new_config
138 config = new_config
138
139
139 baseui = make_ui_from_config(config)
140 baseui = make_ui_from_config(config)
140 return baseui
141 return baseui
141
142
142 def _create_repo(self, wire, create):
143 def _create_repo(self, wire, create):
143 baseui = self._create_config(wire["config"])
144 baseui = self._create_config(wire["config"])
144 return instance(baseui, wire["path"], create)
145 return instance(baseui, wire["path"], create)
145
146
146 def repo(self, wire, create=False):
147 def repo(self, wire, create=False):
147 """
148 """
148 Get a repository instance for the given path.
149 Get a repository instance for the given path.
149 """
150 """
150 return self._create_repo(wire, create)
151 return self._create_repo(wire, create)
151
152
152
153
153 class HgRemote(object):
154 class HgRemote(RemoteBase):
154
155
155 def __init__(self, factory):
156 def __init__(self, factory):
156 self._factory = factory
157 self._factory = factory
157 self._bulk_methods = {
158 self._bulk_methods = {
158 "affected_files": self.ctx_files,
159 "affected_files": self.ctx_files,
159 "author": self.ctx_user,
160 "author": self.ctx_user,
160 "branch": self.ctx_branch,
161 "branch": self.ctx_branch,
161 "children": self.ctx_children,
162 "children": self.ctx_children,
162 "date": self.ctx_date,
163 "date": self.ctx_date,
163 "message": self.ctx_description,
164 "message": self.ctx_description,
164 "parents": self.ctx_parents,
165 "parents": self.ctx_parents,
165 "status": self.ctx_status,
166 "status": self.ctx_status,
166 "obsolete": self.ctx_obsolete,
167 "obsolete": self.ctx_obsolete,
167 "phase": self.ctx_phase,
168 "phase": self.ctx_phase,
168 "hidden": self.ctx_hidden,
169 "hidden": self.ctx_hidden,
169 "_file_paths": self.ctx_list,
170 "_file_paths": self.ctx_list,
170 }
171 }
171 self.region = self._factory._cache_region
172 self.region = self._factory._cache_region
172
173
173 def _get_ctx(self, repo, ref):
174 def _get_ctx(self, repo, ref):
174 return get_ctx(repo, ref)
175 return get_ctx(repo, ref)
175
176
176 def _cache_on(self, wire):
177 context = wire.get('context', '')
178 context_uid = '{}'.format(context)
179 repo_id = wire.get('repo_id', '')
180 cache = wire.get('cache', True)
181 cache_on = context and cache
182 return cache_on, context_uid, repo_id
183
184 @reraise_safe_exceptions
177 @reraise_safe_exceptions
185 def discover_hg_version(self):
178 def discover_hg_version(self):
186 from mercurial import util
179 from mercurial import util
187 return util.version()
180 return util.version()
188
181
189 @reraise_safe_exceptions
182 @reraise_safe_exceptions
190 def is_empty(self, wire):
183 def is_empty(self, wire):
191 repo = self._factory.repo(wire)
184 repo = self._factory.repo(wire)
192
185
193 try:
186 try:
194 return len(repo) == 0
187 return len(repo) == 0
195 except Exception:
188 except Exception:
196 log.exception("failed to read object_store")
189 log.exception("failed to read object_store")
197 return False
190 return False
198
191
199 @reraise_safe_exceptions
192 @reraise_safe_exceptions
200 def archive_repo(self, archive_path, mtime, file_info, kind):
193 def archive_repo(self, archive_path, mtime, file_info, kind):
201 if kind == "tgz":
194 if kind == "tgz":
202 archiver = archival.tarit(archive_path, mtime, "gz")
195 archiver = archival.tarit(archive_path, mtime, "gz")
203 elif kind == "tbz2":
196 elif kind == "tbz2":
204 archiver = archival.tarit(archive_path, mtime, "bz2")
197 archiver = archival.tarit(archive_path, mtime, "bz2")
205 elif kind == 'zip':
198 elif kind == 'zip':
206 archiver = archival.zipit(archive_path, mtime)
199 archiver = archival.zipit(archive_path, mtime)
207 else:
200 else:
208 raise exceptions.ArchiveException()(
201 raise exceptions.ArchiveException()(
209 'Remote does not support: "%s".' % kind)
202 'Remote does not support: "%s".' % kind)
210
203
211 for f_path, f_mode, f_is_link, f_content in file_info:
204 for f_path, f_mode, f_is_link, f_content in file_info:
212 archiver.addfile(f_path, f_mode, f_is_link, f_content)
205 archiver.addfile(f_path, f_mode, f_is_link, f_content)
213 archiver.done()
206 archiver.done()
214
207
215 @reraise_safe_exceptions
208 @reraise_safe_exceptions
216 def bookmarks(self, wire):
209 def bookmarks(self, wire):
217 cache_on, context_uid, repo_id = self._cache_on(wire)
210 cache_on, context_uid, repo_id = self._cache_on(wire)
218 @self.region.conditional_cache_on_arguments(condition=cache_on)
211 @self.region.conditional_cache_on_arguments(condition=cache_on)
219 def _bookmarks(_context_uid, _repo_id):
212 def _bookmarks(_context_uid, _repo_id):
220 repo = self._factory.repo(wire)
213 repo = self._factory.repo(wire)
221 return dict(repo._bookmarks)
214 return dict(repo._bookmarks)
222
215
223 return _bookmarks(context_uid, repo_id)
216 return _bookmarks(context_uid, repo_id)
224
217
225 @reraise_safe_exceptions
218 @reraise_safe_exceptions
226 def branches(self, wire, normal, closed):
219 def branches(self, wire, normal, closed):
227 cache_on, context_uid, repo_id = self._cache_on(wire)
220 cache_on, context_uid, repo_id = self._cache_on(wire)
228 @self.region.conditional_cache_on_arguments(condition=cache_on)
221 @self.region.conditional_cache_on_arguments(condition=cache_on)
229 def _branches(_context_uid, _repo_id, _normal, _closed):
222 def _branches(_context_uid, _repo_id, _normal, _closed):
230 repo = self._factory.repo(wire)
223 repo = self._factory.repo(wire)
231 iter_branches = repo.branchmap().iterbranches()
224 iter_branches = repo.branchmap().iterbranches()
232 bt = {}
225 bt = {}
233 for branch_name, _heads, tip, is_closed in iter_branches:
226 for branch_name, _heads, tip, is_closed in iter_branches:
234 if normal and not is_closed:
227 if normal and not is_closed:
235 bt[branch_name] = tip
228 bt[branch_name] = tip
236 if closed and is_closed:
229 if closed and is_closed:
237 bt[branch_name] = tip
230 bt[branch_name] = tip
238
231
239 return bt
232 return bt
240
233
241 return _branches(context_uid, repo_id, normal, closed)
234 return _branches(context_uid, repo_id, normal, closed)
242
235
243 @reraise_safe_exceptions
236 @reraise_safe_exceptions
244 def bulk_request(self, wire, commit_id, pre_load):
237 def bulk_request(self, wire, commit_id, pre_load):
245 cache_on, context_uid, repo_id = self._cache_on(wire)
238 cache_on, context_uid, repo_id = self._cache_on(wire)
246 @self.region.conditional_cache_on_arguments(condition=cache_on)
239 @self.region.conditional_cache_on_arguments(condition=cache_on)
247 def _bulk_request(_repo_id, _commit_id, _pre_load):
240 def _bulk_request(_repo_id, _commit_id, _pre_load):
248 result = {}
241 result = {}
249 for attr in pre_load:
242 for attr in pre_load:
250 try:
243 try:
251 method = self._bulk_methods[attr]
244 method = self._bulk_methods[attr]
252 result[attr] = method(wire, commit_id)
245 result[attr] = method(wire, commit_id)
253 except KeyError as e:
246 except KeyError as e:
254 raise exceptions.VcsException(e)(
247 raise exceptions.VcsException(e)(
255 'Unknown bulk attribute: "%s"' % attr)
248 'Unknown bulk attribute: "%s"' % attr)
256 return result
249 return result
257
250
258 return _bulk_request(repo_id, commit_id, sorted(pre_load))
251 return _bulk_request(repo_id, commit_id, sorted(pre_load))
259
252
260 @reraise_safe_exceptions
253 @reraise_safe_exceptions
261 def ctx_branch(self, wire, commit_id):
254 def ctx_branch(self, wire, commit_id):
262 cache_on, context_uid, repo_id = self._cache_on(wire)
255 cache_on, context_uid, repo_id = self._cache_on(wire)
263 @self.region.conditional_cache_on_arguments(condition=cache_on)
256 @self.region.conditional_cache_on_arguments(condition=cache_on)
264 def _ctx_branch(_repo_id, _commit_id):
257 def _ctx_branch(_repo_id, _commit_id):
265 repo = self._factory.repo(wire)
258 repo = self._factory.repo(wire)
266 ctx = self._get_ctx(repo, commit_id)
259 ctx = self._get_ctx(repo, commit_id)
267 return ctx.branch()
260 return ctx.branch()
268 return _ctx_branch(repo_id, commit_id)
261 return _ctx_branch(repo_id, commit_id)
269
262
270 @reraise_safe_exceptions
263 @reraise_safe_exceptions
271 def ctx_date(self, wire, commit_id):
264 def ctx_date(self, wire, commit_id):
272 cache_on, context_uid, repo_id = self._cache_on(wire)
265 cache_on, context_uid, repo_id = self._cache_on(wire)
273 @self.region.conditional_cache_on_arguments(condition=cache_on)
266 @self.region.conditional_cache_on_arguments(condition=cache_on)
274 def _ctx_date(_repo_id, _commit_id):
267 def _ctx_date(_repo_id, _commit_id):
275 repo = self._factory.repo(wire)
268 repo = self._factory.repo(wire)
276 ctx = self._get_ctx(repo, commit_id)
269 ctx = self._get_ctx(repo, commit_id)
277 return ctx.date()
270 return ctx.date()
278 return _ctx_date(repo_id, commit_id)
271 return _ctx_date(repo_id, commit_id)
279
272
280 @reraise_safe_exceptions
273 @reraise_safe_exceptions
281 def ctx_description(self, wire, revision):
274 def ctx_description(self, wire, revision):
282 repo = self._factory.repo(wire)
275 repo = self._factory.repo(wire)
283 ctx = self._get_ctx(repo, revision)
276 ctx = self._get_ctx(repo, revision)
284 return ctx.description()
277 return ctx.description()
285
278
286 @reraise_safe_exceptions
279 @reraise_safe_exceptions
287 def ctx_files(self, wire, commit_id):
280 def ctx_files(self, wire, commit_id):
288 cache_on, context_uid, repo_id = self._cache_on(wire)
281 cache_on, context_uid, repo_id = self._cache_on(wire)
289 @self.region.conditional_cache_on_arguments(condition=cache_on)
282 @self.region.conditional_cache_on_arguments(condition=cache_on)
290 def _ctx_files(_repo_id, _commit_id):
283 def _ctx_files(_repo_id, _commit_id):
291 repo = self._factory.repo(wire)
284 repo = self._factory.repo(wire)
292 ctx = self._get_ctx(repo, commit_id)
285 ctx = self._get_ctx(repo, commit_id)
293 return ctx.files()
286 return ctx.files()
294
287
295 return _ctx_files(repo_id, commit_id)
288 return _ctx_files(repo_id, commit_id)
296
289
297 @reraise_safe_exceptions
290 @reraise_safe_exceptions
298 def ctx_list(self, path, revision):
291 def ctx_list(self, path, revision):
299 repo = self._factory.repo(path)
292 repo = self._factory.repo(path)
300 ctx = self._get_ctx(repo, revision)
293 ctx = self._get_ctx(repo, revision)
301 return list(ctx)
294 return list(ctx)
302
295
303 @reraise_safe_exceptions
296 @reraise_safe_exceptions
304 def ctx_parents(self, wire, commit_id):
297 def ctx_parents(self, wire, commit_id):
305 cache_on, context_uid, repo_id = self._cache_on(wire)
298 cache_on, context_uid, repo_id = self._cache_on(wire)
306 @self.region.conditional_cache_on_arguments(condition=cache_on)
299 @self.region.conditional_cache_on_arguments(condition=cache_on)
307 def _ctx_parents(_repo_id, _commit_id):
300 def _ctx_parents(_repo_id, _commit_id):
308 repo = self._factory.repo(wire)
301 repo = self._factory.repo(wire)
309 ctx = self._get_ctx(repo, commit_id)
302 ctx = self._get_ctx(repo, commit_id)
310 return [parent.rev() for parent in ctx.parents()
303 return [parent.rev() for parent in ctx.parents()
311 if not (parent.hidden() or parent.obsolete())]
304 if not (parent.hidden() or parent.obsolete())]
312
305
313 return _ctx_parents(repo_id, commit_id)
306 return _ctx_parents(repo_id, commit_id)
314
307
315 @reraise_safe_exceptions
308 @reraise_safe_exceptions
316 def ctx_children(self, wire, commit_id):
309 def ctx_children(self, wire, commit_id):
317 cache_on, context_uid, repo_id = self._cache_on(wire)
310 cache_on, context_uid, repo_id = self._cache_on(wire)
318 @self.region.conditional_cache_on_arguments(condition=cache_on)
311 @self.region.conditional_cache_on_arguments(condition=cache_on)
319 def _ctx_children(_repo_id, _commit_id):
312 def _ctx_children(_repo_id, _commit_id):
320 repo = self._factory.repo(wire)
313 repo = self._factory.repo(wire)
321 ctx = self._get_ctx(repo, commit_id)
314 ctx = self._get_ctx(repo, commit_id)
322 return [child.rev() for child in ctx.children()
315 return [child.rev() for child in ctx.children()
323 if not (child.hidden() or child.obsolete())]
316 if not (child.hidden() or child.obsolete())]
324
317
325 return _ctx_children(repo_id, commit_id)
318 return _ctx_children(repo_id, commit_id)
326
319
327 @reraise_safe_exceptions
320 @reraise_safe_exceptions
328 def ctx_phase(self, wire, commit_id):
321 def ctx_phase(self, wire, commit_id):
329 cache_on, context_uid, repo_id = self._cache_on(wire)
322 cache_on, context_uid, repo_id = self._cache_on(wire)
330 @self.region.conditional_cache_on_arguments(condition=cache_on)
323 @self.region.conditional_cache_on_arguments(condition=cache_on)
331 def _ctx_phase(_context_uid, _repo_id, _commit_id):
324 def _ctx_phase(_context_uid, _repo_id, _commit_id):
332 repo = self._factory.repo(wire)
325 repo = self._factory.repo(wire)
333 ctx = self._get_ctx(repo, commit_id)
326 ctx = self._get_ctx(repo, commit_id)
334 # public=0, draft=1, secret=3
327 # public=0, draft=1, secret=3
335 return ctx.phase()
328 return ctx.phase()
336 return _ctx_phase(context_uid, repo_id, commit_id)
329 return _ctx_phase(context_uid, repo_id, commit_id)
337
330
338 @reraise_safe_exceptions
331 @reraise_safe_exceptions
339 def ctx_obsolete(self, wire, commit_id):
332 def ctx_obsolete(self, wire, commit_id):
340 cache_on, context_uid, repo_id = self._cache_on(wire)
333 cache_on, context_uid, repo_id = self._cache_on(wire)
341 @self.region.conditional_cache_on_arguments(condition=cache_on)
334 @self.region.conditional_cache_on_arguments(condition=cache_on)
342 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
335 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
343 repo = self._factory.repo(wire)
336 repo = self._factory.repo(wire)
344 ctx = self._get_ctx(repo, commit_id)
337 ctx = self._get_ctx(repo, commit_id)
345 return ctx.obsolete()
338 return ctx.obsolete()
346 return _ctx_obsolete(context_uid, repo_id, commit_id)
339 return _ctx_obsolete(context_uid, repo_id, commit_id)
347
340
348 @reraise_safe_exceptions
341 @reraise_safe_exceptions
349 def ctx_hidden(self, wire, commit_id):
342 def ctx_hidden(self, wire, commit_id):
350 cache_on, context_uid, repo_id = self._cache_on(wire)
343 cache_on, context_uid, repo_id = self._cache_on(wire)
351 @self.region.conditional_cache_on_arguments(condition=cache_on)
344 @self.region.conditional_cache_on_arguments(condition=cache_on)
352 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
345 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
353 repo = self._factory.repo(wire)
346 repo = self._factory.repo(wire)
354 ctx = self._get_ctx(repo, commit_id)
347 ctx = self._get_ctx(repo, commit_id)
355 return ctx.hidden()
348 return ctx.hidden()
356 return _ctx_hidden(context_uid, repo_id, commit_id)
349 return _ctx_hidden(context_uid, repo_id, commit_id)
357
350
358 @reraise_safe_exceptions
351 @reraise_safe_exceptions
359 def ctx_substate(self, wire, revision):
352 def ctx_substate(self, wire, revision):
360 repo = self._factory.repo(wire)
353 repo = self._factory.repo(wire)
361 ctx = self._get_ctx(repo, revision)
354 ctx = self._get_ctx(repo, revision)
362 return ctx.substate
355 return ctx.substate
363
356
364 @reraise_safe_exceptions
357 @reraise_safe_exceptions
365 def ctx_status(self, wire, revision):
358 def ctx_status(self, wire, revision):
366 repo = self._factory.repo(wire)
359 repo = self._factory.repo(wire)
367 ctx = self._get_ctx(repo, revision)
360 ctx = self._get_ctx(repo, revision)
368 status = repo[ctx.p1().node()].status(other=ctx.node())
361 status = repo[ctx.p1().node()].status(other=ctx.node())
369 # object of status (odd, custom named tuple in mercurial) is not
362 # object of status (odd, custom named tuple in mercurial) is not
370 # correctly serializable, we make it a list, as the underling
363 # correctly serializable, we make it a list, as the underling
371 # API expects this to be a list
364 # API expects this to be a list
372 return list(status)
365 return list(status)
373
366
374 @reraise_safe_exceptions
367 @reraise_safe_exceptions
375 def ctx_user(self, wire, revision):
368 def ctx_user(self, wire, revision):
376 repo = self._factory.repo(wire)
369 repo = self._factory.repo(wire)
377 ctx = self._get_ctx(repo, revision)
370 ctx = self._get_ctx(repo, revision)
378 return ctx.user()
371 return ctx.user()
379
372
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """
        Verify that `url` points at a reachable, valid Mercurial repository.

        Supports a "<proto>+" prefix before the scheme (e.g. svn+http://...);
        when the prefix is `svn`, only reachability is checked and the
        hg-repository probe is skipped.

        :raises exceptions.URLError: when the url cannot be opened, or does
            not look like a Mercurial repository.
        :return: True on success.
        """
        _proto = None
        # split off an optional "<proto>+" prefix before the scheme
        if '+' in url[:url.find('://')]:
            _proto = url[0:url.find('+')]
            url = url[url.find('+') + 1:]
        handlers = []
        url_obj = url_parser(url)
        test_uri, authinfo = url_obj.authinfo()
        # obfuscate credentials before the url reaches the logs
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)

        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if authinfo:
            # create a password manager
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        o = urllib2.build_opener(*handlers)
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                        ('Accept', 'application/mercurial-0.1')]

        # issue a cheap "between" command over the null-revision pair as a
        # reachability probe
        q = {"cmd": 'between'}
        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now check if it's a proper hg repo, but don't do it for svn
        try:
            if _proto == 'svn':
                pass
            else:
                # check for pure hg repos
                log.debug(
                    "Verifying if URL is a Mercurial repository: %s",
                    cleaned_uri)
                ui = make_ui_from_config(config)
                peer_checker = makepeer(ui, url)
                # lookup of 'tip' only succeeds against a real hg server
                peer_checker.lookup('tip')
        except Exception as e:
            log.warning("URL is not a valid Mercurial repository: %s",
                        cleaned_uri)
            raise exceptions.URLError(e)(
                "url [%s] does not look like an hg repo org_exc: %s"
                % (cleaned_uri, e))

        log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
        return True
444
437
445 @reraise_safe_exceptions
438 @reraise_safe_exceptions
446 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
439 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
447 repo = self._factory.repo(wire)
440 repo = self._factory.repo(wire)
448
441
449 if file_filter:
442 if file_filter:
450 match_filter = match(file_filter[0], '', [file_filter[1]])
443 match_filter = match(file_filter[0], '', [file_filter[1]])
451 else:
444 else:
452 match_filter = file_filter
445 match_filter = file_filter
453 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
446 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
454
447
455 try:
448 try:
456 return "".join(patch.diff(
449 return "".join(patch.diff(
457 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
450 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
458 except RepoLookupError as e:
451 except RepoLookupError as e:
459 raise exceptions.LookupException(e)()
452 raise exceptions.LookupException(e)()
460
453
461 @reraise_safe_exceptions
454 @reraise_safe_exceptions
462 def node_history(self, wire, revision, path, limit):
455 def node_history(self, wire, revision, path, limit):
463 cache_on, context_uid, repo_id = self._cache_on(wire)
456 cache_on, context_uid, repo_id = self._cache_on(wire)
464 @self.region.conditional_cache_on_arguments(condition=cache_on)
457 @self.region.conditional_cache_on_arguments(condition=cache_on)
465 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
458 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
466 repo = self._factory.repo(wire)
459 repo = self._factory.repo(wire)
467
460
468 ctx = self._get_ctx(repo, revision)
461 ctx = self._get_ctx(repo, revision)
469 fctx = ctx.filectx(path)
462 fctx = ctx.filectx(path)
470
463
471 def history_iter():
464 def history_iter():
472 limit_rev = fctx.rev()
465 limit_rev = fctx.rev()
473 for obj in reversed(list(fctx.filelog())):
466 for obj in reversed(list(fctx.filelog())):
474 obj = fctx.filectx(obj)
467 obj = fctx.filectx(obj)
475 ctx = obj.changectx()
468 ctx = obj.changectx()
476 if ctx.hidden() or ctx.obsolete():
469 if ctx.hidden() or ctx.obsolete():
477 continue
470 continue
478
471
479 if limit_rev >= obj.rev():
472 if limit_rev >= obj.rev():
480 yield obj
473 yield obj
481
474
482 history = []
475 history = []
483 for cnt, obj in enumerate(history_iter()):
476 for cnt, obj in enumerate(history_iter()):
484 if limit and cnt >= limit:
477 if limit and cnt >= limit:
485 break
478 break
486 history.append(hex(obj.node()))
479 history.append(hex(obj.node()))
487
480
488 return [x for x in history]
481 return [x for x in history]
489 return _node_history(context_uid, repo_id, revision, path, limit)
482 return _node_history(context_uid, repo_id, revision, path, limit)
490
483
491 @reraise_safe_exceptions
484 @reraise_safe_exceptions
492 def node_history_untill(self, wire, revision, path, limit):
485 def node_history_untill(self, wire, revision, path, limit):
493 cache_on, context_uid, repo_id = self._cache_on(wire)
486 cache_on, context_uid, repo_id = self._cache_on(wire)
494 @self.region.conditional_cache_on_arguments(condition=cache_on)
487 @self.region.conditional_cache_on_arguments(condition=cache_on)
495 def _node_history_until(_context_uid, _repo_id):
488 def _node_history_until(_context_uid, _repo_id):
496 repo = self._factory.repo(wire)
489 repo = self._factory.repo(wire)
497 ctx = self._get_ctx(repo, revision)
490 ctx = self._get_ctx(repo, revision)
498 fctx = ctx.filectx(path)
491 fctx = ctx.filectx(path)
499
492
500 file_log = list(fctx.filelog())
493 file_log = list(fctx.filelog())
501 if limit:
494 if limit:
502 # Limit to the last n items
495 # Limit to the last n items
503 file_log = file_log[-limit:]
496 file_log = file_log[-limit:]
504
497
505 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
498 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
506 return _node_history_until(context_uid, repo_id, revision, path, limit)
499 return _node_history_until(context_uid, repo_id, revision, path, limit)
507
500
508 @reraise_safe_exceptions
501 @reraise_safe_exceptions
509 def fctx_annotate(self, wire, revision, path):
502 def fctx_annotate(self, wire, revision, path):
510 repo = self._factory.repo(wire)
503 repo = self._factory.repo(wire)
511 ctx = self._get_ctx(repo, revision)
504 ctx = self._get_ctx(repo, revision)
512 fctx = ctx.filectx(path)
505 fctx = ctx.filectx(path)
513
506
514 result = []
507 result = []
515 for i, annotate_obj in enumerate(fctx.annotate(), 1):
508 for i, annotate_obj in enumerate(fctx.annotate(), 1):
516 ln_no = i
509 ln_no = i
517 sha = hex(annotate_obj.fctx.node())
510 sha = hex(annotate_obj.fctx.node())
518 content = annotate_obj.text
511 content = annotate_obj.text
519 result.append((ln_no, sha, content))
512 result.append((ln_no, sha, content))
520 return result
513 return result
521
514
522 @reraise_safe_exceptions
515 @reraise_safe_exceptions
523 def fctx_node_data(self, wire, revision, path):
516 def fctx_node_data(self, wire, revision, path):
524 repo = self._factory.repo(wire)
517 repo = self._factory.repo(wire)
525 ctx = self._get_ctx(repo, revision)
518 ctx = self._get_ctx(repo, revision)
526 fctx = ctx.filectx(path)
519 fctx = ctx.filectx(path)
527 return fctx.data()
520 return fctx.data()
528
521
529 @reraise_safe_exceptions
522 @reraise_safe_exceptions
530 def fctx_flags(self, wire, commit_id, path):
523 def fctx_flags(self, wire, commit_id, path):
531 cache_on, context_uid, repo_id = self._cache_on(wire)
524 cache_on, context_uid, repo_id = self._cache_on(wire)
532 @self.region.conditional_cache_on_arguments(condition=cache_on)
525 @self.region.conditional_cache_on_arguments(condition=cache_on)
533 def _fctx_flags(_repo_id, _commit_id, _path):
526 def _fctx_flags(_repo_id, _commit_id, _path):
534 repo = self._factory.repo(wire)
527 repo = self._factory.repo(wire)
535 ctx = self._get_ctx(repo, commit_id)
528 ctx = self._get_ctx(repo, commit_id)
536 fctx = ctx.filectx(path)
529 fctx = ctx.filectx(path)
537 return fctx.flags()
530 return fctx.flags()
538
531
539 return _fctx_flags(repo_id, commit_id, path)
532 return _fctx_flags(repo_id, commit_id, path)
540
533
541 @reraise_safe_exceptions
534 @reraise_safe_exceptions
542 def fctx_size(self, wire, commit_id, path):
535 def fctx_size(self, wire, commit_id, path):
543 cache_on, context_uid, repo_id = self._cache_on(wire)
536 cache_on, context_uid, repo_id = self._cache_on(wire)
544 @self.region.conditional_cache_on_arguments(condition=cache_on)
537 @self.region.conditional_cache_on_arguments(condition=cache_on)
545 def _fctx_size(_repo_id, _revision, _path):
538 def _fctx_size(_repo_id, _revision, _path):
546 repo = self._factory.repo(wire)
539 repo = self._factory.repo(wire)
547 ctx = self._get_ctx(repo, commit_id)
540 ctx = self._get_ctx(repo, commit_id)
548 fctx = ctx.filectx(path)
541 fctx = ctx.filectx(path)
549 return fctx.size()
542 return fctx.size()
550 return _fctx_size(repo_id, commit_id, path)
543 return _fctx_size(repo_id, commit_id, path)
551
544
552 @reraise_safe_exceptions
545 @reraise_safe_exceptions
553 def get_all_commit_ids(self, wire, name):
546 def get_all_commit_ids(self, wire, name):
554 cache_on, context_uid, repo_id = self._cache_on(wire)
547 cache_on, context_uid, repo_id = self._cache_on(wire)
555 @self.region.conditional_cache_on_arguments(condition=cache_on)
548 @self.region.conditional_cache_on_arguments(condition=cache_on)
556 def _get_all_commit_ids(_context_uid, _repo_id, _name):
549 def _get_all_commit_ids(_context_uid, _repo_id, _name):
557 repo = self._factory.repo(wire)
550 repo = self._factory.repo(wire)
558 repo = repo.filtered(name)
551 repo = repo.filtered(name)
559 revs = map(lambda x: hex(x[7]), repo.changelog.index)
552 revs = map(lambda x: hex(x[7]), repo.changelog.index)
560 return revs
553 return revs
561 return _get_all_commit_ids(context_uid, repo_id, name)
554 return _get_all_commit_ids(context_uid, repo_id, name)
562
555
563 @reraise_safe_exceptions
556 @reraise_safe_exceptions
564 def get_config_value(self, wire, section, name, untrusted=False):
557 def get_config_value(self, wire, section, name, untrusted=False):
565 repo = self._factory.repo(wire)
558 repo = self._factory.repo(wire)
566 return repo.ui.config(section, name, untrusted=untrusted)
559 return repo.ui.config(section, name, untrusted=untrusted)
567
560
568 @reraise_safe_exceptions
561 @reraise_safe_exceptions
569 def is_large_file(self, wire, path):
562 def is_large_file(self, wire, path):
570 cache_on, context_uid, repo_id = self._cache_on(wire)
563 cache_on, context_uid, repo_id = self._cache_on(wire)
571 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 @self.region.conditional_cache_on_arguments(condition=cache_on)
572 def _is_large_file(_context_uid, _repo_id, _path):
565 def _is_large_file(_context_uid, _repo_id, _path):
573 return largefiles.lfutil.isstandin(path)
566 return largefiles.lfutil.isstandin(path)
574
567
575 return _is_large_file(context_uid, repo_id, path)
568 return _is_large_file(context_uid, repo_id, path)
576
569
577 @reraise_safe_exceptions
570 @reraise_safe_exceptions
578 def in_largefiles_store(self, wire, sha):
571 def in_largefiles_store(self, wire, sha):
579 repo = self._factory.repo(wire)
572 repo = self._factory.repo(wire)
580 return largefiles.lfutil.instore(repo, sha)
573 return largefiles.lfutil.instore(repo, sha)
581
574
582 @reraise_safe_exceptions
575 @reraise_safe_exceptions
583 def in_user_cache(self, wire, sha):
576 def in_user_cache(self, wire, sha):
584 repo = self._factory.repo(wire)
577 repo = self._factory.repo(wire)
585 return largefiles.lfutil.inusercache(repo.ui, sha)
578 return largefiles.lfutil.inusercache(repo.ui, sha)
586
579
587 @reraise_safe_exceptions
580 @reraise_safe_exceptions
588 def store_path(self, wire, sha):
581 def store_path(self, wire, sha):
589 repo = self._factory.repo(wire)
582 repo = self._factory.repo(wire)
590 return largefiles.lfutil.storepath(repo, sha)
583 return largefiles.lfutil.storepath(repo, sha)
591
584
592 @reraise_safe_exceptions
585 @reraise_safe_exceptions
593 def link(self, wire, sha, path):
586 def link(self, wire, sha, path):
594 repo = self._factory.repo(wire)
587 repo = self._factory.repo(wire)
595 largefiles.lfutil.link(
588 largefiles.lfutil.link(
596 largefiles.lfutil.usercachepath(repo.ui, sha), path)
589 largefiles.lfutil.usercachepath(repo.ui, sha), path)
597
590
598 @reraise_safe_exceptions
591 @reraise_safe_exceptions
599 def localrepository(self, wire, create=False):
592 def localrepository(self, wire, create=False):
600 self._factory.repo(wire, create=create)
593 self._factory.repo(wire, create=create)
601
594
602 @reraise_safe_exceptions
595 @reraise_safe_exceptions
603 def lookup(self, wire, revision, both):
596 def lookup(self, wire, revision, both):
604 cache_on, context_uid, repo_id = self._cache_on(wire)
597 cache_on, context_uid, repo_id = self._cache_on(wire)
605 @self.region.conditional_cache_on_arguments(condition=cache_on)
598 @self.region.conditional_cache_on_arguments(condition=cache_on)
606 def _lookup(_context_uid, _repo_id, _revision, _both):
599 def _lookup(_context_uid, _repo_id, _revision, _both):
607
600
608 repo = self._factory.repo(wire)
601 repo = self._factory.repo(wire)
609 rev = _revision
602 rev = _revision
610 if isinstance(rev, int):
603 if isinstance(rev, int):
611 # NOTE(marcink):
604 # NOTE(marcink):
612 # since Mercurial doesn't support negative indexes properly
605 # since Mercurial doesn't support negative indexes properly
613 # we need to shift accordingly by one to get proper index, e.g
606 # we need to shift accordingly by one to get proper index, e.g
614 # repo[-1] => repo[-2]
607 # repo[-1] => repo[-2]
615 # repo[0] => repo[-1]
608 # repo[0] => repo[-1]
616 if rev <= 0:
609 if rev <= 0:
617 rev = rev + -1
610 rev = rev + -1
618 try:
611 try:
619 ctx = self._get_ctx(repo, rev)
612 ctx = self._get_ctx(repo, rev)
620 except (TypeError, RepoLookupError) as e:
613 except (TypeError, RepoLookupError) as e:
621 e._org_exc_tb = traceback.format_exc()
614 e._org_exc_tb = traceback.format_exc()
622 raise exceptions.LookupException(e)(rev)
615 raise exceptions.LookupException(e)(rev)
623 except LookupError as e:
616 except LookupError as e:
624 e._org_exc_tb = traceback.format_exc()
617 e._org_exc_tb = traceback.format_exc()
625 raise exceptions.LookupException(e)(e.name)
618 raise exceptions.LookupException(e)(e.name)
626
619
627 if not both:
620 if not both:
628 return ctx.hex()
621 return ctx.hex()
629
622
630 ctx = repo[ctx.hex()]
623 ctx = repo[ctx.hex()]
631 return ctx.hex(), ctx.rev()
624 return ctx.hex(), ctx.rev()
632
625
633 return _lookup(context_uid, repo_id, revision, both)
626 return _lookup(context_uid, repo_id, revision, both)
634
627
    @reraise_safe_exceptions
    def sync_push(self, wire, url):
        """
        Push changes and all bookmarks of the local repository to `url`.

        Silently returns when `url` fails `check_url` validation; otherwise
        returns the changegroup result of the exchange.
        """
        if not self.check_url(url, wire['config']):
            return

        repo = self._factory.repo(wire)

        # Disable any prompts for this repo
        repo.ui.setconfig('ui', 'interactive', 'off', '-y')

        bookmarks = dict(repo._bookmarks).keys()
        remote = peer(repo, {}, url)
        # Disable any prompts for this remote
        remote.ui.setconfig('ui', 'interactive', 'off', '-y')

        return exchange.push(
            repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
652
645
653 @reraise_safe_exceptions
646 @reraise_safe_exceptions
654 def revision(self, wire, rev):
647 def revision(self, wire, rev):
655 repo = self._factory.repo(wire)
648 repo = self._factory.repo(wire)
656 ctx = self._get_ctx(repo, rev)
649 ctx = self._get_ctx(repo, rev)
657 return ctx.rev()
650 return ctx.rev()
658
651
    @reraise_safe_exceptions
    def rev_range(self, wire, commit_filter):
        """
        Return revision numbers matching the revset filter.

        The cache key uses a *sorted* copy of `commit_filter` so that
        differently ordered but equivalent filters share one cache entry;
        the revset evaluation itself reads the original `commit_filter`
        from the closure.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _rev_range(_context_uid, _repo_id, _filter):
            repo = self._factory.repo(wire)
            revisions = [rev for rev in revrange(repo, commit_filter)]
            return revisions

        return _rev_range(context_uid, repo_id, sorted(commit_filter))
669
662
670 @reraise_safe_exceptions
663 @reraise_safe_exceptions
671 def rev_range_hash(self, wire, node):
664 def rev_range_hash(self, wire, node):
672 repo = self._factory.repo(wire)
665 repo = self._factory.repo(wire)
673
666
674 def get_revs(repo, rev_opt):
667 def get_revs(repo, rev_opt):
675 if rev_opt:
668 if rev_opt:
676 revs = revrange(repo, rev_opt)
669 revs = revrange(repo, rev_opt)
677 if len(revs) == 0:
670 if len(revs) == 0:
678 return (nullrev, nullrev)
671 return (nullrev, nullrev)
679 return max(revs), min(revs)
672 return max(revs), min(revs)
680 else:
673 else:
681 return len(repo) - 1, 0
674 return len(repo) - 1, 0
682
675
683 stop, start = get_revs(repo, [node + ':'])
676 stop, start = get_revs(repo, [node + ':'])
684 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
677 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
685 return revs
678 return revs
686
679
    @reraise_safe_exceptions
    def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
        """
        Evaluate the revset `rev_spec` and return matching revisions as a
        list.

        When kwargs contains `other_path` pointing at a different repo, the
        revset is evaluated over a union of both repositories.
        """
        other_path = kwargs.pop('other_path', None)

        # case when we want to compare two independent repositories
        if other_path and other_path != wire["path"]:
            baseui = self._factory._create_config(wire["config"])
            repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
        else:
            repo = self._factory.repo(wire)
        return list(repo.revs(rev_spec, *args))
698
691
    @reraise_safe_exceptions
    def verify(self, wire,):
        """
        Run repository integrity verification and return the captured text
        output.

        NOTE(review): this rebinds repo.ui to a fresh, non-quiet config whose
        write() collects into an in-memory buffer; the repo object keeps
        that ui afterwards.
        """
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        baseui.setconfig('ui', 'quiet', 'false')
        output = io.BytesIO()

        def write(data, **unused_kwargs):
            output.write(data)
        baseui.write = write

        repo.ui = baseui
        verify.verify(repo)
        return output.getvalue()
713
706
714 @reraise_safe_exceptions
707 @reraise_safe_exceptions
715 def tags(self, wire):
708 def tags(self, wire):
716 cache_on, context_uid, repo_id = self._cache_on(wire)
709 cache_on, context_uid, repo_id = self._cache_on(wire)
717 @self.region.conditional_cache_on_arguments(condition=cache_on)
710 @self.region.conditional_cache_on_arguments(condition=cache_on)
718 def _tags(_context_uid, _repo_id):
711 def _tags(_context_uid, _repo_id):
719 repo = self._factory.repo(wire)
712 repo = self._factory.repo(wire)
720 return repo.tags()
713 return repo.tags()
721
714
722 return _tags(context_uid, repo_id)
715 return _tags(context_uid, repo_id)
723
716
724 @reraise_safe_exceptions
717 @reraise_safe_exceptions
725 def update(self, wire, node=None, clean=False):
718 def update(self, wire, node=None, clean=False):
726 repo = self._factory.repo(wire)
719 repo = self._factory.repo(wire)
727 baseui = self._factory._create_config(wire['config'])
720 baseui = self._factory._create_config(wire['config'])
728 commands.update(baseui, repo, node=node, clean=clean)
721 commands.update(baseui, repo, node=node, clean=clean)
729
722
730 @reraise_safe_exceptions
723 @reraise_safe_exceptions
731 def identify(self, wire):
724 def identify(self, wire):
732 repo = self._factory.repo(wire)
725 repo = self._factory.repo(wire)
733 baseui = self._factory._create_config(wire['config'])
726 baseui = self._factory._create_config(wire['config'])
734 output = io.BytesIO()
727 output = io.BytesIO()
735 baseui.write = output.write
728 baseui.write = output.write
736 # This is required to get a full node id
729 # This is required to get a full node id
737 baseui.debugflag = True
730 baseui.debugflag = True
738 commands.identify(baseui, repo, id=True)
731 commands.identify(baseui, repo, id=True)
739
732
740 return output.getvalue()
733 return output.getvalue()
741
734
742 @reraise_safe_exceptions
735 @reraise_safe_exceptions
743 def heads(self, wire, branch=None):
736 def heads(self, wire, branch=None):
744 repo = self._factory.repo(wire)
737 repo = self._factory.repo(wire)
745 baseui = self._factory._create_config(wire['config'])
738 baseui = self._factory._create_config(wire['config'])
746 output = io.BytesIO()
739 output = io.BytesIO()
747
740
748 def write(data, **unused_kwargs):
741 def write(data, **unused_kwargs):
749 output.write(data)
742 output.write(data)
750
743
751 baseui.write = write
744 baseui.write = write
752 if branch:
745 if branch:
753 args = [branch]
746 args = [branch]
754 else:
747 else:
755 args = []
748 args = []
756 commands.heads(baseui, repo, template='{node} ', *args)
749 commands.heads(baseui, repo, template='{node} ', *args)
757
750
758 return output.getvalue()
751 return output.getvalue()
759
752
760 @reraise_safe_exceptions
753 @reraise_safe_exceptions
761 def ancestor(self, wire, revision1, revision2):
754 def ancestor(self, wire, revision1, revision2):
762 repo = self._factory.repo(wire)
755 repo = self._factory.repo(wire)
763 changelog = repo.changelog
756 changelog = repo.changelog
764 lookup = repo.lookup
757 lookup = repo.lookup
765 a = changelog.ancestor(lookup(revision1), lookup(revision2))
758 a = changelog.ancestor(lookup(revision1), lookup(revision2))
766 return hex(a)
759 return hex(a)
767
760
768 @reraise_safe_exceptions
761 @reraise_safe_exceptions
769 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
762 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
770 baseui = self._factory._create_config(wire["config"], hooks=hooks)
763 baseui = self._factory._create_config(wire["config"], hooks=hooks)
771 clone(baseui, source, dest, noupdate=not update_after_clone)
764 clone(baseui, source, dest, noupdate=not update_after_clone)
772
765
    @reraise_safe_exceptions
    def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
        """
        Create an in-memory commit (memctx) and write it to the repository.

        :param message: commit message
        :param parents: parent nodes of the new commit
        :param commit_time: commit timestamp
        :param commit_timezone: timezone offset for the timestamp
        :param user: author string
        :param files: list of file paths touched by the commit
        :param extra: extra commit metadata dict
        :param removed: paths to be removed by this commit
        :param updated: list of dicts with 'path', 'content' and 'mode' for
            added/changed files
        :return: hex node id of the new commit
        """

        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        # phase of the new commit follows the repo's phases.publish setting
        publishing = baseui.configbool('phases', 'publish')
        if publishing:
            new_commit = 'public'
        else:
            new_commit = 'draft'

        def _filectxfn(_repo, ctx, path):
            """
            Marks given path as added/changed/removed in a given _repo. This is
            for internal mercurial commit function.
            """

            # check if this path is removed
            if path in removed:
                # returning None is a way to mark node for removal
                return None

            # check if this path is added
            for node in updated:
                if node['path'] == path:
                    return memfilectx(
                        _repo,
                        changectx=ctx,
                        path=node['path'],
                        data=node['content'],
                        islink=False,
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copysource=False)

            raise exceptions.AbortException()(
                "Given path haven't been marked as added, "
                "changed or removed (%s)" % path)

        with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):

            commit_ctx = memctx(
                repo=repo,
                parents=parents,
                text=message,
                files=files,
                filectxfn=_filectxfn,
                user=user,
                date=(commit_time, commit_timezone),
                extra=extra)

            n = repo.commitctx(commit_ctx)
            new_id = hex(n)

        return new_id
827
820
828 @reraise_safe_exceptions
821 @reraise_safe_exceptions
829 def pull(self, wire, url, commit_ids=None):
822 def pull(self, wire, url, commit_ids=None):
830 repo = self._factory.repo(wire)
823 repo = self._factory.repo(wire)
831 # Disable any prompts for this repo
824 # Disable any prompts for this repo
832 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
825 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
833
826
834 remote = peer(repo, {}, url)
827 remote = peer(repo, {}, url)
835 # Disable any prompts for this remote
828 # Disable any prompts for this remote
836 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
829 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
837
830
838 if commit_ids:
831 if commit_ids:
839 commit_ids = [bin(commit_id) for commit_id in commit_ids]
832 commit_ids = [bin(commit_id) for commit_id in commit_ids]
840
833
841 return exchange.pull(
834 return exchange.pull(
842 repo, remote, heads=commit_ids, force=None).cgresult
835 repo, remote, heads=commit_ids, force=None).cgresult
843
836
844 @reraise_safe_exceptions
837 @reraise_safe_exceptions
845 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
838 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
846 repo = self._factory.repo(wire)
839 repo = self._factory.repo(wire)
847 baseui = self._factory._create_config(wire['config'], hooks=hooks)
840 baseui = self._factory._create_config(wire['config'], hooks=hooks)
848
841
849 # Mercurial internally has a lot of logic that checks ONLY if
842 # Mercurial internally has a lot of logic that checks ONLY if
850 # option is defined, we just pass those if they are defined then
843 # option is defined, we just pass those if they are defined then
851 opts = {}
844 opts = {}
852 if bookmark:
845 if bookmark:
853 opts['bookmark'] = bookmark
846 opts['bookmark'] = bookmark
854 if branch:
847 if branch:
855 opts['branch'] = branch
848 opts['branch'] = branch
856 if revision:
849 if revision:
857 opts['rev'] = revision
850 opts['rev'] = revision
858
851
859 commands.pull(baseui, repo, source, **opts)
852 commands.pull(baseui, repo, source, **opts)
860
853
861 @reraise_safe_exceptions
854 @reraise_safe_exceptions
862 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
855 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
863 repo = self._factory.repo(wire)
856 repo = self._factory.repo(wire)
864 baseui = self._factory._create_config(wire['config'], hooks=hooks)
857 baseui = self._factory._create_config(wire['config'], hooks=hooks)
865 commands.push(baseui, repo, dest=dest_path, rev=revisions,
858 commands.push(baseui, repo, dest=dest_path, rev=revisions,
866 new_branch=push_branches)
859 new_branch=push_branches)
867
860
868 @reraise_safe_exceptions
861 @reraise_safe_exceptions
869 def strip(self, wire, revision, update, backup):
862 def strip(self, wire, revision, update, backup):
870 repo = self._factory.repo(wire)
863 repo = self._factory.repo(wire)
871 ctx = self._get_ctx(repo, revision)
864 ctx = self._get_ctx(repo, revision)
872 hgext_strip(
865 hgext_strip(
873 repo.baseui, repo, ctx.node(), update=update, backup=backup)
866 repo.baseui, repo, ctx.node(), update=update, backup=backup)
874
867
875 @reraise_safe_exceptions
868 @reraise_safe_exceptions
876 def merge(self, wire, revision):
869 def merge(self, wire, revision):
877 repo = self._factory.repo(wire)
870 repo = self._factory.repo(wire)
878 baseui = self._factory._create_config(wire['config'])
871 baseui = self._factory._create_config(wire['config'])
879 repo.ui.setconfig('ui', 'merge', 'internal:dump')
872 repo.ui.setconfig('ui', 'merge', 'internal:dump')
880
873
881 # In case of sub repositories are used mercurial prompts the user in
874 # In case of sub repositories are used mercurial prompts the user in
882 # case of merge conflicts or different sub repository sources. By
875 # case of merge conflicts or different sub repository sources. By
883 # setting the interactive flag to `False` mercurial doesn't prompt the
876 # setting the interactive flag to `False` mercurial doesn't prompt the
884 # used but instead uses a default value.
877 # used but instead uses a default value.
885 repo.ui.setconfig('ui', 'interactive', False)
878 repo.ui.setconfig('ui', 'interactive', False)
886 commands.merge(baseui, repo, rev=revision)
879 commands.merge(baseui, repo, rev=revision)
887
880
888 @reraise_safe_exceptions
881 @reraise_safe_exceptions
889 def merge_state(self, wire):
882 def merge_state(self, wire):
890 repo = self._factory.repo(wire)
883 repo = self._factory.repo(wire)
891 repo.ui.setconfig('ui', 'merge', 'internal:dump')
884 repo.ui.setconfig('ui', 'merge', 'internal:dump')
892
885
893 # In case of sub repositories are used mercurial prompts the user in
886 # In case of sub repositories are used mercurial prompts the user in
894 # case of merge conflicts or different sub repository sources. By
887 # case of merge conflicts or different sub repository sources. By
895 # setting the interactive flag to `False` mercurial doesn't prompt the
888 # setting the interactive flag to `False` mercurial doesn't prompt the
896 # used but instead uses a default value.
889 # used but instead uses a default value.
897 repo.ui.setconfig('ui', 'interactive', False)
890 repo.ui.setconfig('ui', 'interactive', False)
898 ms = hg_merge.mergestate(repo)
891 ms = hg_merge.mergestate(repo)
899 return [x for x in ms.unresolved()]
892 return [x for x in ms.unresolved()]
900
893
901 @reraise_safe_exceptions
894 @reraise_safe_exceptions
902 def commit(self, wire, message, username, close_branch=False):
895 def commit(self, wire, message, username, close_branch=False):
903 repo = self._factory.repo(wire)
896 repo = self._factory.repo(wire)
904 baseui = self._factory._create_config(wire['config'])
897 baseui = self._factory._create_config(wire['config'])
905 repo.ui.setconfig('ui', 'username', username)
898 repo.ui.setconfig('ui', 'username', username)
906 commands.commit(baseui, repo, message=message, close_branch=close_branch)
899 commands.commit(baseui, repo, message=message, close_branch=close_branch)
907
900
908 @reraise_safe_exceptions
901 @reraise_safe_exceptions
909 def rebase(self, wire, source=None, dest=None, abort=False):
902 def rebase(self, wire, source=None, dest=None, abort=False):
910 repo = self._factory.repo(wire)
903 repo = self._factory.repo(wire)
911 baseui = self._factory._create_config(wire['config'])
904 baseui = self._factory._create_config(wire['config'])
912 repo.ui.setconfig('ui', 'merge', 'internal:dump')
905 repo.ui.setconfig('ui', 'merge', 'internal:dump')
913 rebase.rebase(
906 rebase.rebase(
914 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
907 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
915
908
916 @reraise_safe_exceptions
909 @reraise_safe_exceptions
917 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
910 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
918 repo = self._factory.repo(wire)
911 repo = self._factory.repo(wire)
919 ctx = self._get_ctx(repo, revision)
912 ctx = self._get_ctx(repo, revision)
920 node = ctx.node()
913 node = ctx.node()
921
914
922 date = (tag_time, tag_timezone)
915 date = (tag_time, tag_timezone)
923 try:
916 try:
924 hg_tag.tag(repo, name, node, message, local, user, date)
917 hg_tag.tag(repo, name, node, message, local, user, date)
925 except Abort as e:
918 except Abort as e:
926 log.exception("Tag operation aborted")
919 log.exception("Tag operation aborted")
927 # Exception can contain unicode which we convert
920 # Exception can contain unicode which we convert
928 raise exceptions.AbortException(e)(repr(e))
921 raise exceptions.AbortException(e)(repr(e))
929
922
930 @reraise_safe_exceptions
923 @reraise_safe_exceptions
931 def bookmark(self, wire, bookmark, revision=None):
924 def bookmark(self, wire, bookmark, revision=None):
932 repo = self._factory.repo(wire)
925 repo = self._factory.repo(wire)
933 baseui = self._factory._create_config(wire['config'])
926 baseui = self._factory._create_config(wire['config'])
934 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
927 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
935
928
936 @reraise_safe_exceptions
929 @reraise_safe_exceptions
937 def install_hooks(self, wire, force=False):
930 def install_hooks(self, wire, force=False):
938 # we don't need any special hooks for Mercurial
931 # we don't need any special hooks for Mercurial
939 pass
932 pass
940
933
941 @reraise_safe_exceptions
934 @reraise_safe_exceptions
942 def get_hooks_info(self, wire):
935 def get_hooks_info(self, wire):
943 return {
936 return {
944 'pre_version': vcsserver.__version__,
937 'pre_version': vcsserver.__version__,
945 'post_version': vcsserver.__version__,
938 'post_version': vcsserver.__version__,
946 }
939 }
@@ -1,796 +1,789 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 from urllib2 import URLError
22 from urllib2 import URLError
23 import urlparse
23 import urlparse
24 import logging
24 import logging
25 import posixpath as vcspath
25 import posixpath as vcspath
26 import StringIO
26 import StringIO
27 import urllib
27 import urllib
28 import traceback
28 import traceback
29
29
30 import svn.client
30 import svn.client
31 import svn.core
31 import svn.core
32 import svn.delta
32 import svn.delta
33 import svn.diff
33 import svn.diff
34 import svn.fs
34 import svn.fs
35 import svn.repos
35 import svn.repos
36
36
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver.base import RepoFactory, raise_from_original
38 from vcsserver.base import RepoFactory, raise_from_original
39 from vcsserver.vcs_base import RemoteBase
39
40
40 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
41
42
42
43
43 # Set of svn compatible version flags.
44 # Set of svn compatible version flags.
44 # Compare with subversion/svnadmin/svnadmin.c
45 # Compare with subversion/svnadmin/svnadmin.c
45 svn_compatible_versions = {
46 svn_compatible_versions = {
46 'pre-1.4-compatible',
47 'pre-1.4-compatible',
47 'pre-1.5-compatible',
48 'pre-1.5-compatible',
48 'pre-1.6-compatible',
49 'pre-1.6-compatible',
49 'pre-1.8-compatible',
50 'pre-1.8-compatible',
50 'pre-1.9-compatible'
51 'pre-1.9-compatible'
51 }
52 }
52
53
53 svn_compatible_versions_map = {
54 svn_compatible_versions_map = {
54 'pre-1.4-compatible': '1.3',
55 'pre-1.4-compatible': '1.3',
55 'pre-1.5-compatible': '1.4',
56 'pre-1.5-compatible': '1.4',
56 'pre-1.6-compatible': '1.5',
57 'pre-1.6-compatible': '1.5',
57 'pre-1.8-compatible': '1.7',
58 'pre-1.8-compatible': '1.7',
58 'pre-1.9-compatible': '1.8',
59 'pre-1.9-compatible': '1.8',
59 }
60 }
60
61
61
62
62 def reraise_safe_exceptions(func):
63 def reraise_safe_exceptions(func):
63 """Decorator for converting svn exceptions to something neutral."""
64 """Decorator for converting svn exceptions to something neutral."""
64 def wrapper(*args, **kwargs):
65 def wrapper(*args, **kwargs):
65 try:
66 try:
66 return func(*args, **kwargs)
67 return func(*args, **kwargs)
67 except Exception as e:
68 except Exception as e:
68 if not hasattr(e, '_vcs_kind'):
69 if not hasattr(e, '_vcs_kind'):
69 log.exception("Unhandled exception in svn remote call")
70 log.exception("Unhandled exception in svn remote call")
70 raise_from_original(exceptions.UnhandledException(e))
71 raise_from_original(exceptions.UnhandledException(e))
71 raise
72 raise
72 return wrapper
73 return wrapper
73
74
74
75
75 class SubversionFactory(RepoFactory):
76 class SubversionFactory(RepoFactory):
76 repo_type = 'svn'
77 repo_type = 'svn'
77
78
78 def _create_repo(self, wire, create, compatible_version):
79 def _create_repo(self, wire, create, compatible_version):
79 path = svn.core.svn_path_canonicalize(wire['path'])
80 path = svn.core.svn_path_canonicalize(wire['path'])
80 if create:
81 if create:
81 fs_config = {'compatible-version': '1.9'}
82 fs_config = {'compatible-version': '1.9'}
82 if compatible_version:
83 if compatible_version:
83 if compatible_version not in svn_compatible_versions:
84 if compatible_version not in svn_compatible_versions:
84 raise Exception('Unknown SVN compatible version "{}"'
85 raise Exception('Unknown SVN compatible version "{}"'
85 .format(compatible_version))
86 .format(compatible_version))
86 fs_config['compatible-version'] = \
87 fs_config['compatible-version'] = \
87 svn_compatible_versions_map[compatible_version]
88 svn_compatible_versions_map[compatible_version]
88
89
89 log.debug('Create SVN repo with config "%s"', fs_config)
90 log.debug('Create SVN repo with config "%s"', fs_config)
90 repo = svn.repos.create(path, "", "", None, fs_config)
91 repo = svn.repos.create(path, "", "", None, fs_config)
91 else:
92 else:
92 repo = svn.repos.open(path)
93 repo = svn.repos.open(path)
93
94
94 log.debug('Got SVN object: %s', repo)
95 log.debug('Got SVN object: %s', repo)
95 return repo
96 return repo
96
97
97 def repo(self, wire, create=False, compatible_version=None):
98 def repo(self, wire, create=False, compatible_version=None):
98 """
99 """
99 Get a repository instance for the given path.
100 Get a repository instance for the given path.
100 """
101 """
101 return self._create_repo(wire, create, compatible_version)
102 return self._create_repo(wire, create, compatible_version)
102
103
103
104
104 NODE_TYPE_MAPPING = {
105 NODE_TYPE_MAPPING = {
105 svn.core.svn_node_file: 'file',
106 svn.core.svn_node_file: 'file',
106 svn.core.svn_node_dir: 'dir',
107 svn.core.svn_node_dir: 'dir',
107 }
108 }
108
109
109
110
110 class SvnRemote(object):
111 class SvnRemote(RemoteBase):
111
112
112 def __init__(self, factory, hg_factory=None):
113 def __init__(self, factory, hg_factory=None):
113 self._factory = factory
114 self._factory = factory
114 # TODO: Remove once we do not use internal Mercurial objects anymore
115 # TODO: Remove once we do not use internal Mercurial objects anymore
115 # for subversion
116 # for subversion
116 self._hg_factory = hg_factory
117 self._hg_factory = hg_factory
117 self.region = self._factory._cache_region
118 self.region = self._factory._cache_region
118
119
119 def _cache_on(self, wire):
120 context = wire.get('context', '')
121 context_uid = '{}'.format(context)
122 repo_id = wire.get('repo_id', '')
123 cache = wire.get('cache', True)
124 cache_on = context and cache
125 return cache_on, context_uid, repo_id
126
127 @reraise_safe_exceptions
120 @reraise_safe_exceptions
128 def discover_svn_version(self):
121 def discover_svn_version(self):
129 try:
122 try:
130 import svn.core
123 import svn.core
131 svn_ver = svn.core.SVN_VERSION
124 svn_ver = svn.core.SVN_VERSION
132 except ImportError:
125 except ImportError:
133 svn_ver = None
126 svn_ver = None
134 return svn_ver
127 return svn_ver
135
128
136 @reraise_safe_exceptions
129 @reraise_safe_exceptions
137 def is_empty(self, wire):
130 def is_empty(self, wire):
138
131
139 try:
132 try:
140 return self.lookup(wire, -1) == 0
133 return self.lookup(wire, -1) == 0
141 except Exception:
134 except Exception:
142 log.exception("failed to read object_store")
135 log.exception("failed to read object_store")
143 return False
136 return False
144
137
145 def check_url(self, url, config_items):
138 def check_url(self, url, config_items):
146 # this can throw exception if not installed, but we detect this
139 # this can throw exception if not installed, but we detect this
147 from hgsubversion import svnrepo
140 from hgsubversion import svnrepo
148
141
149 baseui = self._hg_factory._create_config(config_items)
142 baseui = self._hg_factory._create_config(config_items)
150 # uuid function get's only valid UUID from proper repo, else
143 # uuid function get's only valid UUID from proper repo, else
151 # throws exception
144 # throws exception
152 try:
145 try:
153 svnrepo.svnremoterepo(baseui, url).svn.uuid
146 svnrepo.svnremoterepo(baseui, url).svn.uuid
154 except Exception:
147 except Exception:
155 tb = traceback.format_exc()
148 tb = traceback.format_exc()
156 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
149 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
157 raise URLError(
150 raise URLError(
158 '"%s" is not a valid Subversion source url.' % (url, ))
151 '"%s" is not a valid Subversion source url.' % (url, ))
159 return True
152 return True
160
153
161 def is_path_valid_repository(self, wire, path):
154 def is_path_valid_repository(self, wire, path):
162
155
163 # NOTE(marcink): short circuit the check for SVN repo
156 # NOTE(marcink): short circuit the check for SVN repo
164 # the repos.open might be expensive to check, but we have one cheap
157 # the repos.open might be expensive to check, but we have one cheap
165 # pre condition that we can use, to check for 'format' file
158 # pre condition that we can use, to check for 'format' file
166
159
167 if not os.path.isfile(os.path.join(path, 'format')):
160 if not os.path.isfile(os.path.join(path, 'format')):
168 return False
161 return False
169
162
170 try:
163 try:
171 svn.repos.open(path)
164 svn.repos.open(path)
172 except svn.core.SubversionException:
165 except svn.core.SubversionException:
173 tb = traceback.format_exc()
166 tb = traceback.format_exc()
174 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
167 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
175 return False
168 return False
176 return True
169 return True
177
170
178 @reraise_safe_exceptions
171 @reraise_safe_exceptions
179 def verify(self, wire,):
172 def verify(self, wire,):
180 repo_path = wire['path']
173 repo_path = wire['path']
181 if not self.is_path_valid_repository(wire, repo_path):
174 if not self.is_path_valid_repository(wire, repo_path):
182 raise Exception(
175 raise Exception(
183 "Path %s is not a valid Subversion repository." % repo_path)
176 "Path %s is not a valid Subversion repository." % repo_path)
184
177
185 cmd = ['svnadmin', 'info', repo_path]
178 cmd = ['svnadmin', 'info', repo_path]
186 stdout, stderr = subprocessio.run_command(cmd)
179 stdout, stderr = subprocessio.run_command(cmd)
187 return stdout
180 return stdout
188
181
189 def lookup(self, wire, revision):
182 def lookup(self, wire, revision):
190 if revision not in [-1, None, 'HEAD']:
183 if revision not in [-1, None, 'HEAD']:
191 raise NotImplementedError
184 raise NotImplementedError
192 repo = self._factory.repo(wire)
185 repo = self._factory.repo(wire)
193 fs_ptr = svn.repos.fs(repo)
186 fs_ptr = svn.repos.fs(repo)
194 head = svn.fs.youngest_rev(fs_ptr)
187 head = svn.fs.youngest_rev(fs_ptr)
195 return head
188 return head
196
189
197 def lookup_interval(self, wire, start_ts, end_ts):
190 def lookup_interval(self, wire, start_ts, end_ts):
198 repo = self._factory.repo(wire)
191 repo = self._factory.repo(wire)
199 fsobj = svn.repos.fs(repo)
192 fsobj = svn.repos.fs(repo)
200 start_rev = None
193 start_rev = None
201 end_rev = None
194 end_rev = None
202 if start_ts:
195 if start_ts:
203 start_ts_svn = apr_time_t(start_ts)
196 start_ts_svn = apr_time_t(start_ts)
204 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
197 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
205 else:
198 else:
206 start_rev = 1
199 start_rev = 1
207 if end_ts:
200 if end_ts:
208 end_ts_svn = apr_time_t(end_ts)
201 end_ts_svn = apr_time_t(end_ts)
209 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
202 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
210 else:
203 else:
211 end_rev = svn.fs.youngest_rev(fsobj)
204 end_rev = svn.fs.youngest_rev(fsobj)
212 return start_rev, end_rev
205 return start_rev, end_rev
213
206
214 def revision_properties(self, wire, revision):
207 def revision_properties(self, wire, revision):
215
208
216 cache_on, context_uid, repo_id = self._cache_on(wire)
209 cache_on, context_uid, repo_id = self._cache_on(wire)
217 @self.region.conditional_cache_on_arguments(condition=cache_on)
210 @self.region.conditional_cache_on_arguments(condition=cache_on)
218 def _revision_properties(_repo_id, _revision):
211 def _revision_properties(_repo_id, _revision):
219 repo = self._factory.repo(wire)
212 repo = self._factory.repo(wire)
220 fs_ptr = svn.repos.fs(repo)
213 fs_ptr = svn.repos.fs(repo)
221 return svn.fs.revision_proplist(fs_ptr, revision)
214 return svn.fs.revision_proplist(fs_ptr, revision)
222 return _revision_properties(repo_id, revision)
215 return _revision_properties(repo_id, revision)
223
216
224 def revision_changes(self, wire, revision):
217 def revision_changes(self, wire, revision):
225
218
226 repo = self._factory.repo(wire)
219 repo = self._factory.repo(wire)
227 fsobj = svn.repos.fs(repo)
220 fsobj = svn.repos.fs(repo)
228 rev_root = svn.fs.revision_root(fsobj, revision)
221 rev_root = svn.fs.revision_root(fsobj, revision)
229
222
230 editor = svn.repos.ChangeCollector(fsobj, rev_root)
223 editor = svn.repos.ChangeCollector(fsobj, rev_root)
231 editor_ptr, editor_baton = svn.delta.make_editor(editor)
224 editor_ptr, editor_baton = svn.delta.make_editor(editor)
232 base_dir = ""
225 base_dir = ""
233 send_deltas = False
226 send_deltas = False
234 svn.repos.replay2(
227 svn.repos.replay2(
235 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
228 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
236 editor_ptr, editor_baton, None)
229 editor_ptr, editor_baton, None)
237
230
238 added = []
231 added = []
239 changed = []
232 changed = []
240 removed = []
233 removed = []
241
234
242 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
235 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
243 for path, change in editor.changes.iteritems():
236 for path, change in editor.changes.iteritems():
244 # TODO: Decide what to do with directory nodes. Subversion can add
237 # TODO: Decide what to do with directory nodes. Subversion can add
245 # empty directories.
238 # empty directories.
246
239
247 if change.item_kind == svn.core.svn_node_dir:
240 if change.item_kind == svn.core.svn_node_dir:
248 continue
241 continue
249 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
242 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
250 added.append(path)
243 added.append(path)
251 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
244 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
252 svn.repos.CHANGE_ACTION_REPLACE]:
245 svn.repos.CHANGE_ACTION_REPLACE]:
253 changed.append(path)
246 changed.append(path)
254 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
247 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
255 removed.append(path)
248 removed.append(path)
256 else:
249 else:
257 raise NotImplementedError(
250 raise NotImplementedError(
258 "Action %s not supported on path %s" % (
251 "Action %s not supported on path %s" % (
259 change.action, path))
252 change.action, path))
260
253
261 changes = {
254 changes = {
262 'added': added,
255 'added': added,
263 'changed': changed,
256 'changed': changed,
264 'removed': removed,
257 'removed': removed,
265 }
258 }
266 return changes
259 return changes
267
260
268 @reraise_safe_exceptions
261 @reraise_safe_exceptions
269 def node_history(self, wire, path, revision, limit):
262 def node_history(self, wire, path, revision, limit):
270 cache_on, context_uid, repo_id = self._cache_on(wire)
263 cache_on, context_uid, repo_id = self._cache_on(wire)
271 @self.region.conditional_cache_on_arguments(condition=cache_on)
264 @self.region.conditional_cache_on_arguments(condition=cache_on)
272 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
265 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
273 cross_copies = False
266 cross_copies = False
274 repo = self._factory.repo(wire)
267 repo = self._factory.repo(wire)
275 fsobj = svn.repos.fs(repo)
268 fsobj = svn.repos.fs(repo)
276 rev_root = svn.fs.revision_root(fsobj, revision)
269 rev_root = svn.fs.revision_root(fsobj, revision)
277
270
278 history_revisions = []
271 history_revisions = []
279 history = svn.fs.node_history(rev_root, path)
272 history = svn.fs.node_history(rev_root, path)
280 history = svn.fs.history_prev(history, cross_copies)
273 history = svn.fs.history_prev(history, cross_copies)
281 while history:
274 while history:
282 __, node_revision = svn.fs.history_location(history)
275 __, node_revision = svn.fs.history_location(history)
283 history_revisions.append(node_revision)
276 history_revisions.append(node_revision)
284 if limit and len(history_revisions) >= limit:
277 if limit and len(history_revisions) >= limit:
285 break
278 break
286 history = svn.fs.history_prev(history, cross_copies)
279 history = svn.fs.history_prev(history, cross_copies)
287 return history_revisions
280 return history_revisions
288 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
281 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
289
282
290 def node_properties(self, wire, path, revision):
283 def node_properties(self, wire, path, revision):
291 cache_on, context_uid, repo_id = self._cache_on(wire)
284 cache_on, context_uid, repo_id = self._cache_on(wire)
292 @self.region.conditional_cache_on_arguments(condition=cache_on)
285 @self.region.conditional_cache_on_arguments(condition=cache_on)
293 def _node_properties(_repo_id, _path, _revision):
286 def _node_properties(_repo_id, _path, _revision):
294 repo = self._factory.repo(wire)
287 repo = self._factory.repo(wire)
295 fsobj = svn.repos.fs(repo)
288 fsobj = svn.repos.fs(repo)
296 rev_root = svn.fs.revision_root(fsobj, revision)
289 rev_root = svn.fs.revision_root(fsobj, revision)
297 return svn.fs.node_proplist(rev_root, path)
290 return svn.fs.node_proplist(rev_root, path)
298 return _node_properties(repo_id, path, revision)
291 return _node_properties(repo_id, path, revision)
299
292
300 def file_annotate(self, wire, path, revision):
293 def file_annotate(self, wire, path, revision):
301 abs_path = 'file://' + urllib.pathname2url(
294 abs_path = 'file://' + urllib.pathname2url(
302 vcspath.join(wire['path'], path))
295 vcspath.join(wire['path'], path))
303 file_uri = svn.core.svn_path_canonicalize(abs_path)
296 file_uri = svn.core.svn_path_canonicalize(abs_path)
304
297
305 start_rev = svn_opt_revision_value_t(0)
298 start_rev = svn_opt_revision_value_t(0)
306 peg_rev = svn_opt_revision_value_t(revision)
299 peg_rev = svn_opt_revision_value_t(revision)
307 end_rev = peg_rev
300 end_rev = peg_rev
308
301
309 annotations = []
302 annotations = []
310
303
311 def receiver(line_no, revision, author, date, line, pool):
304 def receiver(line_no, revision, author, date, line, pool):
312 annotations.append((line_no, revision, line))
305 annotations.append((line_no, revision, line))
313
306
314 # TODO: Cannot use blame5, missing typemap function in the swig code
307 # TODO: Cannot use blame5, missing typemap function in the swig code
315 try:
308 try:
316 svn.client.blame2(
309 svn.client.blame2(
317 file_uri, peg_rev, start_rev, end_rev,
310 file_uri, peg_rev, start_rev, end_rev,
318 receiver, svn.client.create_context())
311 receiver, svn.client.create_context())
319 except svn.core.SubversionException as exc:
312 except svn.core.SubversionException as exc:
320 log.exception("Error during blame operation.")
313 log.exception("Error during blame operation.")
321 raise Exception(
314 raise Exception(
322 "Blame not supported or file does not exist at path %s. "
315 "Blame not supported or file does not exist at path %s. "
323 "Error %s." % (path, exc))
316 "Error %s." % (path, exc))
324
317
325 return annotations
318 return annotations
326
319
327 def get_node_type(self, wire, path, revision=None):
320 def get_node_type(self, wire, path, revision=None):
328
321
329 cache_on, context_uid, repo_id = self._cache_on(wire)
322 cache_on, context_uid, repo_id = self._cache_on(wire)
330 @self.region.conditional_cache_on_arguments(condition=cache_on)
323 @self.region.conditional_cache_on_arguments(condition=cache_on)
331 def _get_node_type(_repo_id, _path, _revision):
324 def _get_node_type(_repo_id, _path, _revision):
332 repo = self._factory.repo(wire)
325 repo = self._factory.repo(wire)
333 fs_ptr = svn.repos.fs(repo)
326 fs_ptr = svn.repos.fs(repo)
334 if _revision is None:
327 if _revision is None:
335 _revision = svn.fs.youngest_rev(fs_ptr)
328 _revision = svn.fs.youngest_rev(fs_ptr)
336 root = svn.fs.revision_root(fs_ptr, _revision)
329 root = svn.fs.revision_root(fs_ptr, _revision)
337 node = svn.fs.check_path(root, path)
330 node = svn.fs.check_path(root, path)
338 return NODE_TYPE_MAPPING.get(node, None)
331 return NODE_TYPE_MAPPING.get(node, None)
339 return _get_node_type(repo_id, path, revision)
332 return _get_node_type(repo_id, path, revision)
340
333
341 def get_nodes(self, wire, path, revision=None):
334 def get_nodes(self, wire, path, revision=None):
342
335
343 cache_on, context_uid, repo_id = self._cache_on(wire)
336 cache_on, context_uid, repo_id = self._cache_on(wire)
344 @self.region.conditional_cache_on_arguments(condition=cache_on)
337 @self.region.conditional_cache_on_arguments(condition=cache_on)
345 def _get_nodes(_repo_id, _path, _revision):
338 def _get_nodes(_repo_id, _path, _revision):
346 repo = self._factory.repo(wire)
339 repo = self._factory.repo(wire)
347 fsobj = svn.repos.fs(repo)
340 fsobj = svn.repos.fs(repo)
348 if _revision is None:
341 if _revision is None:
349 _revision = svn.fs.youngest_rev(fsobj)
342 _revision = svn.fs.youngest_rev(fsobj)
350 root = svn.fs.revision_root(fsobj, _revision)
343 root = svn.fs.revision_root(fsobj, _revision)
351 entries = svn.fs.dir_entries(root, path)
344 entries = svn.fs.dir_entries(root, path)
352 result = []
345 result = []
353 for entry_path, entry_info in entries.iteritems():
346 for entry_path, entry_info in entries.iteritems():
354 result.append(
347 result.append(
355 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
348 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
356 return result
349 return result
357 return _get_nodes(repo_id, path, revision)
350 return _get_nodes(repo_id, path, revision)
358
351
359 def get_file_content(self, wire, path, rev=None):
352 def get_file_content(self, wire, path, rev=None):
360 repo = self._factory.repo(wire)
353 repo = self._factory.repo(wire)
361 fsobj = svn.repos.fs(repo)
354 fsobj = svn.repos.fs(repo)
362 if rev is None:
355 if rev is None:
363 rev = svn.fs.youngest_revision(fsobj)
356 rev = svn.fs.youngest_revision(fsobj)
364 root = svn.fs.revision_root(fsobj, rev)
357 root = svn.fs.revision_root(fsobj, rev)
365 content = svn.core.Stream(svn.fs.file_contents(root, path))
358 content = svn.core.Stream(svn.fs.file_contents(root, path))
366 return content.read()
359 return content.read()
367
360
368 def get_file_size(self, wire, path, revision=None):
361 def get_file_size(self, wire, path, revision=None):
369
362
370 cache_on, context_uid, repo_id = self._cache_on(wire)
363 cache_on, context_uid, repo_id = self._cache_on(wire)
371 @self.region.conditional_cache_on_arguments(condition=cache_on)
364 @self.region.conditional_cache_on_arguments(condition=cache_on)
372 def _get_file_size(_repo_id, _path, _revision):
365 def _get_file_size(_repo_id, _path, _revision):
373 repo = self._factory.repo(wire)
366 repo = self._factory.repo(wire)
374 fsobj = svn.repos.fs(repo)
367 fsobj = svn.repos.fs(repo)
375 if _revision is None:
368 if _revision is None:
376 _revision = svn.fs.youngest_revision(fsobj)
369 _revision = svn.fs.youngest_revision(fsobj)
377 root = svn.fs.revision_root(fsobj, _revision)
370 root = svn.fs.revision_root(fsobj, _revision)
378 size = svn.fs.file_length(root, path)
371 size = svn.fs.file_length(root, path)
379 return size
372 return size
380 return _get_file_size(repo_id, path, revision)
373 return _get_file_size(repo_id, path, revision)
381
374
382 def create_repository(self, wire, compatible_version=None):
375 def create_repository(self, wire, compatible_version=None):
383 log.info('Creating Subversion repository in path "%s"', wire['path'])
376 log.info('Creating Subversion repository in path "%s"', wire['path'])
384 self._factory.repo(wire, create=True,
377 self._factory.repo(wire, create=True,
385 compatible_version=compatible_version)
378 compatible_version=compatible_version)
386
379
387 def get_url_and_credentials(self, src_url):
380 def get_url_and_credentials(self, src_url):
388 obj = urlparse.urlparse(src_url)
381 obj = urlparse.urlparse(src_url)
389 username = obj.username or None
382 username = obj.username or None
390 password = obj.password or None
383 password = obj.password or None
391 return username, password, src_url
384 return username, password, src_url
392
385
    def import_remote_repository(self, wire, src_url):
        # Stream a full dump of the remote repository at ``src_url`` into
        # the existing local repository at ``wire['path']`` by piping
        # ``svnrdump dump`` straight into ``svnadmin load``.
        repo_path = wire['path']
        if not self.is_path_valid_repository(wire, repo_path):
            raise Exception(
                "Path %s is not a valid Subversion repository." % repo_path)

        username, password, src_url = self.get_url_and_credentials(src_url)
        rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
                     '--trust-server-cert-failures=unknown-ca']
        if username and password:
            rdump_cmd += ['--username', username, '--password', password]
        rdump_cmd += [src_url]

        # svnrdump's stdout feeds svnadmin's stdin directly (no buffering
        # of the whole dump in memory).
        rdump = subprocess.Popen(
            rdump_cmd,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        load = subprocess.Popen(
            ['svnadmin', 'load', repo_path], stdin=rdump.stdout)

        # TODO: johbo: This can be a very long operation, might be better
        # to track some kind of status and provide an api to check if the
        # import is done.
        # NOTE(review): stderr is only read after wait(); if svnrdump emits
        # enough stderr to fill the pipe buffer this blocks forever —
        # confirm expected error volume or switch to communicate().
        rdump.wait()
        load.wait()

        log.debug('Return process ended with code: %s', rdump.returncode)
        if rdump.returncode != 0:
            errors = rdump.stderr.read()
            log.error('svnrdump dump failed: statuscode %s: message: %s',
                      rdump.returncode, errors)
            reason = 'UNKNOWN'
            # E230001 is Subversion's "server SSL certificate verification
            # failed" error code.
            if 'svnrdump: E230001:' in errors:
                reason = 'INVALID_CERTIFICATE'

            if reason == 'UNKNOWN':
                reason = 'UNKNOWN:{}'.format(errors)
            raise Exception(
                'Failed to dump the remote repository from %s. Reason:%s' % (
                    src_url, reason))
        if load.returncode != 0:
            raise Exception(
                'Failed to load the dump of remote repository from %s.' %
                (src_url, ))
436
429
    def commit(self, wire, message, author, timestamp, updated, removed):
        # Create one new revision from lists of node dicts.
        #
        # ``updated`` / ``removed`` entries are processed by
        # TxnNodeProcessor (they carry at least a 'path' key).
        # Returns the new revision number.
        assert isinstance(message, str)
        assert isinstance(author, str)

        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)

        # Begin a commit transaction on top of the youngest revision.
        rev = svn.fs.youngest_rev(fsobj)
        txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
        txn_root = svn.fs.txn_root(txn)

        for node in updated:
            TxnNodeProcessor(node, txn_root).update()
        for node in removed:
            TxnNodeProcessor(node, txn_root).remove()

        commit_id = svn.repos.fs_commit_txn(repo, txn)

        if timestamp:
            # Rewrite svn:date after the commit so the revision carries the
            # caller-supplied timestamp instead of "now".
            apr_time = apr_time_t(timestamp)
            ts_formatted = svn.core.svn_time_to_cstring(apr_time)
            svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

        log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
        return commit_id
462
455
463 def diff(self, wire, rev1, rev2, path1=None, path2=None,
456 def diff(self, wire, rev1, rev2, path1=None, path2=None,
464 ignore_whitespace=False, context=3):
457 ignore_whitespace=False, context=3):
465
458
466 wire.update(cache=False)
459 wire.update(cache=False)
467 repo = self._factory.repo(wire)
460 repo = self._factory.repo(wire)
468 diff_creator = SvnDiffer(
461 diff_creator = SvnDiffer(
469 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
462 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
470 try:
463 try:
471 return diff_creator.generate_diff()
464 return diff_creator.generate_diff()
472 except svn.core.SubversionException as e:
465 except svn.core.SubversionException as e:
473 log.exception(
466 log.exception(
474 "Error during diff operation operation. "
467 "Error during diff operation operation. "
475 "Path might not exist %s, %s" % (path1, path2))
468 "Path might not exist %s, %s" % (path1, path2))
476 return ""
469 return ""
477
470
478 @reraise_safe_exceptions
471 @reraise_safe_exceptions
479 def is_large_file(self, wire, path):
472 def is_large_file(self, wire, path):
480 return False
473 return False
481
474
482 @reraise_safe_exceptions
475 @reraise_safe_exceptions
483 def run_svn_command(self, wire, cmd, **opts):
476 def run_svn_command(self, wire, cmd, **opts):
484 path = wire.get('path', None)
477 path = wire.get('path', None)
485
478
486 if path and os.path.isdir(path):
479 if path and os.path.isdir(path):
487 opts['cwd'] = path
480 opts['cwd'] = path
488
481
489 safe_call = False
482 safe_call = False
490 if '_safe' in opts:
483 if '_safe' in opts:
491 safe_call = True
484 safe_call = True
492
485
493 svnenv = os.environ.copy()
486 svnenv = os.environ.copy()
494 svnenv.update(opts.pop('extra_env', {}))
487 svnenv.update(opts.pop('extra_env', {}))
495
488
496 _opts = {'env': svnenv, 'shell': False}
489 _opts = {'env': svnenv, 'shell': False}
497
490
498 try:
491 try:
499 _opts.update(opts)
492 _opts.update(opts)
500 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
493 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
501
494
502 return ''.join(p), ''.join(p.error)
495 return ''.join(p), ''.join(p.error)
503 except (EnvironmentError, OSError) as err:
496 except (EnvironmentError, OSError) as err:
504 cmd = ' '.join(cmd) # human friendly CMD
497 cmd = ' '.join(cmd) # human friendly CMD
505 tb_err = ("Couldn't run svn command (%s).\n"
498 tb_err = ("Couldn't run svn command (%s).\n"
506 "Original error was:%s\n"
499 "Original error was:%s\n"
507 "Call options:%s\n"
500 "Call options:%s\n"
508 % (cmd, err, _opts))
501 % (cmd, err, _opts))
509 log.exception(tb_err)
502 log.exception(tb_err)
510 if safe_call:
503 if safe_call:
511 return '', err
504 return '', err
512 else:
505 else:
513 raise exceptions.VcsException()(tb_err)
506 raise exceptions.VcsException()(tb_err)
514
507
515 @reraise_safe_exceptions
508 @reraise_safe_exceptions
516 def install_hooks(self, wire, force=False):
509 def install_hooks(self, wire, force=False):
517 from vcsserver.hook_utils import install_svn_hooks
510 from vcsserver.hook_utils import install_svn_hooks
518 repo_path = wire['path']
511 repo_path = wire['path']
519 binary_dir = settings.BINARY_DIR
512 binary_dir = settings.BINARY_DIR
520 executable = None
513 executable = None
521 if binary_dir:
514 if binary_dir:
522 executable = os.path.join(binary_dir, 'python')
515 executable = os.path.join(binary_dir, 'python')
523 return install_svn_hooks(
516 return install_svn_hooks(
524 repo_path, executable=executable, force_create=force)
517 repo_path, executable=executable, force_create=force)
525
518
526 @reraise_safe_exceptions
519 @reraise_safe_exceptions
527 def get_hooks_info(self, wire):
520 def get_hooks_info(self, wire):
528 from vcsserver.hook_utils import (
521 from vcsserver.hook_utils import (
529 get_svn_pre_hook_version, get_svn_post_hook_version)
522 get_svn_pre_hook_version, get_svn_post_hook_version)
530 repo_path = wire['path']
523 repo_path = wire['path']
531 return {
524 return {
532 'pre_version': get_svn_pre_hook_version(repo_path),
525 'pre_version': get_svn_pre_hook_version(repo_path),
533 'post_version': get_svn_post_hook_version(repo_path),
526 'post_version': get_svn_post_hook_version(repo_path),
534 }
527 }
535
528
536
529
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    # Set per node while diffing; suppresses text diffing for binaries.
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        # Empty src_path means "same path, different revision".
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        # Both sides must have the same node kind (file vs dir) unless one
        # side does not exist at its revision.
        if (self.tgt_kind != svn.core.svn_node_none and
            self.src_kind != svn.core.svn_node_none and
            self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        # Render the complete diff into an in-memory buffer and return it.
        buf = StringIO.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        # Drive a delta editor over the two roots to collect changed paths,
        # then emit a per-node diff for each recorded change.
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        # Derive the change type from which side of the diff exists;
        # ``None`` means a plain content change.
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        # Write one node's diff (header + hunks) into ``buf``.

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        # Any non-text svn:mime-type marks the node as binary.
        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)

        buf.write("Index: %s\n" % (tgt_path, ))
        buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        # Prefer the target side's svn:mime-type; fall back to the source
        # side when the node does not exist in the target revision.
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        # Binary nodes and non-file nodes contribute no diffable lines.
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
            svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)
696
689
697
690
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions
    """

    def __init__(self):
        # (path, node_kind, change_type) tuples collected while the delta
        # editor is driven (see SvnDiffer._generate_dir_diff).
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        # Node removed between the two revisions; kind is unknown here.
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        # New file added between the two revisions.
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        # Existing file whose content or properties changed.
        self.changes.append((path, 'file', 'change'))
716
709
717
710
def authorization_callback_allow_all(root, path, pool):
    """Authz callback for ``svn.repos.dir_delta2`` that permits every path."""
    return True
720
713
721
714
class TxnNodeProcessor(object):
    """
    Applies a single node change inside a transaction root.

    Knows how to add, update or remove one node of an open svn commit
    transaction; used by `SvnRemote.commit` for each changed node.
    """

    def __init__(self, node, txn_root):
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create the node if needed, then write its content and properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        """Delete the node from the transaction root."""
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # Walk up from the node collecting missing ancestors, then create
        # them top-down.
        missing = []
        curdir = vcspath.dirname(self.node['path'])
        while not self._svn_path_exists(curdir):
            missing.append(curdir)
            curdir = vcspath.dirname(curdir)

        for missing_dir in reversed(missing):
            log.debug('Creating missing directory "%s"', missing_dir)
            svn.fs.make_dir(self.txn_root, missing_dir)

    def _svn_path_exists(self, path):
        status = svn.fs.check_path(self.txn_root, path)
        return status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        node_kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if node_kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        for key, value in self.node.get('properties', {}).iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
778
771
779
772
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t

    :param timestamp: seconds since the epoch (int or float).
    :return: microseconds since the epoch as an ``int`` — the ``apr_time_t``
        C type is an integer microsecond count, so the previous float
        result relied on implicit conversion in the svn bindings.
    """
    return int(timestamp * 1E6)
785
778
786
779
def svn_opt_revision_value_t(num):
    """
    Wrap revision number `num` in an `svn_opt_revision_t` structure
    of kind `svn_opt_revision_number`.
    """
    rev_value = svn.core.svn_opt_revision_value_t()
    rev_value.number = num
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value = rev_value
    return rev
General Comments 0
You need to be logged in to leave comments. Login now