##// END OF EJS Templates
feat(git/svn): ensure both assert-repo functions (for Git and SVN) use caching, and that only those functions create directories.
super-admin -
r1184:86489ea8 default
parent child Browse files
Show More
@@ -1,1491 +1,1493 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import traceback
24 import urllib.request
24 import urllib.request
25 import urllib.parse
25 import urllib.parse
26 import urllib.error
26 import urllib.error
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 from pygit2 import index as LibGit2Index
33 from dulwich import index, objects
33 from dulwich import index, objects
34 from dulwich.client import HttpGitClient, LocalGitClient, FetchPackResult
34 from dulwich.client import HttpGitClient, LocalGitClient, FetchPackResult
35 from dulwich.errors import (
35 from dulwich.errors import (
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 MissingCommitError, ObjectMissing, HangupException,
37 MissingCommitError, ObjectMissing, HangupException,
38 UnexpectedCommandError)
38 UnexpectedCommandError)
39 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.repo import Repo as DulwichRepo
40 from dulwich.server import update_server_info
40 from dulwich.server import update_server_info
41
41
42 import rhodecode
42 import rhodecode
43 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver import exceptions, settings, subprocessio
44 from vcsserver.str_utils import safe_str, safe_int, safe_bytes, ascii_bytes
44 from vcsserver.str_utils import safe_str, safe_int, safe_bytes, ascii_bytes
45 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
45 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, store_archive_in_cache, BytesEnvelope, BinaryEnvelope
46 from vcsserver.hgcompat import (
46 from vcsserver.hgcompat import (
47 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
48 from vcsserver.git_lfs.lib import LFSOidStore
48 from vcsserver.git_lfs.lib import LFSOidStore
49 from vcsserver.vcs_base import RemoteBase
49 from vcsserver.vcs_base import RemoteBase
50
50
DIR_STAT = stat.S_IFDIR  # mode bitmask marking directory entries
FILE_MODE = stat.S_IFMT  # mask extracting the file-type bits from a mode
GIT_LINK = objects.S_IFGITLINK  # mode git uses for submodule (gitlink) entries
PEELED_REF_MARKER = b'^{}'  # suffix git appends to peeled (dereferenced) tag refs
HEAD_MARKER = b'HEAD'

log = logging.getLogger(__name__)
58
58
59
59
def reraise_safe_exceptions(func):
    """Decorator converting Dulwich exceptions into neutral vcsserver ones."""

    lookup_errors = (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing)
    hangup_errors = (HangupException, UnexpectedCommandError)

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except lookup_errors as org_exc:
            raise exceptions.LookupException(org_exc=org_exc)(safe_str(org_exc))
        except hangup_errors as org_exc:
            raise exceptions.VcsException(org_exc=org_exc)(safe_str(org_exc))
        except Exception:
            # NOTE(marcink): because of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            raise
    return wrapper
82
82
83
83
class Repo(DulwichRepo):
    """
    Thin wrapper over dulwich's ``Repo``.

    Dulwich sometimes keeps ``.idx`` file descriptors open, which eventually
    leads to "Too many open files". Close every open descriptor as soon as
    the repo object is garbage collected.
    """
    def __del__(self):
        # ``object_store`` may be absent if __init__ failed part-way through.
        if hasattr(self, 'object_store'):
            self.close()
95
95
96
96
class Repository(LibGit2Repo):
    """pygit2 Repository usable as a context manager; frees libgit2 state on exit."""

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Explicitly release the underlying libgit2 handle.
        self.free()
104
104
105
105
class GitFactory(RepoFactory):
    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        """
        Instantiate a repository object for ``wire['path']``.

        :param use_libgit2: when True use the pygit2 backend, otherwise dulwich.
        """
        if not use_libgit2:
            # dulwich mode
            repo_path = safe_str(wire['path'], to_encoding=settings.WIRE_ENCODING)
            repo = Repo(repo_path)
        else:
            repo = Repository(safe_bytes(wire['path']))

        log.debug('repository created: got GIT object: %s', repo)
        return repo

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        """Shortcut for a pygit2-backed repository instance."""
        return self.repo(wire, use_libgit2=True)
128
128
129
129
def create_signature_from_string(author_str, **kwargs):
    """
    Create a pygit2.Signature from a string of the format 'Name <email>'.

    :param author_str: string of the format 'Name <email>'
    :return: pygit2.Signature object
    :raises ValueError: when *author_str* does not match the expected format
    """
    parsed = re.match(r'^(.+) <(.+)>$', author_str)
    if parsed is None:
        raise ValueError(f"Invalid format: {author_str}")

    return pygit2.Signature(parsed.group(1), parsed.group(2), **kwargs)
143
143
144
144
def get_obfuscated_url(url_obj):
    """Render *url_obj* as a string with the password and query string obfuscated."""
    if url_obj.passwd:
        url_obj.passwd = b'*****'
    url_obj.query = obfuscate_qs(url_obj.query)
    return str(url_obj)
150
150
151
151
152 class GitRemote(RemoteBase):
152 class GitRemote(RemoteBase):
153
153
154 def __init__(self, factory):
154 def __init__(self, factory):
155 self._factory = factory
155 self._factory = factory
156 self._bulk_methods = {
156 self._bulk_methods = {
157 "date": self.date,
157 "date": self.date,
158 "author": self.author,
158 "author": self.author,
159 "branch": self.branch,
159 "branch": self.branch,
160 "message": self.message,
160 "message": self.message,
161 "parents": self.parents,
161 "parents": self.parents,
162 "_commit": self.revision,
162 "_commit": self.revision,
163 }
163 }
164 self._bulk_file_methods = {
164 self._bulk_file_methods = {
165 "size": self.get_node_size,
165 "size": self.get_node_size,
166 "data": self.get_node_data,
166 "data": self.get_node_data,
167 "flags": self.get_node_flags,
167 "flags": self.get_node_flags,
168 "is_binary": self.get_node_is_binary,
168 "is_binary": self.get_node_is_binary,
169 "md5": self.md5_hash
169 "md5": self.md5_hash
170 }
170 }
171
171
172 def _wire_to_config(self, wire):
172 def _wire_to_config(self, wire):
173 if 'config' in wire:
173 if 'config' in wire:
174 return {x[0] + '_' + x[1]: x[2] for x in wire['config']}
174 return {x[0] + '_' + x[1]: x[2] for x in wire['config']}
175 return {}
175 return {}
176
176
177 def _remote_conf(self, config):
177 def _remote_conf(self, config):
178 params = [
178 params = [
179 '-c', 'core.askpass=""',
179 '-c', 'core.askpass=""',
180 ]
180 ]
181 ssl_cert_dir = config.get('vcs_ssl_dir')
181 ssl_cert_dir = config.get('vcs_ssl_dir')
182 if ssl_cert_dir:
182 if ssl_cert_dir:
183 params.extend(['-c', f'http.sslCAinfo={ssl_cert_dir}'])
183 params.extend(['-c', f'http.sslCAinfo={ssl_cert_dir}'])
184 return params
184 return params
185
185
186 @reraise_safe_exceptions
186 @reraise_safe_exceptions
187 def discover_git_version(self):
187 def discover_git_version(self):
188 stdout, _ = self.run_git_command(
188 stdout, _ = self.run_git_command(
189 {}, ['--version'], _bare=True, _safe=True)
189 {}, ['--version'], _bare=True, _safe=True)
190 prefix = b'git version'
190 prefix = b'git version'
191 if stdout.startswith(prefix):
191 if stdout.startswith(prefix):
192 stdout = stdout[len(prefix):]
192 stdout = stdout[len(prefix):]
193 return safe_str(stdout.strip())
193 return safe_str(stdout.strip())
194
194
195 @reraise_safe_exceptions
195 @reraise_safe_exceptions
196 def is_empty(self, wire):
196 def is_empty(self, wire):
197 repo_init = self._factory.repo_libgit2(wire)
197 repo_init = self._factory.repo_libgit2(wire)
198 with repo_init as repo:
198 with repo_init as repo:
199
199
200 try:
200 try:
201 has_head = repo.head.name
201 has_head = repo.head.name
202 if has_head:
202 if has_head:
203 return False
203 return False
204
204
205 # NOTE(marcink): check again using more expensive method
205 # NOTE(marcink): check again using more expensive method
206 return repo.is_empty
206 return repo.is_empty
207 except Exception:
207 except Exception:
208 pass
208 pass
209
209
210 return True
210 return True
211
211
212 @reraise_safe_exceptions
212 @reraise_safe_exceptions
213 def assert_correct_path(self, wire):
213 def assert_correct_path(self, wire):
214 cache_on, context_uid, repo_id = self._cache_on(wire)
214 cache_on, context_uid, repo_id = self._cache_on(wire)
215 region = self._region(wire)
215 region = self._region(wire)
216
216
217 @region.conditional_cache_on_arguments(condition=cache_on)
217 @region.conditional_cache_on_arguments(condition=cache_on)
218 def _assert_correct_path(_context_uid, _repo_id, fast_check):
218 def _assert_correct_path(_context_uid, _repo_id, fast_check):
219 if fast_check:
219 if fast_check:
220 path = safe_str(wire['path'])
220 path = safe_str(wire['path'])
221 if pygit2.discover_repository(path):
221 if pygit2.discover_repository(path):
222 return True
222 return True
223 return False
223 return False
224 else:
224 else:
225 try:
225 try:
226 repo_init = self._factory.repo_libgit2(wire)
226 repo_init = self._factory.repo_libgit2(wire)
227 with repo_init:
227 with repo_init:
228 pass
228 pass
229 except pygit2.GitError:
229 except pygit2.GitError:
230 path = wire.get('path')
230 path = wire.get('path')
231 tb = traceback.format_exc()
231 tb = traceback.format_exc()
232 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
232 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
233 return False
233 return False
234 return True
234 return True
235
235
236 return _assert_correct_path(context_uid, repo_id, True)
236 return _assert_correct_path(context_uid, repo_id, True)
237
237
238 @reraise_safe_exceptions
238 @reraise_safe_exceptions
239 def bare(self, wire):
239 def bare(self, wire):
240 repo_init = self._factory.repo_libgit2(wire)
240 repo_init = self._factory.repo_libgit2(wire)
241 with repo_init as repo:
241 with repo_init as repo:
242 return repo.is_bare
242 return repo.is_bare
243
243
244 @reraise_safe_exceptions
244 @reraise_safe_exceptions
245 def get_node_data(self, wire, commit_id, path):
245 def get_node_data(self, wire, commit_id, path):
246 repo_init = self._factory.repo_libgit2(wire)
246 repo_init = self._factory.repo_libgit2(wire)
247 with repo_init as repo:
247 with repo_init as repo:
248 commit = repo[commit_id]
248 commit = repo[commit_id]
249 blob_obj = commit.tree[path]
249 blob_obj = commit.tree[path]
250
250
251 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
251 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
252 raise exceptions.LookupException()(
252 raise exceptions.LookupException()(
253 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
253 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
254
254
255 return BytesEnvelope(blob_obj.data)
255 return BytesEnvelope(blob_obj.data)
256
256
257 @reraise_safe_exceptions
257 @reraise_safe_exceptions
258 def get_node_size(self, wire, commit_id, path):
258 def get_node_size(self, wire, commit_id, path):
259 repo_init = self._factory.repo_libgit2(wire)
259 repo_init = self._factory.repo_libgit2(wire)
260 with repo_init as repo:
260 with repo_init as repo:
261 commit = repo[commit_id]
261 commit = repo[commit_id]
262 blob_obj = commit.tree[path]
262 blob_obj = commit.tree[path]
263
263
264 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
264 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
265 raise exceptions.LookupException()(
265 raise exceptions.LookupException()(
266 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
266 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
267
267
268 return blob_obj.size
268 return blob_obj.size
269
269
270 @reraise_safe_exceptions
270 @reraise_safe_exceptions
271 def get_node_flags(self, wire, commit_id, path):
271 def get_node_flags(self, wire, commit_id, path):
272 repo_init = self._factory.repo_libgit2(wire)
272 repo_init = self._factory.repo_libgit2(wire)
273 with repo_init as repo:
273 with repo_init as repo:
274 commit = repo[commit_id]
274 commit = repo[commit_id]
275 blob_obj = commit.tree[path]
275 blob_obj = commit.tree[path]
276
276
277 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
277 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
278 raise exceptions.LookupException()(
278 raise exceptions.LookupException()(
279 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
279 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
280
280
281 return blob_obj.filemode
281 return blob_obj.filemode
282
282
283 @reraise_safe_exceptions
283 @reraise_safe_exceptions
284 def get_node_is_binary(self, wire, commit_id, path):
284 def get_node_is_binary(self, wire, commit_id, path):
285 repo_init = self._factory.repo_libgit2(wire)
285 repo_init = self._factory.repo_libgit2(wire)
286 with repo_init as repo:
286 with repo_init as repo:
287 commit = repo[commit_id]
287 commit = repo[commit_id]
288 blob_obj = commit.tree[path]
288 blob_obj = commit.tree[path]
289
289
290 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
290 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
291 raise exceptions.LookupException()(
291 raise exceptions.LookupException()(
292 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
292 f'Tree for commit_id:{commit_id} is not a blob: {blob_obj.type_str}')
293
293
294 return blob_obj.is_binary
294 return blob_obj.is_binary
295
295
296 @reraise_safe_exceptions
296 @reraise_safe_exceptions
297 def blob_as_pretty_string(self, wire, sha):
297 def blob_as_pretty_string(self, wire, sha):
298 repo_init = self._factory.repo_libgit2(wire)
298 repo_init = self._factory.repo_libgit2(wire)
299 with repo_init as repo:
299 with repo_init as repo:
300 blob_obj = repo[sha]
300 blob_obj = repo[sha]
301 return BytesEnvelope(blob_obj.data)
301 return BytesEnvelope(blob_obj.data)
302
302
303 @reraise_safe_exceptions
303 @reraise_safe_exceptions
304 def blob_raw_length(self, wire, sha):
304 def blob_raw_length(self, wire, sha):
305 cache_on, context_uid, repo_id = self._cache_on(wire)
305 cache_on, context_uid, repo_id = self._cache_on(wire)
306 region = self._region(wire)
306 region = self._region(wire)
307
307
308 @region.conditional_cache_on_arguments(condition=cache_on)
308 @region.conditional_cache_on_arguments(condition=cache_on)
309 def _blob_raw_length(_repo_id, _sha):
309 def _blob_raw_length(_repo_id, _sha):
310
310
311 repo_init = self._factory.repo_libgit2(wire)
311 repo_init = self._factory.repo_libgit2(wire)
312 with repo_init as repo:
312 with repo_init as repo:
313 blob = repo[sha]
313 blob = repo[sha]
314 return blob.size
314 return blob.size
315
315
316 return _blob_raw_length(repo_id, sha)
316 return _blob_raw_length(repo_id, sha)
317
317
318 def _parse_lfs_pointer(self, raw_content):
318 def _parse_lfs_pointer(self, raw_content):
319 spec_string = b'version https://git-lfs.github.com/spec'
319 spec_string = b'version https://git-lfs.github.com/spec'
320 if raw_content and raw_content.startswith(spec_string):
320 if raw_content and raw_content.startswith(spec_string):
321
321
322 pattern = re.compile(rb"""
322 pattern = re.compile(rb"""
323 (?:\n)?
323 (?:\n)?
324 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
324 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
325 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
325 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
326 ^size[ ](?P<oid_size>[0-9]+)\n
326 ^size[ ](?P<oid_size>[0-9]+)\n
327 (?:\n)?
327 (?:\n)?
328 """, re.VERBOSE | re.MULTILINE)
328 """, re.VERBOSE | re.MULTILINE)
329 match = pattern.match(raw_content)
329 match = pattern.match(raw_content)
330 if match:
330 if match:
331 return match.groupdict()
331 return match.groupdict()
332
332
333 return {}
333 return {}
334
334
335 @reraise_safe_exceptions
335 @reraise_safe_exceptions
336 def is_large_file(self, wire, commit_id):
336 def is_large_file(self, wire, commit_id):
337 cache_on, context_uid, repo_id = self._cache_on(wire)
337 cache_on, context_uid, repo_id = self._cache_on(wire)
338 region = self._region(wire)
338 region = self._region(wire)
339
339
340 @region.conditional_cache_on_arguments(condition=cache_on)
340 @region.conditional_cache_on_arguments(condition=cache_on)
341 def _is_large_file(_repo_id, _sha):
341 def _is_large_file(_repo_id, _sha):
342 repo_init = self._factory.repo_libgit2(wire)
342 repo_init = self._factory.repo_libgit2(wire)
343 with repo_init as repo:
343 with repo_init as repo:
344 blob = repo[commit_id]
344 blob = repo[commit_id]
345 if blob.is_binary:
345 if blob.is_binary:
346 return {}
346 return {}
347
347
348 return self._parse_lfs_pointer(blob.data)
348 return self._parse_lfs_pointer(blob.data)
349
349
350 return _is_large_file(repo_id, commit_id)
350 return _is_large_file(repo_id, commit_id)
351
351
352 @reraise_safe_exceptions
352 @reraise_safe_exceptions
353 def is_binary(self, wire, tree_id):
353 def is_binary(self, wire, tree_id):
354 cache_on, context_uid, repo_id = self._cache_on(wire)
354 cache_on, context_uid, repo_id = self._cache_on(wire)
355 region = self._region(wire)
355 region = self._region(wire)
356
356
357 @region.conditional_cache_on_arguments(condition=cache_on)
357 @region.conditional_cache_on_arguments(condition=cache_on)
358 def _is_binary(_repo_id, _tree_id):
358 def _is_binary(_repo_id, _tree_id):
359 repo_init = self._factory.repo_libgit2(wire)
359 repo_init = self._factory.repo_libgit2(wire)
360 with repo_init as repo:
360 with repo_init as repo:
361 blob_obj = repo[tree_id]
361 blob_obj = repo[tree_id]
362 return blob_obj.is_binary
362 return blob_obj.is_binary
363
363
364 return _is_binary(repo_id, tree_id)
364 return _is_binary(repo_id, tree_id)
365
365
366 @reraise_safe_exceptions
366 @reraise_safe_exceptions
367 def md5_hash(self, wire, commit_id, path):
367 def md5_hash(self, wire, commit_id, path):
368 cache_on, context_uid, repo_id = self._cache_on(wire)
368 cache_on, context_uid, repo_id = self._cache_on(wire)
369 region = self._region(wire)
369 region = self._region(wire)
370
370
371 @region.conditional_cache_on_arguments(condition=cache_on)
371 @region.conditional_cache_on_arguments(condition=cache_on)
372 def _md5_hash(_repo_id, _commit_id, _path):
372 def _md5_hash(_repo_id, _commit_id, _path):
373 repo_init = self._factory.repo_libgit2(wire)
373 repo_init = self._factory.repo_libgit2(wire)
374 with repo_init as repo:
374 with repo_init as repo:
375 commit = repo[_commit_id]
375 commit = repo[_commit_id]
376 blob_obj = commit.tree[_path]
376 blob_obj = commit.tree[_path]
377
377
378 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
378 if blob_obj.type != pygit2.GIT_OBJ_BLOB:
379 raise exceptions.LookupException()(
379 raise exceptions.LookupException()(
380 f'Tree for commit_id:{_commit_id} is not a blob: {blob_obj.type_str}')
380 f'Tree for commit_id:{_commit_id} is not a blob: {blob_obj.type_str}')
381
381
382 return ''
382 return ''
383
383
384 return _md5_hash(repo_id, commit_id, path)
384 return _md5_hash(repo_id, commit_id, path)
385
385
386 @reraise_safe_exceptions
386 @reraise_safe_exceptions
387 def in_largefiles_store(self, wire, oid):
387 def in_largefiles_store(self, wire, oid):
388 conf = self._wire_to_config(wire)
388 conf = self._wire_to_config(wire)
389 repo_init = self._factory.repo_libgit2(wire)
389 repo_init = self._factory.repo_libgit2(wire)
390 with repo_init as repo:
390 with repo_init as repo:
391 repo_name = repo.path
391 repo_name = repo.path
392
392
393 store_location = conf.get('vcs_git_lfs_store_location')
393 store_location = conf.get('vcs_git_lfs_store_location')
394 if store_location:
394 if store_location:
395
395
396 store = LFSOidStore(
396 store = LFSOidStore(
397 oid=oid, repo=repo_name, store_location=store_location)
397 oid=oid, repo=repo_name, store_location=store_location)
398 return store.has_oid()
398 return store.has_oid()
399
399
400 return False
400 return False
401
401
402 @reraise_safe_exceptions
402 @reraise_safe_exceptions
403 def store_path(self, wire, oid):
403 def store_path(self, wire, oid):
404 conf = self._wire_to_config(wire)
404 conf = self._wire_to_config(wire)
405 repo_init = self._factory.repo_libgit2(wire)
405 repo_init = self._factory.repo_libgit2(wire)
406 with repo_init as repo:
406 with repo_init as repo:
407 repo_name = repo.path
407 repo_name = repo.path
408
408
409 store_location = conf.get('vcs_git_lfs_store_location')
409 store_location = conf.get('vcs_git_lfs_store_location')
410 if store_location:
410 if store_location:
411 store = LFSOidStore(
411 store = LFSOidStore(
412 oid=oid, repo=repo_name, store_location=store_location)
412 oid=oid, repo=repo_name, store_location=store_location)
413 return store.oid_path
413 return store.oid_path
414 raise ValueError(f'Unable to fetch oid with path {oid}')
414 raise ValueError(f'Unable to fetch oid with path {oid}')
415
415
416 @reraise_safe_exceptions
416 @reraise_safe_exceptions
417 def bulk_request(self, wire, rev, pre_load):
417 def bulk_request(self, wire, rev, pre_load):
418 cache_on, context_uid, repo_id = self._cache_on(wire)
418 cache_on, context_uid, repo_id = self._cache_on(wire)
419 region = self._region(wire)
419 region = self._region(wire)
420
420
421 @region.conditional_cache_on_arguments(condition=cache_on)
421 @region.conditional_cache_on_arguments(condition=cache_on)
422 def _bulk_request(_repo_id, _rev, _pre_load):
422 def _bulk_request(_repo_id, _rev, _pre_load):
423 result = {}
423 result = {}
424 for attr in pre_load:
424 for attr in pre_load:
425 try:
425 try:
426 method = self._bulk_methods[attr]
426 method = self._bulk_methods[attr]
427 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
427 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
428 args = [wire, rev]
428 args = [wire, rev]
429 result[attr] = method(*args)
429 result[attr] = method(*args)
430 except KeyError as e:
430 except KeyError as e:
431 raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
431 raise exceptions.VcsException(e)(f"Unknown bulk attribute: {attr}")
432 return result
432 return result
433
433
434 return _bulk_request(repo_id, rev, sorted(pre_load))
434 return _bulk_request(repo_id, rev, sorted(pre_load))
435
435
436 @reraise_safe_exceptions
436 @reraise_safe_exceptions
437 def bulk_file_request(self, wire, commit_id, path, pre_load):
437 def bulk_file_request(self, wire, commit_id, path, pre_load):
438 cache_on, context_uid, repo_id = self._cache_on(wire)
438 cache_on, context_uid, repo_id = self._cache_on(wire)
439 region = self._region(wire)
439 region = self._region(wire)
440
440
441 @region.conditional_cache_on_arguments(condition=cache_on)
441 @region.conditional_cache_on_arguments(condition=cache_on)
442 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
442 def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
443 result = {}
443 result = {}
444 for attr in pre_load:
444 for attr in pre_load:
445 try:
445 try:
446 method = self._bulk_file_methods[attr]
446 method = self._bulk_file_methods[attr]
447 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
447 wire.update({'cache': False}) # disable cache for bulk calls so we don't double cache
448 result[attr] = method(wire, _commit_id, _path)
448 result[attr] = method(wire, _commit_id, _path)
449 except KeyError as e:
449 except KeyError as e:
450 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
450 raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
451 return result
451 return result
452
452
453 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
453 return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
454
454
455 def _build_opener(self, url: str):
455 def _build_opener(self, url: str):
456 handlers = []
456 handlers = []
457 url_obj = url_parser(safe_bytes(url))
457 url_obj = url_parser(safe_bytes(url))
458 authinfo = url_obj.authinfo()[1]
458 authinfo = url_obj.authinfo()[1]
459
459
460 if authinfo:
460 if authinfo:
461 # create a password manager
461 # create a password manager
462 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
462 passmgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
463 passmgr.add_password(*authinfo)
463 passmgr.add_password(*authinfo)
464
464
465 handlers.extend((httpbasicauthhandler(passmgr),
465 handlers.extend((httpbasicauthhandler(passmgr),
466 httpdigestauthhandler(passmgr)))
466 httpdigestauthhandler(passmgr)))
467
467
468 return urllib.request.build_opener(*handlers)
468 return urllib.request.build_opener(*handlers)
469
469
470 @reraise_safe_exceptions
470 @reraise_safe_exceptions
471 def check_url(self, url, config):
471 def check_url(self, url, config):
472 url_obj = url_parser(safe_bytes(url))
472 url_obj = url_parser(safe_bytes(url))
473
473
474 test_uri = safe_str(url_obj.authinfo()[0])
474 test_uri = safe_str(url_obj.authinfo()[0])
475 obfuscated_uri = get_obfuscated_url(url_obj)
475 obfuscated_uri = get_obfuscated_url(url_obj)
476
476
477 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
477 log.info("Checking URL for remote cloning/import: %s", obfuscated_uri)
478
478
479 if not test_uri.endswith('info/refs'):
479 if not test_uri.endswith('info/refs'):
480 test_uri = test_uri.rstrip('/') + '/info/refs'
480 test_uri = test_uri.rstrip('/') + '/info/refs'
481
481
482 o = self._build_opener(test_uri)
482 o = self._build_opener(test_uri)
483 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
483 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
484
484
485 q = {"service": 'git-upload-pack'}
485 q = {"service": 'git-upload-pack'}
486 qs = f'?{urllib.parse.urlencode(q)}'
486 qs = f'?{urllib.parse.urlencode(q)}'
487 cu = f"{test_uri}{qs}"
487 cu = f"{test_uri}{qs}"
488
488
489 try:
489 try:
490 req = urllib.request.Request(cu, None, {})
490 req = urllib.request.Request(cu, None, {})
491 log.debug("Trying to open URL %s", obfuscated_uri)
491 log.debug("Trying to open URL %s", obfuscated_uri)
492 resp = o.open(req)
492 resp = o.open(req)
493 if resp.code != 200:
493 if resp.code != 200:
494 raise exceptions.URLError()('Return Code is not 200')
494 raise exceptions.URLError()('Return Code is not 200')
495 except Exception as e:
495 except Exception as e:
496 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
496 log.warning("URL cannot be opened: %s", obfuscated_uri, exc_info=True)
497 # means it cannot be cloned
497 # means it cannot be cloned
498 raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")
498 raise exceptions.URLError(e)(f"[{obfuscated_uri}] org_exc: {e}")
499
499
500 # now detect if it's proper git repo
500 # now detect if it's proper git repo
501 gitdata: bytes = resp.read()
501 gitdata: bytes = resp.read()
502
502
503 if b'service=git-upload-pack' in gitdata:
503 if b'service=git-upload-pack' in gitdata:
504 pass
504 pass
505 elif re.findall(br'[0-9a-fA-F]{40}\s+refs', gitdata):
505 elif re.findall(br'[0-9a-fA-F]{40}\s+refs', gitdata):
506 # old style git can return some other format!
506 # old style git can return some other format!
507 pass
507 pass
508 else:
508 else:
509 e = None
509 e = None
510 raise exceptions.URLError(e)(
510 raise exceptions.URLError(e)(
511 f"url [{obfuscated_uri}] does not look like an hg repo org_exc: {e}")
511 f"url [{obfuscated_uri}] does not look like an hg repo org_exc: {e}")
512
512
513 return True
513 return True
514
514
    @reraise_safe_exceptions
    def clone(self, wire, url, deferred, valid_refs, update_after_clone):
        """
        Clone ``url`` into the repository described by ``wire``.

        Pulls all remote refs without applying them, then copies over only
        the refs whose names start with ``valid_refs`` and do not end with
        ``deferred`` (deferred/peeled tags); optionally checks out HEAD.

        :param deferred: ref-name suffix(es) to skip.
        :param valid_refs: accepted ref-name prefix(es) (list or tuple).
        :param update_after_clone: when True, set HEAD from the remote and
            build a working index from its tree.
        """
        # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
        remote_refs = self.pull(wire, url, apply_refs=False)
        repo = self._factory.repo(wire)
        # str.startswith() needs a tuple to test multiple prefixes
        if isinstance(valid_refs, list):
            valid_refs = tuple(valid_refs)

        for k in remote_refs:
            # only parse heads/tags and skip so called deferred tags
            if k.startswith(valid_refs) and not k.endswith(deferred):
                repo[k] = remote_refs[k]

        if update_after_clone:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
533
533
    @reraise_safe_exceptions
    def branch(self, wire, commit_id):
        """
        Return short names of local branches whose tip equals ``commit_id``.

        The lookup is cached via the dogpile region when ``cache_on`` holds.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _branch(_context_uid, _repo_id, _commit_id):
            regex = re.compile('^refs/heads')

            def filter_with(ref):
                # ref is a (name, sha) tuple coming from get_refs().items()
                return regex.match(ref[0]) and ref[1] == _commit_id

            branches = list(filter(filter_with, list(self.get_refs(wire).items())))
            # strip the 'refs/heads/' prefix, keep the short branch name
            return [x[0].split('refs/heads/')[-1] for x in branches]

        return _branch(context_uid, repo_id, commit_id)
550
550
551 @reraise_safe_exceptions
551 @reraise_safe_exceptions
552 def commit_branches(self, wire, commit_id):
552 def commit_branches(self, wire, commit_id):
553 cache_on, context_uid, repo_id = self._cache_on(wire)
553 cache_on, context_uid, repo_id = self._cache_on(wire)
554 region = self._region(wire)
554 region = self._region(wire)
555
555
556 @region.conditional_cache_on_arguments(condition=cache_on)
556 @region.conditional_cache_on_arguments(condition=cache_on)
557 def _commit_branches(_context_uid, _repo_id, _commit_id):
557 def _commit_branches(_context_uid, _repo_id, _commit_id):
558 repo_init = self._factory.repo_libgit2(wire)
558 repo_init = self._factory.repo_libgit2(wire)
559 with repo_init as repo:
559 with repo_init as repo:
560 branches = [x for x in repo.branches.with_commit(_commit_id)]
560 branches = [x for x in repo.branches.with_commit(_commit_id)]
561 return branches
561 return branches
562
562
563 return _commit_branches(context_uid, repo_id, commit_id)
563 return _commit_branches(context_uid, repo_id, commit_id)
564
564
565 @reraise_safe_exceptions
565 @reraise_safe_exceptions
566 def add_object(self, wire, content):
566 def add_object(self, wire, content):
567 repo_init = self._factory.repo_libgit2(wire)
567 repo_init = self._factory.repo_libgit2(wire)
568 with repo_init as repo:
568 with repo_init as repo:
569 blob = objects.Blob()
569 blob = objects.Blob()
570 blob.set_raw_string(content)
570 blob.set_raw_string(content)
571 repo.object_store.add_object(blob)
571 repo.object_store.add_object(blob)
572 return blob.id
572 return blob.id
573
573
    @reraise_safe_exceptions
    def create_commit(self, wire, author, committer, message, branch, new_tree_id,
                      date_args: list[int, int] = None,
                      parents: list | None = None):
        """
        Create a commit on ``refs/heads/<branch>`` pointing at ``new_tree_id``
        and return the new commit id as a string.

        :param author: author string; converted to a signature when
            ``date_args`` is given.
        :param committer: committer string, handled like ``author``.
        :param date_args: optional ``[commit_time, offset]`` pair used to
            build both signatures.
        :param parents: optional list of parent commit shas; when omitted,
            parents are derived from the current branch head (or HEAD).
        """
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:

            if date_args:
                current_time, offset = date_args

                kw = {
                    'time': current_time,
                    'offset': offset
                }
                author = create_signature_from_string(author, **kw)
                committer = create_signature_from_string(committer, **kw)

            tree = new_tree_id
            if isinstance(tree, (bytes, str)):
                # validate this tree is in the repo...
                tree = repo[safe_str(tree)].id

            if parents:
                # run via sha's and validate them in repo
                parents = [repo[c].id for c in parents]
            else:
                parents = []
                # ensure we COMMIT on top of given branch head
                # check if this repo has ANY branches, otherwise it's a new branch case we need to make
                if branch in repo.branches.local:
                    parents += [repo.branches[branch].target]
                elif [x for x in repo.branches.local]:
                    parents += [repo.head.target]
                #else:
                # in case we want to commit on new branch we create it on top of HEAD
                #repo.branches.local.create(branch, repo.revparse_single('HEAD'))

            # # Create a new commit
            commit_oid = repo.create_commit(
                f'refs/heads/{branch}',  # the name of the reference to update
                author,  # the author of the commit
                committer,  # the committer of the commit
                message,  # the commit message
                tree,  # the tree produced by the index
                parents  # list of parents for the new commit, usually just one,
            )

            new_commit_id = safe_str(commit_oid)

            return new_commit_id
625
625
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """
        Build a tree from ``updated``/``removed`` pathspecs and commit it on
        ``branch``; returns the new commit id.

        :param commit_data: dict with 'author', 'committer', 'message',
            'parents', 'commit_time', 'commit_timezone'.
        :param updated: iterable of {'path', 'content', 'mode'} entries to
            add/replace in the index.
        :param removed: iterable of paths to drop from the index.
        """
        def mode2pygit(mode):
            """
            git only supports two filemode 644 and 755

            0o100755 -> 33261
            0o100644 -> 33188
            """
            return {
                0o100644: pygit2.GIT_FILEMODE_BLOB,
                0o100755: pygit2.GIT_FILEMODE_BLOB_EXECUTABLE,
                0o120000: pygit2.GIT_FILEMODE_LINK
            }.get(mode) or pygit2.GIT_FILEMODE_BLOB

        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            repo_index = repo.index

            commit_parents = None
            if commit_tree and commit_data['parents']:
                commit_parents = commit_data['parents']
                # seed the index from the first parent's tree before applying changes
                parent_commit = repo[commit_parents[0]]
                repo_index.read_tree(parent_commit.tree)

            for pathspec in updated:
                blob_id = repo.create_blob(pathspec['content'])
                ie = pygit2.IndexEntry(pathspec['path'], blob_id, mode2pygit(pathspec['mode']))
                repo_index.add(ie)

            for pathspec in removed:
                repo_index.remove(pathspec)

            # Write changes to the index
            repo_index.write()

            # Create a tree from the updated index
            written_commit_tree = repo_index.write_tree()

            new_tree_id = written_commit_tree

            author = commit_data['author']
            committer = commit_data['committer']
            message = commit_data['message']

            date_args = [int(commit_data['commit_time']), int(commit_data['commit_timezone'])]

            new_commit_id = self.create_commit(wire, author, committer, message, branch,
                                               new_tree_id, date_args=date_args, parents=commit_parents)

            # libgit2, ensure the branch is there and exists
            self.create_branch(wire, branch, new_commit_id)

            # libgit2, set new ref to this created commit
            self.set_refs(wire, f'refs/heads/{branch}', new_commit_id)

            return new_commit_id
684
684
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        """
        Fetch from ``url`` (local path or http(s) URL) into the repo behind
        ``wire`` using dulwich clients, and return the remote refs mapping.

        :param apply_refs: when True, write fetched refs into the local repo
            (peeled refs skipped) and possibly move HEAD.
        :param refs: optional list of ref names to restrict the fetch to.
        :param update_after: when truthy, check out HEAD after fetching.
        """
        # local paths use the filesystem client, everything else goes over HTTP
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(safe_bytes(url))
            o = self._build_opener(url)
            url = url_obj.authinfo()[0]
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all

        if refs:
            refs: list[bytes] = [ascii_bytes(x) for x in refs]

            def determine_wants_requested(_remote_refs):
                # keep only the hashes of the explicitly requested ref names
                determined = []
                for ref_name, ref_hash in _remote_refs.items():
                    bytes_ref_name = safe_bytes(ref_name)

                    if bytes_ref_name in refs:
                        bytes_ref_hash = safe_bytes(ref_hash)
                        determined.append(bytes_ref_hash)
                return determined

            # swap with our custom requested wants
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)

        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # update to ref
                # mikhail: explicitly set the head to the last ref.
                update_to_ref = refs[-1]
                if isinstance(update_after, str):
                    update_to_ref = update_after

                repo[HEAD_MARKER] = remote_refs[update_to_ref]

        if update_after:
            # we want to check out HEAD
            repo[HEAD_MARKER] = remote_refs[HEAD_MARKER]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo[HEAD_MARKER].tree)

        # dulwich may hand back a FetchPackResult wrapper; normalize to a dict
        if isinstance(remote_refs, FetchPackResult):
            return remote_refs.refs
        return remote_refs
758
758
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None, all_refs=False):
        """
        Fetch refs from a remote ``url`` using the git binary and return an
        ordered mapping of remote ref name -> sha.

        Remote refs are discovered with ``ls-remote`` (heads/tags only unless
        ``all_refs``); HEAD and peeled tag refs are never synced. When
        ``refs`` is given, only refs whose sha appears in it are fetched.
        Fetches are chunked to keep the command line within limits.
        """
        self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]

        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        cmd = ['ls-remote']
        if not all_refs:
            cmd += ['--heads', '--tags']
        cmd += [url]
        output, __ = self.run_git_command(
            wire, cmd, fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        for ref_line in output.splitlines():
            # ls-remote output is b"<sha>\t<refname>" per line
            sha, ref = ref_line.split(b'\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in [HEAD_MARKER]:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
            elif not refs:
                fetch_refs.append(f'{safe_str(ref)}:{safe_str(ref)}')
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

        if fetch_refs:
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs
812
812
813 @reraise_safe_exceptions
813 @reraise_safe_exceptions
814 def sync_push(self, wire, url, refs=None):
814 def sync_push(self, wire, url, refs=None):
815 if not self.check_url(url, wire):
815 if not self.check_url(url, wire):
816 return
816 return
817 config = self._wire_to_config(wire)
817 config = self._wire_to_config(wire)
818 self._factory.repo(wire)
818 self._factory.repo(wire)
819 self.run_git_command(
819 self.run_git_command(
820 wire, ['push', url, '--mirror'], fail_on_stderr=False,
820 wire, ['push', url, '--mirror'], fail_on_stderr=False,
821 _copts=self._remote_conf(config),
821 _copts=self._remote_conf(config),
822 extra_env={'GIT_TERMINAL_PROMPT': '0'})
822 extra_env={'GIT_TERMINAL_PROMPT': '0'})
823
823
824 @reraise_safe_exceptions
824 @reraise_safe_exceptions
825 def get_remote_refs(self, wire, url):
825 def get_remote_refs(self, wire, url):
826 repo = Repo(url)
826 repo = Repo(url)
827 return repo.get_refs()
827 return repo.get_refs()
828
828
829 @reraise_safe_exceptions
829 @reraise_safe_exceptions
830 def get_description(self, wire):
830 def get_description(self, wire):
831 repo = self._factory.repo(wire)
831 repo = self._factory.repo(wire)
832 return repo.get_description()
832 return repo.get_description()
833
833
    @reraise_safe_exceptions
    def get_missing_revs(self, wire, rev1, rev2, other_repo_path):
        """
        Return the commit ids reachable from ``rev2`` but not from ``rev1``,
        walked in the repository at ``other_repo_path``.

        Both repositories are cross-fetched first so each side holds the
        objects required for the walk.
        """
        origin_repo_path = wire['path']
        repo = self._factory.repo(wire)
        # fetch from other_repo_path to our origin repo
        LocalGitClient(thin_packs=False).fetch(other_repo_path, repo)

        wire_remote = wire.copy()
        wire_remote['path'] = other_repo_path
        repo_remote = self._factory.repo(wire_remote)

        # fetch from origin_repo_path to our remote repo
        LocalGitClient(thin_packs=False).fetch(origin_repo_path, repo_remote)

        revs = [
            x.commit.id
            for x in repo_remote.get_walker(include=[safe_bytes(rev2)], exclude=[safe_bytes(rev1)])]
        return revs
852
852
    @reraise_safe_exceptions
    def get_object(self, wire, sha, maybe_unreachable=False):
        """
        Resolve ``sha`` (any rev-parse-able spec) and return a small dict
        with 'id', 'type', 'commit_id' and 'idx'.

        Tag objects are peeled to their target. Unless the object was
        reached via a symbolic spec, is a tag, or ``maybe_unreachable`` is
        set, the commit must be reachable from at least one branch —
        otherwise a LookupException is raised.

        NOTE(review): the cached inner function reads the closed-over
        ``sha``/``wire`` rather than its ``_sha`` argument; values are
        identical per call, but the cache key parameter is what actually
        scopes the cache — confirm before refactoring.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except KeyError:
                    # NOTE(marcink): KeyError doesn't give us any meaningful information
                    # here, we instead give something more explicit
                    e = exceptions.RefNotFoundException('SHA: %s not found', sha)
                    raise exceptions.LookupException(e)(missing_commit_err)
                except ValueError as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    # peel annotated tags down to the tagged object
                    commit = repo.get(commit.target)
                    is_tag = True

                check_dangling = True
                if is_tag:
                    check_dangling = False

                if check_dangling and maybe_unreachable:
                    check_dangling = False

                # we used a reference and it parsed means we're not having a dangling commit
                if sha != commit.hex:
                    check_dangling = False

                if check_dangling:
                    # check for dangling commit
                    for branch in repo.branches.with_commit(commit.hex):
                        if branch:
                            break
                    else:
                        # NOTE(marcink): Empty error doesn't give us any meaningful information
                        # here, we instead give something more explicit
                        e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
                        raise exceptions.LookupException(e)(missing_commit_err)

                commit_id = commit.hex
                type_str = commit.type_str

                return {
                    'id': commit_id,
                    'type': type_str,
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)
912
912
913 @reraise_safe_exceptions
913 @reraise_safe_exceptions
914 def get_refs(self, wire):
914 def get_refs(self, wire):
915 cache_on, context_uid, repo_id = self._cache_on(wire)
915 cache_on, context_uid, repo_id = self._cache_on(wire)
916 region = self._region(wire)
916 region = self._region(wire)
917
917
918 @region.conditional_cache_on_arguments(condition=cache_on)
918 @region.conditional_cache_on_arguments(condition=cache_on)
919 def _get_refs(_context_uid, _repo_id):
919 def _get_refs(_context_uid, _repo_id):
920
920
921 repo_init = self._factory.repo_libgit2(wire)
921 repo_init = self._factory.repo_libgit2(wire)
922 with repo_init as repo:
922 with repo_init as repo:
923 regex = re.compile('^refs/(heads|tags)/')
923 regex = re.compile('^refs/(heads|tags)/')
924 return {x.name: x.target.hex for x in
924 return {x.name: x.target.hex for x in
925 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
925 [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]}
926
926
927 return _get_refs(context_uid, repo_id)
927 return _get_refs(context_uid, repo_id)
928
928
929 @reraise_safe_exceptions
929 @reraise_safe_exceptions
930 def get_branch_pointers(self, wire):
930 def get_branch_pointers(self, wire):
931 cache_on, context_uid, repo_id = self._cache_on(wire)
931 cache_on, context_uid, repo_id = self._cache_on(wire)
932 region = self._region(wire)
932 region = self._region(wire)
933
933
934 @region.conditional_cache_on_arguments(condition=cache_on)
934 @region.conditional_cache_on_arguments(condition=cache_on)
935 def _get_branch_pointers(_context_uid, _repo_id):
935 def _get_branch_pointers(_context_uid, _repo_id):
936
936
937 repo_init = self._factory.repo_libgit2(wire)
937 repo_init = self._factory.repo_libgit2(wire)
938 regex = re.compile('^refs/heads')
938 regex = re.compile('^refs/heads')
939 with repo_init as repo:
939 with repo_init as repo:
940 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
940 branches = [ref for ref in repo.listall_reference_objects() if regex.match(ref.name)]
941 return {x.target.hex: x.shorthand for x in branches}
941 return {x.target.hex: x.shorthand for x in branches}
942
942
943 return _get_branch_pointers(context_uid, repo_id)
943 return _get_branch_pointers(context_uid, repo_id)
944
944
945 @reraise_safe_exceptions
945 @reraise_safe_exceptions
946 def head(self, wire, show_exc=True):
946 def head(self, wire, show_exc=True):
947 cache_on, context_uid, repo_id = self._cache_on(wire)
947 cache_on, context_uid, repo_id = self._cache_on(wire)
948 region = self._region(wire)
948 region = self._region(wire)
949
949
950 @region.conditional_cache_on_arguments(condition=cache_on)
950 @region.conditional_cache_on_arguments(condition=cache_on)
951 def _head(_context_uid, _repo_id, _show_exc):
951 def _head(_context_uid, _repo_id, _show_exc):
952 repo_init = self._factory.repo_libgit2(wire)
952 repo_init = self._factory.repo_libgit2(wire)
953 with repo_init as repo:
953 with repo_init as repo:
954 try:
954 try:
955 return repo.head.peel().hex
955 return repo.head.peel().hex
956 except Exception:
956 except Exception:
957 if show_exc:
957 if show_exc:
958 raise
958 raise
959 return _head(context_uid, repo_id, show_exc)
959 return _head(context_uid, repo_id, show_exc)
960
960
    @reraise_safe_exceptions
    def init(self, wire):
        # Create the target directory first (os.makedirs raises if the path
        # already exists), then initialize a non-bare git repository in it.
        repo_path = safe_str(wire['path'])
        os.makedirs(repo_path, mode=0o755)
        pygit2.init_repository(repo_path, bare=False)
965
966
    @reraise_safe_exceptions
    def init_bare(self, wire):
        # Same as init() but for a bare repository: create the directory
        # (raises if it already exists), then initialize with bare=True.
        repo_path = safe_str(wire['path'])
        os.makedirs(repo_path, mode=0o755)
        pygit2.init_repository(repo_path, bare=True)
970
972
    @reraise_safe_exceptions
    def revision(self, wire, rev):
        """
        Return basic object data for ``rev``: its hex 'id' and, for objects
        that carry one, the 'tree' id. Cached per repo context.

        NOTE(review): the inner function indexes with the closed-over
        ``rev`` rather than its ``_rev`` cache-key argument — same value per
        call, but worth aligning with the argument on a future cleanup.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _revision(_context_uid, _repo_id, _rev):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[rev]
                obj_data = {
                    'id': commit.id.hex,
                }
                # tree objects itself don't have tree_id attribute
                if hasattr(commit, 'tree_id'):
                    obj_data['tree'] = commit.tree_id.hex

                return obj_data
        return _revision(context_uid, repo_id, rev)
991
993
992 @reraise_safe_exceptions
994 @reraise_safe_exceptions
993 def date(self, wire, commit_id):
995 def date(self, wire, commit_id):
994 cache_on, context_uid, repo_id = self._cache_on(wire)
996 cache_on, context_uid, repo_id = self._cache_on(wire)
995 region = self._region(wire)
997 region = self._region(wire)
996
998
997 @region.conditional_cache_on_arguments(condition=cache_on)
999 @region.conditional_cache_on_arguments(condition=cache_on)
998 def _date(_repo_id, _commit_id):
1000 def _date(_repo_id, _commit_id):
999 repo_init = self._factory.repo_libgit2(wire)
1001 repo_init = self._factory.repo_libgit2(wire)
1000 with repo_init as repo:
1002 with repo_init as repo:
1001 commit = repo[commit_id]
1003 commit = repo[commit_id]
1002
1004
1003 if hasattr(commit, 'commit_time'):
1005 if hasattr(commit, 'commit_time'):
1004 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
1006 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
1005 else:
1007 else:
1006 commit = commit.get_object()
1008 commit = commit.get_object()
1007 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
1009 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
1008
1010
1009 # TODO(marcink): check dulwich difference of offset vs timezone
1011 # TODO(marcink): check dulwich difference of offset vs timezone
1010 return [commit_time, commit_time_offset]
1012 return [commit_time, commit_time_offset]
1011 return _date(repo_id, commit_id)
1013 return _date(repo_id, commit_id)
1012
1014
1013 @reraise_safe_exceptions
1015 @reraise_safe_exceptions
1014 def author(self, wire, commit_id):
1016 def author(self, wire, commit_id):
1015 cache_on, context_uid, repo_id = self._cache_on(wire)
1017 cache_on, context_uid, repo_id = self._cache_on(wire)
1016 region = self._region(wire)
1018 region = self._region(wire)
1017
1019
1018 @region.conditional_cache_on_arguments(condition=cache_on)
1020 @region.conditional_cache_on_arguments(condition=cache_on)
1019 def _author(_repo_id, _commit_id):
1021 def _author(_repo_id, _commit_id):
1020 repo_init = self._factory.repo_libgit2(wire)
1022 repo_init = self._factory.repo_libgit2(wire)
1021 with repo_init as repo:
1023 with repo_init as repo:
1022 commit = repo[commit_id]
1024 commit = repo[commit_id]
1023
1025
1024 if hasattr(commit, 'author'):
1026 if hasattr(commit, 'author'):
1025 author = commit.author
1027 author = commit.author
1026 else:
1028 else:
1027 author = commit.get_object().author
1029 author = commit.get_object().author
1028
1030
1029 if author.email:
1031 if author.email:
1030 return f"{author.name} <{author.email}>"
1032 return f"{author.name} <{author.email}>"
1031
1033
1032 try:
1034 try:
1033 return f"{author.name}"
1035 return f"{author.name}"
1034 except Exception:
1036 except Exception:
1035 return f"{safe_str(author.raw_name)}"
1037 return f"{safe_str(author.raw_name)}"
1036
1038
1037 return _author(repo_id, commit_id)
1039 return _author(repo_id, commit_id)
1038
1040
1039 @reraise_safe_exceptions
1041 @reraise_safe_exceptions
1040 def message(self, wire, commit_id):
1042 def message(self, wire, commit_id):
1041 cache_on, context_uid, repo_id = self._cache_on(wire)
1043 cache_on, context_uid, repo_id = self._cache_on(wire)
1042 region = self._region(wire)
1044 region = self._region(wire)
1043
1045
1044 @region.conditional_cache_on_arguments(condition=cache_on)
1046 @region.conditional_cache_on_arguments(condition=cache_on)
1045 def _message(_repo_id, _commit_id):
1047 def _message(_repo_id, _commit_id):
1046 repo_init = self._factory.repo_libgit2(wire)
1048 repo_init = self._factory.repo_libgit2(wire)
1047 with repo_init as repo:
1049 with repo_init as repo:
1048 commit = repo[commit_id]
1050 commit = repo[commit_id]
1049 return commit.message
1051 return commit.message
1050 return _message(repo_id, commit_id)
1052 return _message(repo_id, commit_id)
1051
1053
1052 @reraise_safe_exceptions
1054 @reraise_safe_exceptions
1053 def parents(self, wire, commit_id):
1055 def parents(self, wire, commit_id):
1054 cache_on, context_uid, repo_id = self._cache_on(wire)
1056 cache_on, context_uid, repo_id = self._cache_on(wire)
1055 region = self._region(wire)
1057 region = self._region(wire)
1056
1058
1057 @region.conditional_cache_on_arguments(condition=cache_on)
1059 @region.conditional_cache_on_arguments(condition=cache_on)
1058 def _parents(_repo_id, _commit_id):
1060 def _parents(_repo_id, _commit_id):
1059 repo_init = self._factory.repo_libgit2(wire)
1061 repo_init = self._factory.repo_libgit2(wire)
1060 with repo_init as repo:
1062 with repo_init as repo:
1061 commit = repo[commit_id]
1063 commit = repo[commit_id]
1062 if hasattr(commit, 'parent_ids'):
1064 if hasattr(commit, 'parent_ids'):
1063 parent_ids = commit.parent_ids
1065 parent_ids = commit.parent_ids
1064 else:
1066 else:
1065 parent_ids = commit.get_object().parent_ids
1067 parent_ids = commit.get_object().parent_ids
1066
1068
1067 return [x.hex for x in parent_ids]
1069 return [x.hex for x in parent_ids]
1068 return _parents(repo_id, commit_id)
1070 return _parents(repo_id, commit_id)
1069
1071
1070 @reraise_safe_exceptions
1072 @reraise_safe_exceptions
1071 def children(self, wire, commit_id):
1073 def children(self, wire, commit_id):
1072 cache_on, context_uid, repo_id = self._cache_on(wire)
1074 cache_on, context_uid, repo_id = self._cache_on(wire)
1073 region = self._region(wire)
1075 region = self._region(wire)
1074
1076
1075 head = self.head(wire)
1077 head = self.head(wire)
1076
1078
1077 @region.conditional_cache_on_arguments(condition=cache_on)
1079 @region.conditional_cache_on_arguments(condition=cache_on)
1078 def _children(_repo_id, _commit_id):
1080 def _children(_repo_id, _commit_id):
1079
1081
1080 output, __ = self.run_git_command(
1082 output, __ = self.run_git_command(
1081 wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])
1083 wire, ['rev-list', '--all', '--children', f'{commit_id}^..{head}'])
1082
1084
1083 child_ids = []
1085 child_ids = []
1084 pat = re.compile(fr'^{commit_id}')
1086 pat = re.compile(fr'^{commit_id}')
1085 for line in output.splitlines():
1087 for line in output.splitlines():
1086 line = safe_str(line)
1088 line = safe_str(line)
1087 if pat.match(line):
1089 if pat.match(line):
1088 found_ids = line.split(' ')[1:]
1090 found_ids = line.split(' ')[1:]
1089 child_ids.extend(found_ids)
1091 child_ids.extend(found_ids)
1090 break
1092 break
1091
1093
1092 return child_ids
1094 return child_ids
1093 return _children(repo_id, commit_id)
1095 return _children(repo_id, commit_id)
1094
1096
1095 @reraise_safe_exceptions
1097 @reraise_safe_exceptions
1096 def set_refs(self, wire, key, value):
1098 def set_refs(self, wire, key, value):
1097 repo_init = self._factory.repo_libgit2(wire)
1099 repo_init = self._factory.repo_libgit2(wire)
1098 with repo_init as repo:
1100 with repo_init as repo:
1099 repo.references.create(key, value, force=True)
1101 repo.references.create(key, value, force=True)
1100
1102
1101 @reraise_safe_exceptions
1103 @reraise_safe_exceptions
1102 def create_branch(self, wire, branch_name, commit_id, force=False):
1104 def create_branch(self, wire, branch_name, commit_id, force=False):
1103 repo_init = self._factory.repo_libgit2(wire)
1105 repo_init = self._factory.repo_libgit2(wire)
1104 with repo_init as repo:
1106 with repo_init as repo:
1105 if commit_id:
1107 if commit_id:
1106 commit = repo[commit_id]
1108 commit = repo[commit_id]
1107 else:
1109 else:
1108 # if commit is not given just use the HEAD
1110 # if commit is not given just use the HEAD
1109 commit = repo.head()
1111 commit = repo.head()
1110
1112
1111 if force:
1113 if force:
1112 repo.branches.local.create(branch_name, commit, force=force)
1114 repo.branches.local.create(branch_name, commit, force=force)
1113 elif not repo.branches.get(branch_name):
1115 elif not repo.branches.get(branch_name):
1114 # create only if that branch isn't existing
1116 # create only if that branch isn't existing
1115 repo.branches.local.create(branch_name, commit, force=force)
1117 repo.branches.local.create(branch_name, commit, force=force)
1116
1118
1117 @reraise_safe_exceptions
1119 @reraise_safe_exceptions
1118 def remove_ref(self, wire, key):
1120 def remove_ref(self, wire, key):
1119 repo_init = self._factory.repo_libgit2(wire)
1121 repo_init = self._factory.repo_libgit2(wire)
1120 with repo_init as repo:
1122 with repo_init as repo:
1121 repo.references.delete(key)
1123 repo.references.delete(key)
1122
1124
1123 @reraise_safe_exceptions
1125 @reraise_safe_exceptions
1124 def tag_remove(self, wire, tag_name):
1126 def tag_remove(self, wire, tag_name):
1125 repo_init = self._factory.repo_libgit2(wire)
1127 repo_init = self._factory.repo_libgit2(wire)
1126 with repo_init as repo:
1128 with repo_init as repo:
1127 key = f'refs/tags/{tag_name}'
1129 key = f'refs/tags/{tag_name}'
1128 repo.references.delete(key)
1130 repo.references.delete(key)
1129
1131
1130 @reraise_safe_exceptions
1132 @reraise_safe_exceptions
1131 def tree_changes(self, wire, source_id, target_id):
1133 def tree_changes(self, wire, source_id, target_id):
1132 repo = self._factory.repo(wire)
1134 repo = self._factory.repo(wire)
1133 # source can be empty
1135 # source can be empty
1134 source_id = safe_bytes(source_id if source_id else b'')
1136 source_id = safe_bytes(source_id if source_id else b'')
1135 target_id = safe_bytes(target_id)
1137 target_id = safe_bytes(target_id)
1136
1138
1137 source = repo[source_id].tree if source_id else None
1139 source = repo[source_id].tree if source_id else None
1138 target = repo[target_id].tree
1140 target = repo[target_id].tree
1139 result = repo.object_store.tree_changes(source, target)
1141 result = repo.object_store.tree_changes(source, target)
1140
1142
1141 added = set()
1143 added = set()
1142 modified = set()
1144 modified = set()
1143 deleted = set()
1145 deleted = set()
1144 for (old_path, new_path), (_, _), (_, _) in list(result):
1146 for (old_path, new_path), (_, _), (_, _) in list(result):
1145 if new_path and old_path:
1147 if new_path and old_path:
1146 modified.add(new_path)
1148 modified.add(new_path)
1147 elif new_path and not old_path:
1149 elif new_path and not old_path:
1148 added.add(new_path)
1150 added.add(new_path)
1149 elif not new_path and old_path:
1151 elif not new_path and old_path:
1150 deleted.add(old_path)
1152 deleted.add(old_path)
1151
1153
1152 return list(added), list(modified), list(deleted)
1154 return list(added), list(modified), list(deleted)
1153
1155
1154 @reraise_safe_exceptions
1156 @reraise_safe_exceptions
1155 def tree_and_type_for_path(self, wire, commit_id, path):
1157 def tree_and_type_for_path(self, wire, commit_id, path):
1156
1158
1157 cache_on, context_uid, repo_id = self._cache_on(wire)
1159 cache_on, context_uid, repo_id = self._cache_on(wire)
1158 region = self._region(wire)
1160 region = self._region(wire)
1159
1161
1160 @region.conditional_cache_on_arguments(condition=cache_on)
1162 @region.conditional_cache_on_arguments(condition=cache_on)
1161 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1163 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
1162 repo_init = self._factory.repo_libgit2(wire)
1164 repo_init = self._factory.repo_libgit2(wire)
1163
1165
1164 with repo_init as repo:
1166 with repo_init as repo:
1165 commit = repo[commit_id]
1167 commit = repo[commit_id]
1166 try:
1168 try:
1167 tree = commit.tree[path]
1169 tree = commit.tree[path]
1168 except KeyError:
1170 except KeyError:
1169 return None, None, None
1171 return None, None, None
1170
1172
1171 return tree.id.hex, tree.type_str, tree.filemode
1173 return tree.id.hex, tree.type_str, tree.filemode
1172 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1174 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
1173
1175
1174 @reraise_safe_exceptions
1176 @reraise_safe_exceptions
1175 def tree_items(self, wire, tree_id):
1177 def tree_items(self, wire, tree_id):
1176 cache_on, context_uid, repo_id = self._cache_on(wire)
1178 cache_on, context_uid, repo_id = self._cache_on(wire)
1177 region = self._region(wire)
1179 region = self._region(wire)
1178
1180
1179 @region.conditional_cache_on_arguments(condition=cache_on)
1181 @region.conditional_cache_on_arguments(condition=cache_on)
1180 def _tree_items(_repo_id, _tree_id):
1182 def _tree_items(_repo_id, _tree_id):
1181
1183
1182 repo_init = self._factory.repo_libgit2(wire)
1184 repo_init = self._factory.repo_libgit2(wire)
1183 with repo_init as repo:
1185 with repo_init as repo:
1184 try:
1186 try:
1185 tree = repo[tree_id]
1187 tree = repo[tree_id]
1186 except KeyError:
1188 except KeyError:
1187 raise ObjectMissing(f'No tree with id: {tree_id}')
1189 raise ObjectMissing(f'No tree with id: {tree_id}')
1188
1190
1189 result = []
1191 result = []
1190 for item in tree:
1192 for item in tree:
1191 item_sha = item.hex
1193 item_sha = item.hex
1192 item_mode = item.filemode
1194 item_mode = item.filemode
1193 item_type = item.type_str
1195 item_type = item.type_str
1194
1196
1195 if item_type == 'commit':
1197 if item_type == 'commit':
1196 # NOTE(marcink): submodules we translate to 'link' for backward compat
1198 # NOTE(marcink): submodules we translate to 'link' for backward compat
1197 item_type = 'link'
1199 item_type = 'link'
1198
1200
1199 result.append((item.name, item_mode, item_sha, item_type))
1201 result.append((item.name, item_mode, item_sha, item_type))
1200 return result
1202 return result
1201 return _tree_items(repo_id, tree_id)
1203 return _tree_items(repo_id, tree_id)
1202
1204
1203 @reraise_safe_exceptions
1205 @reraise_safe_exceptions
1204 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1206 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1205 """
1207 """
1206 Old version that uses subprocess to call diff
1208 Old version that uses subprocess to call diff
1207 """
1209 """
1208
1210
1209 flags = [
1211 flags = [
1210 f'-U{context}', '--patch',
1212 f'-U{context}', '--patch',
1211 '--binary',
1213 '--binary',
1212 '--find-renames',
1214 '--find-renames',
1213 '--no-indent-heuristic',
1215 '--no-indent-heuristic',
1214 # '--indent-heuristic',
1216 # '--indent-heuristic',
1215 #'--full-index',
1217 #'--full-index',
1216 #'--abbrev=40'
1218 #'--abbrev=40'
1217 ]
1219 ]
1218
1220
1219 if opt_ignorews:
1221 if opt_ignorews:
1220 flags.append('--ignore-all-space')
1222 flags.append('--ignore-all-space')
1221
1223
1222 if commit_id_1 == self.EMPTY_COMMIT:
1224 if commit_id_1 == self.EMPTY_COMMIT:
1223 cmd = ['show'] + flags + [commit_id_2]
1225 cmd = ['show'] + flags + [commit_id_2]
1224 else:
1226 else:
1225 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1227 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1226
1228
1227 if file_filter:
1229 if file_filter:
1228 cmd.extend(['--', file_filter])
1230 cmd.extend(['--', file_filter])
1229
1231
1230 diff, __ = self.run_git_command(wire, cmd)
1232 diff, __ = self.run_git_command(wire, cmd)
1231 # If we used 'show' command, strip first few lines (until actual diff
1233 # If we used 'show' command, strip first few lines (until actual diff
1232 # starts)
1234 # starts)
1233 if commit_id_1 == self.EMPTY_COMMIT:
1235 if commit_id_1 == self.EMPTY_COMMIT:
1234 lines = diff.splitlines()
1236 lines = diff.splitlines()
1235 x = 0
1237 x = 0
1236 for line in lines:
1238 for line in lines:
1237 if line.startswith(b'diff'):
1239 if line.startswith(b'diff'):
1238 break
1240 break
1239 x += 1
1241 x += 1
1240 # Append new line just like 'diff' command do
1242 # Append new line just like 'diff' command do
1241 diff = '\n'.join(lines[x:]) + '\n'
1243 diff = '\n'.join(lines[x:]) + '\n'
1242 return diff
1244 return diff
1243
1245
1244 @reraise_safe_exceptions
1246 @reraise_safe_exceptions
1245 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1247 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1246 repo_init = self._factory.repo_libgit2(wire)
1248 repo_init = self._factory.repo_libgit2(wire)
1247
1249
1248 with repo_init as repo:
1250 with repo_init as repo:
1249 swap = True
1251 swap = True
1250 flags = 0
1252 flags = 0
1251 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1253 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1252
1254
1253 if opt_ignorews:
1255 if opt_ignorews:
1254 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1256 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1255
1257
1256 if commit_id_1 == self.EMPTY_COMMIT:
1258 if commit_id_1 == self.EMPTY_COMMIT:
1257 comm1 = repo[commit_id_2]
1259 comm1 = repo[commit_id_2]
1258 diff_obj = comm1.tree.diff_to_tree(
1260 diff_obj = comm1.tree.diff_to_tree(
1259 flags=flags, context_lines=context, swap=swap)
1261 flags=flags, context_lines=context, swap=swap)
1260
1262
1261 else:
1263 else:
1262 comm1 = repo[commit_id_2]
1264 comm1 = repo[commit_id_2]
1263 comm2 = repo[commit_id_1]
1265 comm2 = repo[commit_id_1]
1264 diff_obj = comm1.tree.diff_to_tree(
1266 diff_obj = comm1.tree.diff_to_tree(
1265 comm2.tree, flags=flags, context_lines=context, swap=swap)
1267 comm2.tree, flags=flags, context_lines=context, swap=swap)
1266 similar_flags = 0
1268 similar_flags = 0
1267 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1269 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1268 diff_obj.find_similar(flags=similar_flags)
1270 diff_obj.find_similar(flags=similar_flags)
1269
1271
1270 if file_filter:
1272 if file_filter:
1271 for p in diff_obj:
1273 for p in diff_obj:
1272 if p.delta.old_file.path == file_filter:
1274 if p.delta.old_file.path == file_filter:
1273 return BytesEnvelope(p.data) or BytesEnvelope(b'')
1275 return BytesEnvelope(p.data) or BytesEnvelope(b'')
1274 # fo matching path == no diff
1276 # fo matching path == no diff
1275 return BytesEnvelope(b'')
1277 return BytesEnvelope(b'')
1276
1278
1277 return BytesEnvelope(safe_bytes(diff_obj.patch)) or BytesEnvelope(b'')
1279 return BytesEnvelope(safe_bytes(diff_obj.patch)) or BytesEnvelope(b'')
1278
1280
1279 @reraise_safe_exceptions
1281 @reraise_safe_exceptions
1280 def node_history(self, wire, commit_id, path, limit):
1282 def node_history(self, wire, commit_id, path, limit):
1281 cache_on, context_uid, repo_id = self._cache_on(wire)
1283 cache_on, context_uid, repo_id = self._cache_on(wire)
1282 region = self._region(wire)
1284 region = self._region(wire)
1283
1285
1284 @region.conditional_cache_on_arguments(condition=cache_on)
1286 @region.conditional_cache_on_arguments(condition=cache_on)
1285 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1287 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1286 # optimize for n==1, rev-list is much faster for that use-case
1288 # optimize for n==1, rev-list is much faster for that use-case
1287 if limit == 1:
1289 if limit == 1:
1288 cmd = ['rev-list', '-1', commit_id, '--', path]
1290 cmd = ['rev-list', '-1', commit_id, '--', path]
1289 else:
1291 else:
1290 cmd = ['log']
1292 cmd = ['log']
1291 if limit:
1293 if limit:
1292 cmd.extend(['-n', str(safe_int(limit, 0))])
1294 cmd.extend(['-n', str(safe_int(limit, 0))])
1293 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1295 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1294
1296
1295 output, __ = self.run_git_command(wire, cmd)
1297 output, __ = self.run_git_command(wire, cmd)
1296 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1298 commit_ids = re.findall(rb'[0-9a-fA-F]{40}', output)
1297
1299
1298 return [x for x in commit_ids]
1300 return [x for x in commit_ids]
1299 return _node_history(context_uid, repo_id, commit_id, path, limit)
1301 return _node_history(context_uid, repo_id, commit_id, path, limit)
1300
1302
1301 @reraise_safe_exceptions
1303 @reraise_safe_exceptions
1302 def node_annotate_legacy(self, wire, commit_id, path):
1304 def node_annotate_legacy(self, wire, commit_id, path):
1303 # note: replaced by pygit2 implementation
1305 # note: replaced by pygit2 implementation
1304 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1306 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1305 # -l ==> outputs long shas (and we need all 40 characters)
1307 # -l ==> outputs long shas (and we need all 40 characters)
1306 # --root ==> doesn't put '^' character for boundaries
1308 # --root ==> doesn't put '^' character for boundaries
1307 # -r commit_id ==> blames for the given commit
1309 # -r commit_id ==> blames for the given commit
1308 output, __ = self.run_git_command(wire, cmd)
1310 output, __ = self.run_git_command(wire, cmd)
1309
1311
1310 result = []
1312 result = []
1311 for i, blame_line in enumerate(output.splitlines()[:-1]):
1313 for i, blame_line in enumerate(output.splitlines()[:-1]):
1312 line_no = i + 1
1314 line_no = i + 1
1313 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1315 blame_commit_id, line = re.split(rb' ', blame_line, 1)
1314 result.append((line_no, blame_commit_id, line))
1316 result.append((line_no, blame_commit_id, line))
1315
1317
1316 return result
1318 return result
1317
1319
1318 @reraise_safe_exceptions
1320 @reraise_safe_exceptions
1319 def node_annotate(self, wire, commit_id, path):
1321 def node_annotate(self, wire, commit_id, path):
1320
1322
1321 result_libgit = []
1323 result_libgit = []
1322 repo_init = self._factory.repo_libgit2(wire)
1324 repo_init = self._factory.repo_libgit2(wire)
1323 with repo_init as repo:
1325 with repo_init as repo:
1324 commit = repo[commit_id]
1326 commit = repo[commit_id]
1325 blame_obj = repo.blame(path, newest_commit=commit_id)
1327 blame_obj = repo.blame(path, newest_commit=commit_id)
1326 for i, line in enumerate(commit.tree[path].data.splitlines()):
1328 for i, line in enumerate(commit.tree[path].data.splitlines()):
1327 line_no = i + 1
1329 line_no = i + 1
1328 hunk = blame_obj.for_line(line_no)
1330 hunk = blame_obj.for_line(line_no)
1329 blame_commit_id = hunk.final_commit_id.hex
1331 blame_commit_id = hunk.final_commit_id.hex
1330
1332
1331 result_libgit.append((line_no, blame_commit_id, line))
1333 result_libgit.append((line_no, blame_commit_id, line))
1332
1334
1333 return BinaryEnvelope(result_libgit)
1335 return BinaryEnvelope(result_libgit)
1334
1336
1335 @reraise_safe_exceptions
1337 @reraise_safe_exceptions
1336 def update_server_info(self, wire):
1338 def update_server_info(self, wire):
1337 repo = self._factory.repo(wire)
1339 repo = self._factory.repo(wire)
1338 update_server_info(repo)
1340 update_server_info(repo)
1339
1341
1340 @reraise_safe_exceptions
1342 @reraise_safe_exceptions
1341 def get_all_commit_ids(self, wire):
1343 def get_all_commit_ids(self, wire):
1342
1344
1343 cache_on, context_uid, repo_id = self._cache_on(wire)
1345 cache_on, context_uid, repo_id = self._cache_on(wire)
1344 region = self._region(wire)
1346 region = self._region(wire)
1345
1347
1346 @region.conditional_cache_on_arguments(condition=cache_on)
1348 @region.conditional_cache_on_arguments(condition=cache_on)
1347 def _get_all_commit_ids(_context_uid, _repo_id):
1349 def _get_all_commit_ids(_context_uid, _repo_id):
1348
1350
1349 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1351 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1350 try:
1352 try:
1351 output, __ = self.run_git_command(wire, cmd)
1353 output, __ = self.run_git_command(wire, cmd)
1352 return output.splitlines()
1354 return output.splitlines()
1353 except Exception:
1355 except Exception:
1354 # Can be raised for empty repositories
1356 # Can be raised for empty repositories
1355 return []
1357 return []
1356
1358
1357 @region.conditional_cache_on_arguments(condition=cache_on)
1359 @region.conditional_cache_on_arguments(condition=cache_on)
1358 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1360 def _get_all_commit_ids_pygit2(_context_uid, _repo_id):
1359 repo_init = self._factory.repo_libgit2(wire)
1361 repo_init = self._factory.repo_libgit2(wire)
1360 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1362 from pygit2 import GIT_SORT_REVERSE, GIT_SORT_TIME, GIT_BRANCH_ALL
1361 results = []
1363 results = []
1362 with repo_init as repo:
1364 with repo_init as repo:
1363 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1365 for commit in repo.walk(repo.head.target, GIT_SORT_TIME | GIT_BRANCH_ALL | GIT_SORT_REVERSE):
1364 results.append(commit.id.hex)
1366 results.append(commit.id.hex)
1365
1367
1366 return _get_all_commit_ids(context_uid, repo_id)
1368 return _get_all_commit_ids(context_uid, repo_id)
1367
1369
1368 @reraise_safe_exceptions
1370 @reraise_safe_exceptions
1369 def run_git_command(self, wire, cmd, **opts):
1371 def run_git_command(self, wire, cmd, **opts):
1370 path = wire.get('path', None)
1372 path = wire.get('path', None)
1371 debug_mode = rhodecode.ConfigGet().get_bool('debug')
1373 debug_mode = rhodecode.ConfigGet().get_bool('debug')
1372
1374
1373 if path and os.path.isdir(path):
1375 if path and os.path.isdir(path):
1374 opts['cwd'] = path
1376 opts['cwd'] = path
1375
1377
1376 if '_bare' in opts:
1378 if '_bare' in opts:
1377 _copts = []
1379 _copts = []
1378 del opts['_bare']
1380 del opts['_bare']
1379 else:
1381 else:
1380 _copts = ['-c', 'core.quotepath=false', '-c', 'advice.diverging=false']
1382 _copts = ['-c', 'core.quotepath=false', '-c', 'advice.diverging=false']
1381 safe_call = False
1383 safe_call = False
1382 if '_safe' in opts:
1384 if '_safe' in opts:
1383 # no exc on failure
1385 # no exc on failure
1384 del opts['_safe']
1386 del opts['_safe']
1385 safe_call = True
1387 safe_call = True
1386
1388
1387 if '_copts' in opts:
1389 if '_copts' in opts:
1388 _copts.extend(opts['_copts'] or [])
1390 _copts.extend(opts['_copts'] or [])
1389 del opts['_copts']
1391 del opts['_copts']
1390
1392
1391 gitenv = os.environ.copy()
1393 gitenv = os.environ.copy()
1392 gitenv.update(opts.pop('extra_env', {}))
1394 gitenv.update(opts.pop('extra_env', {}))
1393 # need to clean fix GIT_DIR !
1395 # need to clean fix GIT_DIR !
1394 if 'GIT_DIR' in gitenv:
1396 if 'GIT_DIR' in gitenv:
1395 del gitenv['GIT_DIR']
1397 del gitenv['GIT_DIR']
1396 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1398 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1397 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1399 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1398
1400
1399 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1401 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1400 _opts = {'env': gitenv, 'shell': False}
1402 _opts = {'env': gitenv, 'shell': False}
1401
1403
1402 proc = None
1404 proc = None
1403 try:
1405 try:
1404 _opts.update(opts)
1406 _opts.update(opts)
1405 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1407 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1406
1408
1407 return b''.join(proc), b''.join(proc.stderr)
1409 return b''.join(proc), b''.join(proc.stderr)
1408 except OSError as err:
1410 except OSError as err:
1409 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1411 cmd = ' '.join(map(safe_str, cmd)) # human friendly CMD
1410 call_opts = {}
1412 call_opts = {}
1411 if debug_mode:
1413 if debug_mode:
1412 call_opts = _opts
1414 call_opts = _opts
1413
1415
1414 tb_err = ("Couldn't run git command ({}).\n"
1416 tb_err = ("Couldn't run git command ({}).\n"
1415 "Original error was:{}\n"
1417 "Original error was:{}\n"
1416 "Call options:{}\n"
1418 "Call options:{}\n"
1417 .format(cmd, err, call_opts))
1419 .format(cmd, err, call_opts))
1418 log.exception(tb_err)
1420 log.exception(tb_err)
1419 if safe_call:
1421 if safe_call:
1420 return '', err
1422 return '', err
1421 else:
1423 else:
1422 raise exceptions.VcsException()(tb_err)
1424 raise exceptions.VcsException()(tb_err)
1423 finally:
1425 finally:
1424 if proc:
1426 if proc:
1425 proc.close()
1427 proc.close()
1426
1428
1427 @reraise_safe_exceptions
1429 @reraise_safe_exceptions
1428 def install_hooks(self, wire, force=False):
1430 def install_hooks(self, wire, force=False):
1429 from vcsserver.hook_utils import install_git_hooks
1431 from vcsserver.hook_utils import install_git_hooks
1430 bare = self.bare(wire)
1432 bare = self.bare(wire)
1431 path = wire['path']
1433 path = wire['path']
1432 binary_dir = settings.BINARY_DIR
1434 binary_dir = settings.BINARY_DIR
1433 if binary_dir:
1435 if binary_dir:
1434 os.path.join(binary_dir, 'python3')
1436 os.path.join(binary_dir, 'python3')
1435 return install_git_hooks(path, bare, force_create=force)
1437 return install_git_hooks(path, bare, force_create=force)
1436
1438
1437 @reraise_safe_exceptions
1439 @reraise_safe_exceptions
1438 def get_hooks_info(self, wire):
1440 def get_hooks_info(self, wire):
1439 from vcsserver.hook_utils import (
1441 from vcsserver.hook_utils import (
1440 get_git_pre_hook_version, get_git_post_hook_version)
1442 get_git_pre_hook_version, get_git_post_hook_version)
1441 bare = self.bare(wire)
1443 bare = self.bare(wire)
1442 path = wire['path']
1444 path = wire['path']
1443 return {
1445 return {
1444 'pre_version': get_git_pre_hook_version(path, bare),
1446 'pre_version': get_git_pre_hook_version(path, bare),
1445 'post_version': get_git_post_hook_version(path, bare),
1447 'post_version': get_git_post_hook_version(path, bare),
1446 }
1448 }
1447
1449
1448 @reraise_safe_exceptions
1450 @reraise_safe_exceptions
1449 def set_head_ref(self, wire, head_name):
1451 def set_head_ref(self, wire, head_name):
1450 log.debug('Setting refs/head to `%s`', head_name)
1452 log.debug('Setting refs/head to `%s`', head_name)
1451 repo_init = self._factory.repo_libgit2(wire)
1453 repo_init = self._factory.repo_libgit2(wire)
1452 with repo_init as repo:
1454 with repo_init as repo:
1453 repo.set_head(f'refs/heads/{head_name}')
1455 repo.set_head(f'refs/heads/{head_name}')
1454
1456
1455 return [head_name] + [f'set HEAD to refs/heads/{head_name}']
1457 return [head_name] + [f'set HEAD to refs/heads/{head_name}']
1456
1458
1457 @reraise_safe_exceptions
1459 @reraise_safe_exceptions
1458 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
1460 def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
1459 archive_dir_name, commit_id, cache_config):
1461 archive_dir_name, commit_id, cache_config):
1460
1462
1461 def file_walker(_commit_id, path):
1463 def file_walker(_commit_id, path):
1462 repo_init = self._factory.repo_libgit2(wire)
1464 repo_init = self._factory.repo_libgit2(wire)
1463
1465
1464 with repo_init as repo:
1466 with repo_init as repo:
1465 commit = repo[commit_id]
1467 commit = repo[commit_id]
1466
1468
1467 if path in ['', '/']:
1469 if path in ['', '/']:
1468 tree = commit.tree
1470 tree = commit.tree
1469 else:
1471 else:
1470 tree = commit.tree[path.rstrip('/')]
1472 tree = commit.tree[path.rstrip('/')]
1471 tree_id = tree.id.hex
1473 tree_id = tree.id.hex
1472 try:
1474 try:
1473 tree = repo[tree_id]
1475 tree = repo[tree_id]
1474 except KeyError:
1476 except KeyError:
1475 raise ObjectMissing(f'No tree with id: {tree_id}')
1477 raise ObjectMissing(f'No tree with id: {tree_id}')
1476
1478
1477 index = LibGit2Index.Index()
1479 index = LibGit2Index.Index()
1478 index.read_tree(tree)
1480 index.read_tree(tree)
1479 file_iter = index
1481 file_iter = index
1480
1482
1481 for file_node in file_iter:
1483 for file_node in file_iter:
1482 file_path = file_node.path
1484 file_path = file_node.path
1483 mode = file_node.mode
1485 mode = file_node.mode
1484 is_link = stat.S_ISLNK(mode)
1486 is_link = stat.S_ISLNK(mode)
1485 if mode == pygit2.GIT_FILEMODE_COMMIT:
1487 if mode == pygit2.GIT_FILEMODE_COMMIT:
1486 log.debug('Skipping path %s as a commit node', file_path)
1488 log.debug('Skipping path %s as a commit node', file_path)
1487 continue
1489 continue
1488 yield ArchiveNode(file_path, mode, is_link, repo[file_node.hex].read_raw)
1490 yield ArchiveNode(file_path, mode, is_link, repo[file_node.hex].read_raw)
1489
1491
1490 return store_archive_in_cache(
1492 return store_archive_in_cache(
1491 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
1493 file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
@@ -1,946 +1,954 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2023 RhodeCode GmbH
2 # Copyright (C) 2014-2023 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 import os
19 import os
20 import subprocess
20 import subprocess
21 from urllib.error import URLError
21 from urllib.error import URLError
22 import urllib.parse
22 import urllib.parse
23 import logging
23 import logging
24 import posixpath as vcspath
24 import posixpath as vcspath
25 import io
25 import io
26 import urllib.request
26 import urllib.request
27 import urllib.parse
27 import urllib.parse
28 import urllib.error
28 import urllib.error
29 import traceback
29 import traceback
30
30
31
31
32 import svn.client # noqa
32 import svn.client # noqa
33 import svn.core # noqa
33 import svn.core # noqa
34 import svn.delta # noqa
34 import svn.delta # noqa
35 import svn.diff # noqa
35 import svn.diff # noqa
36 import svn.fs # noqa
36 import svn.fs # noqa
37 import svn.repos # noqa
37 import svn.repos # noqa
38
38
39 import rhodecode
39 import rhodecode
40 from vcsserver import svn_diff, exceptions, subprocessio, settings
40 from vcsserver import svn_diff, exceptions, subprocessio, settings
41 from vcsserver.base import (
41 from vcsserver.base import (
42 RepoFactory,
42 RepoFactory,
43 raise_from_original,
43 raise_from_original,
44 ArchiveNode,
44 ArchiveNode,
45 store_archive_in_cache,
45 store_archive_in_cache,
46 BytesEnvelope,
46 BytesEnvelope,
47 BinaryEnvelope,
47 BinaryEnvelope,
48 )
48 )
49 from vcsserver.exceptions import NoContentException
49 from vcsserver.exceptions import NoContentException
50 from vcsserver.str_utils import safe_str, safe_bytes
50 from vcsserver.str_utils import safe_str, safe_bytes
51 from vcsserver.type_utils import assert_bytes
51 from vcsserver.type_utils import assert_bytes
52 from vcsserver.vcs_base import RemoteBase
52 from vcsserver.vcs_base import RemoteBase
53 from vcsserver.lib.svnremoterepo import svnremoterepo
53 from vcsserver.lib.svnremoterepo import svnremoterepo
54
54
log = logging.getLogger(__name__)


# Maps the historical fs-format compatibility flags (as found in existing
# repositories) to the svn release number they correspond to.
svn_compatible_versions_map = {
    'pre-1.4-compatible': '1.3',
    'pre-1.5-compatible': '1.4',
    'pre-1.6-compatible': '1.5',
    'pre-1.8-compatible': '1.7',
    'pre-1.9-compatible': '1.8',
}

# Default 'compatible-version' fs config used when creating new repositories.
current_compatible_version = '1.14'
67
67
68
68
def reraise_safe_exceptions(func):
    """Decorator for converting svn exceptions to something neutral."""
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as org_exc:
            if hasattr(org_exc, '_vcs_kind'):
                # already a vcs-aware exception, safe to propagate as-is
                raise
            log.exception("Unhandled exception in svn remote call")
            raise_from_original(exceptions.UnhandledException(org_exc), org_exc)
            raise
    return wrapper
80
80
81
81
class SubversionFactory(RepoFactory):
    """Factory that opens or creates svn repository objects for a wire config."""

    repo_type = 'svn'

    def _create_repo(self, wire, create, compatible_version):
        path = svn.core.svn_path_canonicalize(wire['path'])
        if create:
            fs_config = {'compatible-version': current_compatible_version}
            if compatible_version:
                # translate legacy compatibility flags, fall back to the
                # literal value when it is not in the map
                fs_config['compatible-version'] = (
                    svn_compatible_versions_map.get(compatible_version)
                    or compatible_version)

            log.debug('Create SVN repo with config `%s`', fs_config)
            repo = svn.repos.create(path, "", "", None, fs_config)
        else:
            repo = svn.repos.open(path)

        log.debug('repository created: got SVN object: %s', repo)
        return repo

    def repo(self, wire, create=False, compatible_version=None):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, compatible_version)
109
109
110
110
# Translate svn node-kind constants to the node-type strings returned to callers.
NODE_TYPE_MAPPING = {
    svn.core.svn_node_file: 'file',
    svn.core.svn_node_dir: 'dir',
}
115
115
116
116
117 class SvnRemote(RemoteBase):
117 class SvnRemote(RemoteBase):
118
118
    def __init__(self, factory, hg_factory=None):
        # factory: SubversionFactory used to open/create the repo per call.
        # hg_factory is accepted for signature symmetry with other remotes
        # and is not stored here.
        self._factory = factory

        # Bulk commit-attribute resolvers (none implemented for SVN).
        self._bulk_methods = {
            # NOT supported in SVN ATM...
        }
        # Per-file attribute resolvers used by bulk_file_request().
        self._bulk_file_methods = {
            "size": self.get_file_size,
            "data": self.get_file_content,
            "flags": self.get_node_type,
            "is_binary": self.is_binary,
            "md5": self.md5_hash
        }
132
132
    @reraise_safe_exceptions
    def bulk_file_request(self, wire, commit_id, path, pre_load):
        """
        Resolve several file attributes (named in ``pre_load``) for ``path``
        at ``commit_id`` in a single, cacheable call.

        Returns a BinaryEnvelope wrapping a dict of attribute -> value.
        Raises a VcsException-derived error for unknown attribute names.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        # since we use unified API, we need to cast from str to int for SVN
        commit_id = int(commit_id)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_file_request(_repo_id, _commit_id, _path, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_file_methods[attr]
                    wire.update({'cache': False})  # disable cache for bulk calls so we don't double cache
                    result[attr] = method(wire, _commit_id, _path)
                except KeyError as e:
                    raise exceptions.VcsException(e)(f'Unknown bulk attribute: "{attr}"')
            return result

        # pre_load is sorted so equivalent attribute sets share a cache key
        return BinaryEnvelope(_bulk_file_request(repo_id, commit_id, path, sorted(pre_load)))
154
154
155 @reraise_safe_exceptions
155 @reraise_safe_exceptions
156 def discover_svn_version(self):
156 def discover_svn_version(self):
157 try:
157 try:
158 import svn.core
158 import svn.core
159 svn_ver = svn.core.SVN_VERSION
159 svn_ver = svn.core.SVN_VERSION
160 except ImportError:
160 except ImportError:
161 svn_ver = None
161 svn_ver = None
162 return safe_str(svn_ver)
162 return safe_str(svn_ver)
163
163
164 @reraise_safe_exceptions
164 @reraise_safe_exceptions
165 def is_empty(self, wire):
165 def is_empty(self, wire):
166 try:
166 try:
167 return self.lookup(wire, -1) == 0
167 return self.lookup(wire, -1) == 0
168 except Exception:
168 except Exception:
169 log.exception("failed to read object_store")
169 log.exception("failed to read object_store")
170 return False
170 return False
171
171
    def check_url(self, url, config):
        """
        Validate that ``url`` points at a reachable Subversion repository.

        Returns True on success; raises URLError when the remote cannot be
        reached or is not an svn repo. ``config`` is accepted for API
        symmetry with other backends and is not used here.
        """

        # uuid function gets only valid UUID from proper repo, else
        # throws exception
        username, password, src_url = self.get_url_and_credentials(url)
        try:
            svnremoterepo(safe_bytes(username), safe_bytes(password), safe_bytes(src_url)).svn().uuid
        except Exception:
            tb = traceback.format_exc()
            log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
            raise URLError(f'"{url}" is not a valid Subversion source url.')
        return True
184
184
    def is_path_valid_repository(self, wire, path):
        """
        Return True when ``path`` is an openable Subversion repository.

        The expensive ``svn.repos.open`` probe is cached per
        (context_uid, repo_id) in the dogpile region, so repeated checks for
        the same wire config are cheap.
        """
        # NOTE(marcink): short circuit the check for SVN repo
        # the repos.open might be expensive to check, but we have one cheap
        # pre-condition that we can use, to check for 'format' file
        if not os.path.isfile(os.path.join(path, 'format')):
            return False

        cache_on, context_uid, repo_id = self._cache_on(wire)
        region = self._region(wire)

        @region.conditional_cache_on_arguments(condition=cache_on)
        def _assert_correct_path(_context_uid, _repo_id, fast_check):
            # fast_check only participates in the cache key; always True here

            try:
                svn.repos.open(path)
            except svn.core.SubversionException:
                tb = traceback.format_exc()
                log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
                return False
            return True

        return _assert_correct_path(context_uid, repo_id, True)
199
207
200 @reraise_safe_exceptions
208 @reraise_safe_exceptions
201 def verify(self, wire,):
209 def verify(self, wire,):
202 repo_path = wire['path']
210 repo_path = wire['path']
203 if not self.is_path_valid_repository(wire, repo_path):
211 if not self.is_path_valid_repository(wire, repo_path):
204 raise Exception(
212 raise Exception(
205 f"Path {repo_path} is not a valid Subversion repository.")
213 f"Path {repo_path} is not a valid Subversion repository.")
206
214
207 cmd = ['svnadmin', 'info', repo_path]
215 cmd = ['svnadmin', 'info', repo_path]
208 stdout, stderr = subprocessio.run_command(cmd)
216 stdout, stderr = subprocessio.run_command(cmd)
209 return stdout
217 return stdout
210
218
211 @reraise_safe_exceptions
219 @reraise_safe_exceptions
212 def lookup(self, wire, revision):
220 def lookup(self, wire, revision):
213 if revision not in [-1, None, 'HEAD']:
221 if revision not in [-1, None, 'HEAD']:
214 raise NotImplementedError
222 raise NotImplementedError
215 repo = self._factory.repo(wire)
223 repo = self._factory.repo(wire)
216 fs_ptr = svn.repos.fs(repo)
224 fs_ptr = svn.repos.fs(repo)
217 head = svn.fs.youngest_rev(fs_ptr)
225 head = svn.fs.youngest_rev(fs_ptr)
218 return head
226 return head
219
227
220 @reraise_safe_exceptions
228 @reraise_safe_exceptions
221 def lookup_interval(self, wire, start_ts, end_ts):
229 def lookup_interval(self, wire, start_ts, end_ts):
222 repo = self._factory.repo(wire)
230 repo = self._factory.repo(wire)
223 fsobj = svn.repos.fs(repo)
231 fsobj = svn.repos.fs(repo)
224 start_rev = None
232 start_rev = None
225 end_rev = None
233 end_rev = None
226 if start_ts:
234 if start_ts:
227 start_ts_svn = apr_time_t(start_ts)
235 start_ts_svn = apr_time_t(start_ts)
228 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
236 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
229 else:
237 else:
230 start_rev = 1
238 start_rev = 1
231 if end_ts:
239 if end_ts:
232 end_ts_svn = apr_time_t(end_ts)
240 end_ts_svn = apr_time_t(end_ts)
233 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
241 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
234 else:
242 else:
235 end_rev = svn.fs.youngest_rev(fsobj)
243 end_rev = svn.fs.youngest_rev(fsobj)
236 return start_rev, end_rev
244 return start_rev, end_rev
237
245
238 @reraise_safe_exceptions
246 @reraise_safe_exceptions
239 def revision_properties(self, wire, revision):
247 def revision_properties(self, wire, revision):
240
248
241 cache_on, context_uid, repo_id = self._cache_on(wire)
249 cache_on, context_uid, repo_id = self._cache_on(wire)
242 region = self._region(wire)
250 region = self._region(wire)
243
251
244 @region.conditional_cache_on_arguments(condition=cache_on)
252 @region.conditional_cache_on_arguments(condition=cache_on)
245 def _revision_properties(_repo_id, _revision):
253 def _revision_properties(_repo_id, _revision):
246 repo = self._factory.repo(wire)
254 repo = self._factory.repo(wire)
247 fs_ptr = svn.repos.fs(repo)
255 fs_ptr = svn.repos.fs(repo)
248 return svn.fs.revision_proplist(fs_ptr, revision)
256 return svn.fs.revision_proplist(fs_ptr, revision)
249 return _revision_properties(repo_id, revision)
257 return _revision_properties(repo_id, revision)
250
258
    def revision_changes(self, wire, revision):
        """
        Return the file paths touched by ``revision`` as a dict with the
        keys 'added', 'changed' and 'removed'. Directory nodes are skipped.
        """
        repo = self._factory.repo(wire)
        fsobj = svn.repos.fs(repo)
        rev_root = svn.fs.revision_root(fsobj, revision)

        # replay the revision through a ChangeCollector editor to collect
        # per-path change records into editor.changes
        editor = svn.repos.ChangeCollector(fsobj, rev_root)
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        base_dir = ""
        send_deltas = False
        svn.repos.replay2(
            rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
            editor_ptr, editor_baton, None)

        added = []
        changed = []
        removed = []

        # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
        for path, change in editor.changes.items():
            # TODO: Decide what to do with directory nodes. Subversion can add
            # empty directories.

            if change.item_kind == svn.core.svn_node_dir:
                continue
            if change.action in [svn.repos.CHANGE_ACTION_ADD]:
                added.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
                                   svn.repos.CHANGE_ACTION_REPLACE]:
                changed.append(path)
            elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
                removed.append(path)
            else:
                raise NotImplementedError(
                    "Action {} not supported on path {}".format(
                        change.action, path))

        changes = {
            'added': added,
            'changed': changed,
            'removed': removed,
        }
        return changes
294
302
295 @reraise_safe_exceptions
303 @reraise_safe_exceptions
296 def node_history(self, wire, path, revision, limit):
304 def node_history(self, wire, path, revision, limit):
297 cache_on, context_uid, repo_id = self._cache_on(wire)
305 cache_on, context_uid, repo_id = self._cache_on(wire)
298 region = self._region(wire)
306 region = self._region(wire)
299
307
300 @region.conditional_cache_on_arguments(condition=cache_on)
308 @region.conditional_cache_on_arguments(condition=cache_on)
301 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
309 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
302 cross_copies = False
310 cross_copies = False
303 repo = self._factory.repo(wire)
311 repo = self._factory.repo(wire)
304 fsobj = svn.repos.fs(repo)
312 fsobj = svn.repos.fs(repo)
305 rev_root = svn.fs.revision_root(fsobj, revision)
313 rev_root = svn.fs.revision_root(fsobj, revision)
306
314
307 history_revisions = []
315 history_revisions = []
308 history = svn.fs.node_history(rev_root, path)
316 history = svn.fs.node_history(rev_root, path)
309 history = svn.fs.history_prev(history, cross_copies)
317 history = svn.fs.history_prev(history, cross_copies)
310 while history:
318 while history:
311 __, node_revision = svn.fs.history_location(history)
319 __, node_revision = svn.fs.history_location(history)
312 history_revisions.append(node_revision)
320 history_revisions.append(node_revision)
313 if limit and len(history_revisions) >= limit:
321 if limit and len(history_revisions) >= limit:
314 break
322 break
315 history = svn.fs.history_prev(history, cross_copies)
323 history = svn.fs.history_prev(history, cross_copies)
316 return history_revisions
324 return history_revisions
317 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
325 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
318
326
319 @reraise_safe_exceptions
327 @reraise_safe_exceptions
320 def node_properties(self, wire, path, revision):
328 def node_properties(self, wire, path, revision):
321 cache_on, context_uid, repo_id = self._cache_on(wire)
329 cache_on, context_uid, repo_id = self._cache_on(wire)
322 region = self._region(wire)
330 region = self._region(wire)
323
331
324 @region.conditional_cache_on_arguments(condition=cache_on)
332 @region.conditional_cache_on_arguments(condition=cache_on)
325 def _node_properties(_repo_id, _path, _revision):
333 def _node_properties(_repo_id, _path, _revision):
326 repo = self._factory.repo(wire)
334 repo = self._factory.repo(wire)
327 fsobj = svn.repos.fs(repo)
335 fsobj = svn.repos.fs(repo)
328 rev_root = svn.fs.revision_root(fsobj, revision)
336 rev_root = svn.fs.revision_root(fsobj, revision)
329 return svn.fs.node_proplist(rev_root, path)
337 return svn.fs.node_proplist(rev_root, path)
330 return _node_properties(repo_id, path, revision)
338 return _node_properties(repo_id, path, revision)
331
339
    def file_annotate(self, wire, path, revision):
        """
        Return blame information for ``path`` at ``revision`` as a
        BinaryEnvelope of (line_no, revision, line) tuples.

        Raises a plain Exception when blame is unsupported or the file does
        not exist at that path.
        """
        # blame operates over the repository filesystem via a file:// URI
        abs_path = 'file://' + urllib.request.pathname2url(
            vcspath.join(wire['path'], path))
        file_uri = svn.core.svn_path_canonicalize(abs_path)

        start_rev = svn_opt_revision_value_t(0)
        peg_rev = svn_opt_revision_value_t(revision)
        end_rev = peg_rev

        annotations = []

        # callback invoked once per line by svn.client.blame2
        def receiver(line_no, revision, author, date, line, pool):
            annotations.append((line_no, revision, line))

        # TODO: Cannot use blame5, missing typemap function in the swig code
        try:
            svn.client.blame2(
                file_uri, peg_rev, start_rev, end_rev,
                receiver, svn.client.create_context())
        except svn.core.SubversionException as exc:
            log.exception("Error during blame operation.")
            raise Exception(
                f"Blame not supported or file does not exist at path {path}. "
                f"Error {exc}.")

        return BinaryEnvelope(annotations)
358
366
359 @reraise_safe_exceptions
367 @reraise_safe_exceptions
360 def get_node_type(self, wire, revision=None, path=''):
368 def get_node_type(self, wire, revision=None, path=''):
361
369
362 cache_on, context_uid, repo_id = self._cache_on(wire)
370 cache_on, context_uid, repo_id = self._cache_on(wire)
363 region = self._region(wire)
371 region = self._region(wire)
364
372
365 @region.conditional_cache_on_arguments(condition=cache_on)
373 @region.conditional_cache_on_arguments(condition=cache_on)
366 def _get_node_type(_repo_id, _revision, _path):
374 def _get_node_type(_repo_id, _revision, _path):
367 repo = self._factory.repo(wire)
375 repo = self._factory.repo(wire)
368 fs_ptr = svn.repos.fs(repo)
376 fs_ptr = svn.repos.fs(repo)
369 if _revision is None:
377 if _revision is None:
370 _revision = svn.fs.youngest_rev(fs_ptr)
378 _revision = svn.fs.youngest_rev(fs_ptr)
371 root = svn.fs.revision_root(fs_ptr, _revision)
379 root = svn.fs.revision_root(fs_ptr, _revision)
372 node = svn.fs.check_path(root, path)
380 node = svn.fs.check_path(root, path)
373 return NODE_TYPE_MAPPING.get(node, None)
381 return NODE_TYPE_MAPPING.get(node, None)
374 return _get_node_type(repo_id, revision, path)
382 return _get_node_type(repo_id, revision, path)
375
383
376 @reraise_safe_exceptions
384 @reraise_safe_exceptions
377 def get_nodes(self, wire, revision=None, path=''):
385 def get_nodes(self, wire, revision=None, path=''):
378
386
379 cache_on, context_uid, repo_id = self._cache_on(wire)
387 cache_on, context_uid, repo_id = self._cache_on(wire)
380 region = self._region(wire)
388 region = self._region(wire)
381
389
382 @region.conditional_cache_on_arguments(condition=cache_on)
390 @region.conditional_cache_on_arguments(condition=cache_on)
383 def _get_nodes(_repo_id, _path, _revision):
391 def _get_nodes(_repo_id, _path, _revision):
384 repo = self._factory.repo(wire)
392 repo = self._factory.repo(wire)
385 fsobj = svn.repos.fs(repo)
393 fsobj = svn.repos.fs(repo)
386 if _revision is None:
394 if _revision is None:
387 _revision = svn.fs.youngest_rev(fsobj)
395 _revision = svn.fs.youngest_rev(fsobj)
388 root = svn.fs.revision_root(fsobj, _revision)
396 root = svn.fs.revision_root(fsobj, _revision)
389 entries = svn.fs.dir_entries(root, path)
397 entries = svn.fs.dir_entries(root, path)
390 result = []
398 result = []
391 for entry_path, entry_info in entries.items():
399 for entry_path, entry_info in entries.items():
392 result.append(
400 result.append(
393 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
401 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
394 return result
402 return result
395 return _get_nodes(repo_id, path, revision)
403 return _get_nodes(repo_id, path, revision)
396
404
397 @reraise_safe_exceptions
405 @reraise_safe_exceptions
398 def get_file_content(self, wire, rev=None, path=''):
406 def get_file_content(self, wire, rev=None, path=''):
399 repo = self._factory.repo(wire)
407 repo = self._factory.repo(wire)
400 fsobj = svn.repos.fs(repo)
408 fsobj = svn.repos.fs(repo)
401
409
402 if rev is None:
410 if rev is None:
403 rev = svn.fs.youngest_rev(fsobj)
411 rev = svn.fs.youngest_rev(fsobj)
404
412
405 root = svn.fs.revision_root(fsobj, rev)
413 root = svn.fs.revision_root(fsobj, rev)
406 content = svn.core.Stream(svn.fs.file_contents(root, path))
414 content = svn.core.Stream(svn.fs.file_contents(root, path))
407 return BytesEnvelope(content.read())
415 return BytesEnvelope(content.read())
408
416
409 @reraise_safe_exceptions
417 @reraise_safe_exceptions
410 def get_file_size(self, wire, revision=None, path=''):
418 def get_file_size(self, wire, revision=None, path=''):
411
419
412 cache_on, context_uid, repo_id = self._cache_on(wire)
420 cache_on, context_uid, repo_id = self._cache_on(wire)
413 region = self._region(wire)
421 region = self._region(wire)
414
422
415 @region.conditional_cache_on_arguments(condition=cache_on)
423 @region.conditional_cache_on_arguments(condition=cache_on)
416 def _get_file_size(_repo_id, _revision, _path):
424 def _get_file_size(_repo_id, _revision, _path):
417 repo = self._factory.repo(wire)
425 repo = self._factory.repo(wire)
418 fsobj = svn.repos.fs(repo)
426 fsobj = svn.repos.fs(repo)
419 if _revision is None:
427 if _revision is None:
420 _revision = svn.fs.youngest_revision(fsobj)
428 _revision = svn.fs.youngest_revision(fsobj)
421 root = svn.fs.revision_root(fsobj, _revision)
429 root = svn.fs.revision_root(fsobj, _revision)
422 size = svn.fs.file_length(root, path)
430 size = svn.fs.file_length(root, path)
423 return size
431 return size
424 return _get_file_size(repo_id, revision, path)
432 return _get_file_size(repo_id, revision, path)
425
433
    def create_repository(self, wire, compatible_version=None):
        """
        Create a new svn repository at wire['path'], optionally pinned to a
        legacy fs 'compatible-version' (see svn_compatible_versions_map).
        """
        log.info('Creating Subversion repository in path "%s"', wire['path'])
        self._factory.repo(wire, create=True,
                           compatible_version=compatible_version)
430
438
431 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
439 def get_url_and_credentials(self, src_url) -> tuple[str, str, str]:
432 obj = urllib.parse.urlparse(src_url)
440 obj = urllib.parse.urlparse(src_url)
433 username = obj.username or ''
441 username = obj.username or ''
434 password = obj.password or ''
442 password = obj.password or ''
435 return username, password, src_url
443 return username, password, src_url
436
444
437 def import_remote_repository(self, wire, src_url):
445 def import_remote_repository(self, wire, src_url):
438 repo_path = wire['path']
446 repo_path = wire['path']
439 if not self.is_path_valid_repository(wire, repo_path):
447 if not self.is_path_valid_repository(wire, repo_path):
440 raise Exception(
448 raise Exception(
441 f"Path {repo_path} is not a valid Subversion repository.")
449 f"Path {repo_path} is not a valid Subversion repository.")
442
450
443 username, password, src_url = self.get_url_and_credentials(src_url)
451 username, password, src_url = self.get_url_and_credentials(src_url)
444 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
452 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
445 '--trust-server-cert-failures=unknown-ca']
453 '--trust-server-cert-failures=unknown-ca']
446 if username and password:
454 if username and password:
447 rdump_cmd += ['--username', username, '--password', password]
455 rdump_cmd += ['--username', username, '--password', password]
448 rdump_cmd += [src_url]
456 rdump_cmd += [src_url]
449
457
450 rdump = subprocess.Popen(
458 rdump = subprocess.Popen(
451 rdump_cmd,
459 rdump_cmd,
452 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
460 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
453 load = subprocess.Popen(
461 load = subprocess.Popen(
454 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
462 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
455
463
456 # TODO: johbo: This can be a very long operation, might be better
464 # TODO: johbo: This can be a very long operation, might be better
457 # to track some kind of status and provide an api to check if the
465 # to track some kind of status and provide an api to check if the
458 # import is done.
466 # import is done.
459 rdump.wait()
467 rdump.wait()
460 load.wait()
468 load.wait()
461
469
462 log.debug('Return process ended with code: %s', rdump.returncode)
470 log.debug('Return process ended with code: %s', rdump.returncode)
463 if rdump.returncode != 0:
471 if rdump.returncode != 0:
464 errors = rdump.stderr.read()
472 errors = rdump.stderr.read()
465 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
473 log.error('svnrdump dump failed: statuscode %s: message: %s', rdump.returncode, errors)
466
474
467 reason = 'UNKNOWN'
475 reason = 'UNKNOWN'
468 if b'svnrdump: E230001:' in errors:
476 if b'svnrdump: E230001:' in errors:
469 reason = 'INVALID_CERTIFICATE'
477 reason = 'INVALID_CERTIFICATE'
470
478
471 if reason == 'UNKNOWN':
479 if reason == 'UNKNOWN':
472 reason = f'UNKNOWN:{safe_str(errors)}'
480 reason = f'UNKNOWN:{safe_str(errors)}'
473
481
474 raise Exception(
482 raise Exception(
475 'Failed to dump the remote repository from {}. Reason:{}'.format(
483 'Failed to dump the remote repository from {}. Reason:{}'.format(
476 src_url, reason))
484 src_url, reason))
477 if load.returncode != 0:
485 if load.returncode != 0:
478 raise Exception(
486 raise Exception(
479 f'Failed to load the dump of remote repository from {src_url}.')
487 f'Failed to load the dump of remote repository from {src_url}.')
480
488
def commit(self, wire, message, author, timestamp, updated, removed):
    """
    Create a new revision in the repository addressed by ``wire``.

    Builds a Subversion fs transaction on top of the youngest revision,
    applies all node changes, commits the transaction and (optionally)
    back-dates it.

    :param wire: connection descriptor; ``wire['path']`` is the repo path.
    :param message: commit message (coerced to bytes).
    :param author: commit author (coerced to bytes).
    :param timestamp: if truthy, written as the ``svn:date`` revision prop.
    :param updated: iterable of node dicts to add/update
        (see ``TxnNodeProcessor``).
    :param removed: iterable of node dicts to delete.
    :return: the new revision number produced by the commit.
    """
    # svn bindings require bytes for author/message
    message = safe_bytes(message)
    author = safe_bytes(author)

    repo = self._factory.repo(wire)
    fsobj = svn.repos.fs(repo)

    # begin the transaction on top of the current youngest revision
    rev = svn.fs.youngest_rev(fsobj)
    txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
    txn_root = svn.fs.txn_root(txn)

    for node in updated:
        TxnNodeProcessor(node, txn_root).update()
    for node in removed:
        TxnNodeProcessor(node, txn_root).remove()

    commit_id = svn.repos.fs_commit_txn(repo, txn)

    if timestamp:
        # override the auto-generated svn:date with the requested timestamp
        apr_time = apr_time_t(timestamp)
        ts_formatted = svn.core.svn_time_to_cstring(apr_time)
        svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)

    log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
    return commit_id
507
515
@reraise_safe_exceptions
def diff(self, wire, rev1, rev2, path1=None, path2=None,
         ignore_whitespace=False, context=3):
    """
    Generate a unified diff between two revisions.

    :param rev1: source revision number.
    :param rev2: target revision number.
    :param path1: optional source path; defaults to ``path2`` inside
        ``SvnDiffer``.
    :param path2: optional target path ('' means repository root).
    :param ignore_whitespace: ignore blank-line/space changes.
    :param context: number of context lines in the hunks.
    :return: ``BytesEnvelope`` with the diff, empty on Subversion errors
        (e.g. non-existing paths) to mirror git/hg behaviour.
    """
    # diffs are never served from cache
    wire.update(cache=False)
    repo = self._factory.repo(wire)
    diff_creator = SvnDiffer(
        repo, rev1, path1, rev2, path2, ignore_whitespace, context)
    try:
        return BytesEnvelope(diff_creator.generate_diff())
    except svn.core.SubversionException:
        # fixed duplicated word in the log message; the unused exception
        # binding was dropped — log.exception() captures the traceback itself
        log.exception(
            "Error during diff operation. "
            "Path might not exist %s, %s", path1, path2)
        return BytesEnvelope(b'')
523
531
@reraise_safe_exceptions
def is_large_file(self, wire, path):
    """Always ``False`` — SVN has no largefiles concept to report."""
    return False
527
535
@reraise_safe_exceptions
def is_binary(self, wire, rev, path):
    """
    Heuristic binary check for a file node: the file is considered binary
    when its content contains a NUL byte. Empty files are not binary.
    Result is cached per (repo_id, rev, path) when caching is enabled.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    # NOTE: the inner function name and argument names form the cache key
    # namespace, so they must stay stable.
    @region.conditional_cache_on_arguments(condition=cache_on)
    def _is_binary(_repo_id, _rev, _path):
        content = self.get_file_content(wire, rev, path)
        return bool(content) and b'\0' in content

    return _is_binary(repo_id, rev, path)
541
549
@reraise_safe_exceptions
def md5_hash(self, wire, rev, path):
    """
    MD5 of a file node — not implemented for SVN; always returns ''.
    Kept (with caching scaffolding) for API parity with the other backends.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    # NOTE: inner function name/args form the cache key namespace.
    @region.conditional_cache_on_arguments(condition=cache_on)
    def _md5_hash(_repo_id, _rev, _path):
        # intentionally empty digest for the SVN backend
        return ''

    return _md5_hash(repo_id, rev, path)
552
560
@reraise_safe_exceptions
def run_svn_command(self, wire, cmd, **opts):
    """
    Run an external svn command and return ``(stdout, stderr)``.

    :param wire: connection descriptor; ``wire['path']`` (if it is an
        existing directory) becomes the working directory of the process.
    :param cmd: command argv list (executed with ``shell=False``).
    :param opts: extra options; special keys:
        ``_safe`` — when truthy, OSError is swallowed and returned as
        ``('', error_text)`` instead of raising;
        ``extra_env`` — dict merged into a copy of ``os.environ``.
        Remaining keys are passed to ``SubprocessIOChunker``.
    :raises exceptions.VcsException: when the process cannot be started
        and ``_safe`` was not set.
    """
    path = wire.get('path', None)
    debug_mode = rhodecode.ConfigGet().get_bool('debug')

    if path and os.path.isdir(path):
        opts['cwd'] = path

    safe_call = opts.pop('_safe', False)

    # inherit the server environment, plus caller-provided overrides
    svnenv = os.environ.copy()
    svnenv.update(opts.pop('extra_env', {}))

    _opts = {'env': svnenv, 'shell': False}

    try:
        _opts.update(opts)
        proc = subprocessio.SubprocessIOChunker(cmd, **_opts)

        # drain both streams fully before returning
        return b''.join(proc), b''.join(proc.stderr)
    except OSError as err:
        if safe_call:
            # NOTE(review): this branch returns str ('' and the error text)
            # while the success path returns bytes — confirm callers of
            # _safe handle both types.
            return '', safe_str(err).strip()
        else:
            cmd = ' '.join(map(safe_str, cmd))  # human friendly CMD
            call_opts = {}
            if debug_mode:
                # only expose the full call options (incl. env) in debug mode
                call_opts = _opts

            tb_err = ("Couldn't run svn command ({}).\n"
                      "Original error was:{}\n"
                      "Call options:{}\n"
                      .format(cmd, err, call_opts))
            log.exception(tb_err)
            raise exceptions.VcsException()(tb_err)
588
596
@reraise_safe_exceptions
def install_hooks(self, wire, force=False):
    """
    Install the RhodeCode SVN hooks into the repository at ``wire['path']``.

    :param force: recreate the hooks even if they already exist.
    :return: whatever ``install_svn_hooks`` reports (installed hook info).
    """
    from vcsserver.hook_utils import install_svn_hooks
    repo_path = wire['path']
    # NOTE: a previously computed `executable` (settings.BINARY_DIR +
    # 'python3') was never passed anywhere — dead code removed.
    return install_svn_hooks(repo_path, force_create=force)
598
606
@reraise_safe_exceptions
def get_hooks_info(self, wire):
    """
    Report the versions of the installed pre/post commit hooks for the
    repository at ``wire['path']``.
    """
    from vcsserver.hook_utils import (
        get_svn_pre_hook_version, get_svn_post_hook_version)
    path = wire['path']
    return {
        'pre_version': get_svn_pre_hook_version(path),
        'post_version': get_svn_post_hook_version(path),
    }
608
616
@reraise_safe_exceptions
def set_head_ref(self, wire, head_name):
    """Intentional no-op — SVN has no movable HEAD reference to set."""
612
620
@reraise_safe_exceptions
def archive_repo(self, wire, archive_name_key, kind, mtime, archive_at_path,
                 archive_dir_name, commit_id, cache_config):
    """
    Build (and cache) an archive of the repository tree at ``commit_id``.

    Walks the svn filesystem tree rooted at ``archive_at_path`` and feeds
    ``ArchiveNode`` entries into ``store_archive_in_cache``, which performs
    the actual archive creation/caching.

    :param archive_name_key: cache key for the generated archive.
    :param kind: archive format understood by ``store_archive_in_cache``.
    :param mtime: modification time stamped into the archive.
    :param archive_at_path: sub-path of the tree to archive.
    :param archive_dir_name: top-level directory name inside the archive.
    :param commit_id: revision number to archive (converted with int()).
    :param cache_config: cache backend configuration.
    """

    def walk_tree(root, root_dir, _commit_id):
        """
        Special recursive svn repo walker
        """
        root_dir = safe_bytes(root_dir)

        # git-style mode constants: regular file vs executable file
        filemode_default = 0o100644
        filemode_executable = 0o100755

        file_iter = svn.fs.dir_entries(root, root_dir)
        for f_name in file_iter:
            f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)

            if f_type == 'dir':
                # return only DIR, and then all entries in that dir
                yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
                new_root = os.path.join(root_dir, f_name)
                yield from walk_tree(root, new_root, _commit_id)
            else:

                f_path = os.path.join(root_dir, f_name).rstrip(b'/')
                prop_list = svn.fs.node_proplist(root, f_path)

                # svn:executable property maps to the executable file mode
                f_mode = filemode_default
                if prop_list.get('svn:executable'):
                    f_mode = filemode_executable

                # svn:special marks symlinks
                f_is_link = False
                if prop_list.get('svn:special'):
                    f_is_link = True

                data = {
                    'is_link': f_is_link,
                    'mode': f_mode,
                    # bound read() of a lazy content stream; consumed later
                    # by the archive writer
                    'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
                }

                yield f_path, data, f_type

    def file_walker(_commit_id, path):
        # adapter: convert walk_tree() triples into ArchiveNode objects
        repo = self._factory.repo(wire)
        root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))

        def no_content():
            # directories carry no content; reading them is an error
            raise NoContentException()

        for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
            file_path = f_name

            if f_type == 'dir':
                mode = f_data['mode']
                yield ArchiveNode(file_path, mode, False, no_content)
            else:
                mode = f_data['mode']
                is_link = f_data['is_link']
                data_stream = f_data['content_stream']
                yield ArchiveNode(file_path, mode, is_link, data_stream)

    return store_archive_in_cache(
        file_walker, archive_name_key, kind, mtime, archive_at_path, archive_dir_name, commit_id, cache_config=cache_config)
677
685
678
686
class SvnDiffer:
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    # set per-node while generating; suppresses text hunks for binary files
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        self.src_rev = src_rev
        # empty source path defaults to the target path
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        """Reject diffs between nodes of different kinds (file vs dir)."""
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self) -> bytes:
        """Return the full unified diff for the configured node pair."""
        buf = io.BytesIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf: io.BytesIO):
        """Collect per-file changes via a delta editor, then diff each file."""
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '',  # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False,  # text_deltas
            svn.core.svn_depth_infinity,  # depth
            False,  # entry_props
            False,  # ignore_ancestry
        )

        # sorted for a deterministic diff order
        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf: io.BytesIO):
        """Diff a single file node; derive add/delete from node existence."""
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf: io.BytesIO, change, tgt_path, tgt_base, src_path, src_base):
        """
        Write a git-style diff header plus unified-diff hunks for one node
        into ``buf``. ``change`` is one of None/'add'/'delete'/'change'.
        """
        # both byte and str forms are needed: headers are written as bytes,
        # path joins below operate on str
        tgt_path_bytes = safe_bytes(tgt_path)
        tgt_path = safe_str(tgt_path)

        src_path_bytes = safe_bytes(src_path)
        src_path = safe_str(src_path)

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        # non-text svn:mime-type marks the node binary; emit a placeholder
        if mime_type and not mime_type.startswith(b'text'):
            self.binary_content = True
            buf.write(b"=" * 67 + b'\n')
            buf.write(b"Cannot display: file marked as a binary type.\n")
            buf.write(b"svn:mime-type = %s\n" % mime_type)
            buf.write(b"Index: %b\n" % tgt_path_bytes)
            buf.write(b"=" * 67 + b'\n')
        buf.write(b"diff --git a/%b b/%b\n" % (tgt_path_bytes, tgt_path_bytes))

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write(b"new file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write(b'GIT binary patch\n')

            buf.write(b"--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write(b"deleted file mode 10644\n")

            # TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write(b"--- a/%b\t(revision %d)\n" % (src_path_bytes, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write(b"+++ /dev/null\t(revision %d)\n" % self.tgt_rev)
            tgt_lines = []
        else:
            buf.write(b"+++ b/%b\t(revision %d)\n" % (tgt_path_bytes, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        # we made our diff header, time to generate the diff content into our buffer

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)

            buf.writelines(udiff)

    def _get_mime_type(self, path) -> bytes:
        """svn:mime-type of the node — target root first, source as fallback."""
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        """Content of a file/symlink node split into lines; [] otherwise."""
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()

        return content.splitlines(True)
845
853
846
854
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions
    """

    def __init__(self):
        # collected as (path, node_kind, change_type) tuples by the
        # svn delta driver callbacks below
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        # open_file is invoked for modified files
        self.changes.append((path, 'file', 'change'))
865
873
866
874
def authorization_callback_allow_all(root, path, pool):
    """svn_repos authz callback that unconditionally grants read access."""
    return True
869
877
870
878
class TxnNodeProcessor:
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        # node is a dict with at least a bytes 'path'; 'content' (bytes) and
        # 'properties' (dict) are used by update()
        assert_bytes(node['path'])

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Add or update the node: parents, file node, content, properties."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        """Delete the node from the transaction root."""
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # walk up until an existing dir is found, then create the missing
        # chain top-down
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        assert_bytes(self.node['content'])

        # replace the full content via a text delta against an empty base
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        properties = self.node.get('properties', {})
        for key, value in properties.items():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], safe_bytes(key), safe_bytes(value))
928
936
929
937
def apr_time_t(timestamp):
    """
    Convert a Python timestamp (seconds, int or float) into the APR
    timestamp type ``apr_time_t`` (microseconds, as int).
    """
    usec_per_sec = 1E6
    return int(timestamp * usec_per_sec)
935
943
936
944
def svn_opt_revision_value_t(num):
    """
    Wrap revision number ``num`` into an ``svn_opt_revision_t`` structure
    of kind ``svn_opt_revision_number``.
    """
    rev_value = svn.core.svn_opt_revision_value_t()
    rev_value.number = num

    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value = rev_value
    return rev
General Comments 0
You need to be logged in to leave comments. Login now