##// END OF EJS Templates
release: merge back stable branch into default
marcink -
r830:96a2fc09 merge default
parent child Browse files
Show More
@@ -1,59 +1,63 b''
1 1 c6fad7d1e61f22b1f4a4863eff207a04c27e9462 v4.0.0
2 2 77b6e243b4cc5b702c15abd6d737798edbac60dc v4.0.1
3 3 a359c072337fdd8e1e71df72cc520b8a9b042f80 v4.1.0
4 4 49aa7ed030a36b7ceba149a21e587cb5d20b4946 v4.1.1
5 5 f38ed1e1a31dce3c170b4d31585ba43471cf0705 v4.1.2
6 6 21269ba7bafd8f0c77e79dd86a31eb9bce7643d2 v4.2.0
7 7 b53930c918c25b2c8f69ceddc6641e511be27fd3 v4.2.1
8 8 6627ff4119723d8b2b60918e8b1aa49e9f055aab v4.3.0
9 9 d38f2c2b861dde6c4178923f7cf15ea58b85aa92 v4.3.1
10 10 1232313f9e6adac5ce5399c2a891dc1e72b79022 v4.4.0
11 11 cbb9f1d329ae5768379cdec55a62ebdd546c4e27 v4.4.1
12 12 24ffe44a27fcd1c5b6936144e176b9f6dd2f3a17 v4.4.2
13 13 beaeeaa440cd17471110d4621b8816506c0dff4a v4.5.0
14 14 668e5c656f61dd94595611844e1106d1361aa6a7 v4.5.1
15 15 ae0640240cb7a77e6dc8c77e64dd80d79732cb5b v4.5.2
16 16 7af06899f426813583fe60449d7517cc49c15b28 v4.6.0
17 17 8f7f4299bf341b43f94dadafa1ea73d6cea2c9ba v4.6.1
18 18 de00a831a0709ffaac57f948738ea927b97223a9 v4.7.0
19 19 57f527e0646d731768fb5e0fe742b12a35bdc63b v4.7.1
20 20 f9b09787da9845e4a105f4bffdc252099902cefb v4.7.2
21 21 0b7c790b726f08385e6ebdf4f257c905787b9244 v4.8.0
22 22 f4123e725b74d0e82fe89982ab8791a66062e2b3 v4.9.0
23 23 940bac044a0fe1ec839759df81399b50141be720 v4.9.1
24 24 582d9ebbe46bdddac4b26eacae36ee5ecabca267 v4.10.0
25 25 12fbd08d0ab57acce9c0bdccee75633cfa08d7f4 v4.10.1
26 26 78352f95021a9d128f5803fdbca7036daef5dabe v4.10.2
27 27 a47ccfb020cda78c8680e3844aaf0b82b1390f3b v4.10.3
28 28 347ae9ae544bba8deb417995285287a3b6be1611 v4.10.4
29 29 9b257ac49841f850434be0d518baca0827e6c8cc v4.10.5
30 30 e8bf26eea118694edc4ffe50c6c5aa91022bc434 v4.10.6
31 31 71fa9274ba59fb982104f0b9b3d0d024c78675f7 v4.11.0
32 32 92471577ef25636e5babe8001d47fc8e51521522 v4.11.1
33 33 0277edbcda5a8d075e1e41a95bcee6dcf21f3f77 v4.11.2
34 34 6c5ecbf0778ef870e5b23d9fad5340135b563356 v4.11.3
35 35 be788a89a939ebd63606220064bd624fa9d5c9c9 v4.11.4
36 36 15c90a04098a373ac761fab07695fd80dde3bcdb v4.11.5
37 37 77aff155b3251cc00394a49f5e8f2c99e33149a7 v4.11.6
38 38 c218a1ce5d370c2e671d42a91684b3fc2c91b81d v4.12.0
39 39 80085fb846cc948195a5c76b579ca34cbc49b59b v4.12.1
40 40 346f04fc8a18df3235defbe6e71bd552c0d46481 v4.12.2
41 41 764fdd752322f3e0c13ea00957f2d548bf4363a7 v4.12.3
42 42 b58038974a5cecbb9c100d32ad2e4c68582f1a78 v4.12.4
43 43 e1d42d92a0fec0c80b56c82f37bc7b5472613706 v4.13.0
44 44 c3ded3ff17e9bb2a47002a808984a7a946f58a1c v4.13.1
45 45 7ff81aa47b1b40cdef9dd5bcdd439f59c269db3d v4.13.2
46 46 628a08e6aaeff2c3f9e0e268e854f870e6778e53 v4.13.3
47 47 941d675f10cfa7d774815bfacfb37085751b7a0d v4.14.0
48 48 75e11d32c0be0a457198f07888e7ef650cfa6888 v4.14.1
49 49 6c6f49fda0191c4641dcd43aa0d4376b8b728d40 v4.15.0
50 50 184dea5e01c36e6474c83d3bb34719cdfec22b0d v4.15.1
51 51 a4dc3669345553582296b2ce1485229a6c6f0522 v4.15.2
52 52 d2a4a1a66f204668841da1cdccfa29083e1ef7a3 v4.16.0
53 53 744cf8f2c8f23051978fc293404bf475cc5a31f6 v4.16.1
54 54 e68aff93ce4ad11fea13420e914f7dfb05c39566 v4.16.2
55 55 647aeff9752dc1aa00796fa280d0d2ce2f511bc9 v4.17.0
56 56 5e0c2990e095bba1dc903cf0e6ef6ac035e0ccf9 v4.17.1
57 57 8a824544d95037d76d99b104b5d2363858101d53 v4.17.2
58 58 ccd806a2d9482f61bd7e8956a02a28eb24a1d46a v4.17.3
59 59 e533ca02ccc205189b7bad9f227a312212772022 v4.17.4
60 ba6a6dc9ecd7fd8b1dcd6eb0c4ee0210e897c426 v4.18.0
61 17bc818b41bcf6883b9ff0da31f01d8c2a5d0781 v4.18.1
62 1e9f12aa01f82c335abc9017efe94ce1c30b52ba v4.18.2
63 f4cc6b3c5680bdf4541d7d442fbb7086640fb547 v4.18.3
@@ -1,1177 +1,1181 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import collections
19 19 import logging
20 20 import os
21 21 import posixpath as vcspath
22 22 import re
23 23 import stat
24 24 import traceback
25 25 import urllib
26 26 import urllib2
27 27 from functools import wraps
28 28
29 29 import more_itertools
30 30 import pygit2
31 31 from pygit2 import Repository as LibGit2Repo
32 32 from dulwich import index, objects
33 33 from dulwich.client import HttpGitClient, LocalGitClient
34 34 from dulwich.errors import (
35 35 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 36 MissingCommitError, ObjectMissing, HangupException,
37 37 UnexpectedCommandError)
38 38 from dulwich.repo import Repo as DulwichRepo
39 39 from dulwich.server import update_server_info
40 40
41 41 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver.utils import safe_str, safe_int
42 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 43 from vcsserver.base import RepoFactory, obfuscate_qs
44 44 from vcsserver.hgcompat import (
45 45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 46 from vcsserver.git_lfs.lib import LFSOidStore
47 47 from vcsserver.vcs_base import RemoteBase
48 48
49 49 DIR_STAT = stat.S_IFDIR
50 50 FILE_MODE = stat.S_IFMT
51 51 GIT_LINK = objects.S_IFGITLINK
52 52 PEELED_REF_MARKER = '^{}'
53 53
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
def str_to_dulwich(value):
    """
    Decode a wire-encoded byte string into `unicode`.

    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    return value.decode(settings.WIRE_ENCODING)
63 63
64 64
def reraise_safe_exceptions(func):
    """Converts Dulwich exceptions to something neutral.

    Decorator: lookup-style failures become `exceptions.LookupException`,
    protocol/connection failures become `exceptions.VcsException`; anything
    else is re-raised unchanged.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
            # missing/corrupt object class of errors -> LookupException
            exc = exceptions.LookupException(org_exc=e)
            raise exc(safe_str(e))
        except (HangupException, UnexpectedCommandError) as e:
            # protocol-level errors -> generic VcsException
            exc = exceptions.VcsException(org_exc=e)
            raise exc(safe_str(e))
        except Exception as e:
            # NOTE(marcink): becuase of how dulwich handles some exceptions
            # (KeyError on empty repos), we cannot track this and catch all
            # exceptions, it's an exceptions from other handlers
            #if not hasattr(e, '_vcs_kind'):
            #log.exception("Unhandled exception in git remote call")
            #raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper
87 87
88 88
class Repo(DulwichRepo):
    """
    A wrapper for dulwich Repo class.

    Since dulwich is sometimes keeping .idx file descriptors open, it leads to
    "Too many open files" error. We need to close all opened file descriptors
    once the repo object is destroyed.
    """
    def __del__(self):
        # hasattr guard: __init__ may have failed before object_store was set
        if hasattr(self, 'object_store'):
            self.close()
100 100
101 101
class Repository(LibGit2Repo):
    """Context-manager wrapper over pygit2's Repository.

    Exiting the `with` block frees the underlying libgit2 handle so file
    descriptors are not leaked between calls.
    """

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.free()
109 109
110 110
class GitFactory(RepoFactory):
    """Factory producing repository objects for the git backend.

    Depending on `use_libgit2` it hands out either a pygit2-based
    `Repository` or a dulwich-based `Repo`.
    """
    repo_type = 'git'

    def _create_repo(self, wire, create, use_libgit2=False):
        # NOTE: `create` is currently unused here; path comes from the wire
        if use_libgit2:
            return Repository(wire['path'])
        else:
            repo_path = str_to_dulwich(wire['path'])
            return Repo(repo_path)

    def repo(self, wire, create=False, use_libgit2=False):
        """
        Get a repository instance for the given path.
        """
        return self._create_repo(wire, create, use_libgit2)

    def repo_libgit2(self, wire):
        """Shortcut for a pygit2-backed repository instance."""
        return self.repo(wire, use_libgit2=True)
129 129
130 130
131 131 class GitRemote(RemoteBase):
132 132
    def __init__(self, factory):
        """Store the repo factory and register attributes servable in bulk."""
        self._factory = factory
        # mapping of attribute name -> method used by bulk_request()
        self._bulk_methods = {
            "date": self.date,
            "author": self.author,
            "branch": self.branch,
            "message": self.message,
            "parents": self.parents,
            "_commit": self.revision,
        }
143 143
144 144 def _wire_to_config(self, wire):
145 145 if 'config' in wire:
146 146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 147 return {}
148 148
149 149 def _remote_conf(self, config):
150 150 params = [
151 151 '-c', 'core.askpass=""',
152 152 ]
153 153 ssl_cert_dir = config.get('vcs_ssl_dir')
154 154 if ssl_cert_dir:
155 155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 156 return params
157 157
    @reraise_safe_exceptions
    def discover_git_version(self):
        """Return the installed git binary's version string (e.g. '2.24.1').

        Runs `git --version` and strips the leading 'git version' prefix.
        """
        stdout, _ = self.run_git_command(
            {}, ['--version'], _bare=True, _safe=True)
        prefix = 'git version'
        if stdout.startswith(prefix):
            stdout = stdout[len(prefix):]
        return stdout.strip()
166 166
    @reraise_safe_exceptions
    def is_empty(self, wire):
        """Return True when the repository has no commits (no usable HEAD)."""
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:

            try:
                # cheap check first: a named HEAD implies at least one commit
                has_head = repo.head.name
                if has_head:
                    return False

                # NOTE(marcink): check again using more expensive method
                return repo.is_empty
            except Exception:
                # accessing HEAD on an empty repo raises; treat as empty
                pass

        return True
183 183
    @reraise_safe_exceptions
    def assert_correct_path(self, wire):
        """Return True if wire['path'] points at a readable git repository."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _assert_correct_path(_context_uid, _repo_id):
            try:
                # opening the repo is the validity check itself
                repo_init = self._factory.repo_libgit2(wire)
                with repo_init as repo:
                    pass
            except pygit2.GitError:
                path = wire.get('path')
                tb = traceback.format_exc()
                log.debug("Invalid Git path `%s`, tb: %s", path, tb)
                return False

            return True
        return _assert_correct_path(context_uid, repo_id)
201 201
    @reraise_safe_exceptions
    def bare(self, wire):
        """Return True when the repository is bare (no working tree)."""
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            return repo.is_bare
207 207
    @reraise_safe_exceptions
    def blob_as_pretty_string(self, wire, sha):
        """Return the raw data of the blob identified by `sha`."""
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            blob_obj = repo[sha]
            blob = blob_obj.data
            return blob
215 215
    @reraise_safe_exceptions
    def blob_raw_length(self, wire, sha):
        """Return the size in bytes of the blob identified by `sha`."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _blob_raw_length(_repo_id, _sha):
            # NOTE: body uses the outer `sha`; `_sha` exists only as cache key

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                blob = repo[sha]
                return blob.size

        return _blob_raw_length(repo_id, sha)
228 228
229 229 def _parse_lfs_pointer(self, raw_content):
230 230
231 231 spec_string = 'version https://git-lfs.github.com/spec'
232 232 if raw_content and raw_content.startswith(spec_string):
233 233 pattern = re.compile(r"""
234 234 (?:\n)?
235 235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
236 236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
237 237 ^size[ ](?P<oid_size>[0-9]+)\n
238 238 (?:\n)?
239 239 """, re.VERBOSE | re.MULTILINE)
240 240 match = pattern.match(raw_content)
241 241 if match:
242 242 return match.groupdict()
243 243
244 244 return {}
245 245
    @reraise_safe_exceptions
    def is_large_file(self, wire, commit_id):
        """Return parsed LFS pointer data for the blob at `commit_id`.

        Empty dict when the blob is binary or not an LFS pointer.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)

        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _is_large_file(_repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                blob = repo[commit_id]
                # binary content can never be a text LFS pointer
                if blob.is_binary:
                    return {}

                return self._parse_lfs_pointer(blob.data)

        return _is_large_file(repo_id, commit_id)
261 261
    @reraise_safe_exceptions
    def is_binary(self, wire, tree_id):
        """Return True when the object at `tree_id` holds binary content."""
        cache_on, context_uid, repo_id = self._cache_on(wire)

        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _is_binary(_repo_id, _tree_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                blob_obj = repo[tree_id]
                return blob_obj.is_binary

        return _is_binary(repo_id, tree_id)
274 274
    @reraise_safe_exceptions
    def in_largefiles_store(self, wire, oid):
        """Return True when the LFS store holds an object for `oid`.

        False when no store location is configured.
        """
        conf = self._wire_to_config(wire)
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            repo_name = repo.path

        store_location = conf.get('vcs_git_lfs_store_location')
        if store_location:

            store = LFSOidStore(
                oid=oid, repo=repo_name, store_location=store_location)
            return store.has_oid()

        return False
290 290
    @reraise_safe_exceptions
    def store_path(self, wire, oid):
        """Return the filesystem path of the LFS object for `oid`.

        Raises ValueError when no LFS store location is configured.
        """
        conf = self._wire_to_config(wire)
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            repo_name = repo.path

        store_location = conf.get('vcs_git_lfs_store_location')
        if store_location:
            store = LFSOidStore(
                oid=oid, repo=repo_name, store_location=store_location)
            return store.oid_path
        raise ValueError('Unable to fetch oid with path {}'.format(oid))
304 304
    @reraise_safe_exceptions
    def bulk_request(self, wire, rev, pre_load):
        """Fetch several commit attributes for `rev` in one call.

        `pre_load` lists attribute names registered in self._bulk_methods;
        unknown names raise a VcsException.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _bulk_request(_repo_id, _rev, _pre_load):
            result = {}
            for attr in pre_load:
                try:
                    method = self._bulk_methods[attr]
                    args = [wire, rev]
                    result[attr] = method(*args)
                except KeyError as e:
                    raise exceptions.VcsException(e)(
                        "Unknown bulk attribute: %s" % attr)
            return result

        # sorted(pre_load) keeps the cache key stable across call orderings
        return _bulk_request(repo_id, rev, sorted(pre_load))
322 322
    def _build_opener(self, url):
        """Build a urllib2 opener for `url`, wiring in HTTP basic/digest
        auth handlers when the URL carries credentials."""
        handlers = []
        url_obj = url_parser(url)
        _, authinfo = url_obj.authinfo()

        if authinfo:
            # create a password manager
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        return urllib2.build_opener(*handlers)
337 337
338 338 def _type_id_to_name(self, type_id):
339 339 return {
340 340 1: b'commit',
341 341 2: b'tree',
342 342 3: b'blob',
343 343 4: b'tag'
344 344 }[type_id]
345 345
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """Verify `url` is reachable and answers like a git repository.

        Appends '/info/refs?service=git-upload-pack' and checks the reply;
        raises exceptions.URLError on failure, returns True on success.
        """
        url_obj = url_parser(url)
        test_uri, _ = url_obj.authinfo()
        # hide credentials in everything we log
        url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
        url_obj.query = obfuscate_qs(url_obj.query)
        cleaned_uri = str(url_obj)
        log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

        if not test_uri.endswith('info/refs'):
            test_uri = test_uri.rstrip('/') + '/info/refs'

        o = self._build_opener(url)
        o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git

        q = {"service": 'git-upload-pack'}
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", cleaned_uri)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError()('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

        # now detect if it's proper git repo
        gitdata = resp.read()
        if 'service=git-upload-pack' in gitdata:
            pass
        elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
            # old style git can return some other format !
            pass
        else:
            raise exceptions.URLError()(
                "url [%s] does not look like an git" % (cleaned_uri,))

        return True
388 388
    @reraise_safe_exceptions
    def clone(self, wire, url, deferred, valid_refs, update_after_clone):
        """Clone by pulling all refs from `url` and applying the matching ones.

        `valid_refs` are ref-name prefixes to keep; refs ending with
        `deferred` are skipped. Optionally checks out HEAD afterwards.
        """
        # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
        remote_refs = self.pull(wire, url, apply_refs=False)
        repo = self._factory.repo(wire)
        if isinstance(valid_refs, list):
            valid_refs = tuple(valid_refs)

        for k in remote_refs:
            # only parse heads/tags and skip so called deferred tags
            if k.startswith(valid_refs) and not k.endswith(deferred):
                repo[k] = remote_refs[k]

        if update_after_clone:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
407 407
    @reraise_safe_exceptions
    def branch(self, wire, commit_id):
        """Return the short names of branches whose head is `commit_id`."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _branch(_context_uid, _repo_id, _commit_id):
            regex = re.compile('^refs/heads')

            def filter_with(ref):
                # ref is a (name, sha) pair from get_refs().items()
                return regex.match(ref[0]) and ref[1] == _commit_id

            branches = filter(filter_with, self.get_refs(wire).items())
            return [x[0].split('refs/heads/')[-1] for x in branches]

        return _branch(context_uid, repo_id, commit_id)
422 422
    @reraise_safe_exceptions
    def commit_branches(self, wire, commit_id):
        """Return branch names that contain the commit `commit_id`."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _commit_branches(_context_uid, _repo_id, _commit_id):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                branches = [x for x in repo.branches.with_commit(_commit_id)]
                return branches

        return _commit_branches(context_uid, repo_id, commit_id)
434 434
    @reraise_safe_exceptions
    def add_object(self, wire, content):
        """Store `content` as a loose blob object; return the new blob id."""
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            blob = objects.Blob()
            blob.set_raw_string(content)
            repo.object_store.add_object(blob)
            return blob.id
443 443
    # TODO: this is quite complex, check if that can be simplified
    @reraise_safe_exceptions
    def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
        """Create a commit on `branch` applying `updated` and `removed` nodes.

        `updated` is a list of dicts with 'path', 'node_path', 'content' and
        'mode'; `removed` is a list of paths to delete. `commit_data` holds
        attributes set verbatim on the dulwich Commit object. Returns the new
        commit id.
        """
        repo = self._factory.repo(wire)
        object_store = repo.object_store

        # Create tree and populates it with blobs
        commit_tree = commit_tree and repo[commit_tree] or objects.Tree()

        for node in updated:
            # Compute subdirs if needed
            dirpath, nodename = vcspath.split(node['path'])
            dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
            parent = commit_tree
            ancestors = [('', parent)]

            # Tries to dig for the deepest existing tree
            while dirnames:
                curdir = dirnames.pop(0)
                try:
                    dir_id = parent[curdir][1]
                except KeyError:
                    # put curdir back into dirnames and stops
                    dirnames.insert(0, curdir)
                    break
                else:
                    # If found, updates parent
                    parent = repo[dir_id]
                    ancestors.append((curdir, parent))
            # Now parent is deepest existing tree and we need to create
            # subtrees for dirnames (in reverse order)
            # [this only applies for nodes from added]
            new_trees = []

            blob = objects.Blob.from_string(node['content'])

            if dirnames:
                # If there are trees which should be created we need to build
                # them now (in reverse order)
                reversed_dirnames = list(reversed(dirnames))
                curtree = objects.Tree()
                curtree[node['node_path']] = node['mode'], blob.id
                new_trees.append(curtree)
                for dirname in reversed_dirnames[:-1]:
                    newtree = objects.Tree()
                    newtree[dirname] = (DIR_STAT, curtree.id)
                    new_trees.append(newtree)
                    curtree = newtree
                parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
            else:
                parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)

            new_trees.append(parent)
            # Update ancestors
            reversed_ancestors = reversed(
                [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
            for parent, tree, path in reversed_ancestors:
                parent[path] = (DIR_STAT, tree.id)
                object_store.add_object(tree)

            object_store.add_object(blob)
            for tree in new_trees:
                object_store.add_object(tree)

        for node_path in removed:
            paths = node_path.split('/')
            tree = commit_tree
            trees = [tree]
            # Traverse deep into the forest...
            for path in paths:
                try:
                    obj = repo[tree[path][1]]
                    if isinstance(obj, objects.Tree):
                        trees.append(obj)
                        tree = obj
                except KeyError:
                    break
            # Cut down the blob and all rotten trees on the way back...
            for path, tree in reversed(zip(paths, trees)):
                del tree[path]
                if tree:
                    # This tree still has elements - don't remove it or any
                    # of it's parents
                    break

        object_store.add_object(commit_tree)

        # Create commit
        commit = objects.Commit()
        commit.tree = commit_tree.id
        for k, v in commit_data.iteritems():
            setattr(commit, k, v)
        object_store.add_object(commit)

        self.create_branch(wire, branch, commit.id)

        # dulwich set-ref
        ref = 'refs/heads/%s' % branch
        repo.refs[ref] = commit.id

        return commit.id
545 545
    @reraise_safe_exceptions
    def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
        """Fetch from `url` into the local repository.

        `refs` restricts which remote refs are wanted/applied; with
        `apply_refs` the fetched refs are written locally; `update_after`
        additionally checks out HEAD. Returns the (filtered) remote refs.
        """
        # local path vs http(s)/ssh-style URL choose the dulwich client type
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            url_obj = url_parser(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository as e:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            # Exception can contain unicode which we convert
            raise exceptions.AbortException(e)(repr(e))

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.debug("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs and not update_after:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

        if update_after:
            # we want to checkout HEAD
            repo["HEAD"] = remote_refs["HEAD"]
            index.build_index_from_tree(repo.path, repo.index_path(),
                                        repo.object_store, repo["HEAD"].tree)
        return remote_refs
598 598
    @reraise_safe_exceptions
    def sync_fetch(self, wire, url, refs=None, all_refs=False):
        """Fetch refs from `url` using the git binary.

        Lists remote refs via `git ls-remote` (heads/tags unless `all_refs`),
        then fetches them in chunks. `refs`, when given, filters which shas
        are fetched. Returns an OrderedDict of remote ref name -> sha.
        """
        repo = self._factory.repo(wire)
        if refs and not isinstance(refs, (list, tuple)):
            refs = [refs]

        config = self._wire_to_config(wire)
        # get all remote refs we'll use to fetch later
        cmd = ['ls-remote']
        if not all_refs:
            cmd += ['--heads', '--tags']
        cmd += [url]
        output, __ = self.run_git_command(
            wire, cmd, fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})

        remote_refs = collections.OrderedDict()
        fetch_refs = []

        for ref_line in output.splitlines():
            sha, ref = ref_line.split('\t')
            sha = sha.strip()
            if ref in remote_refs:
                # duplicate, skip
                continue
            if ref.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", ref)
                continue
            # don't sync HEAD
            if ref in ['HEAD']:
                continue

            remote_refs[ref] = sha

            if refs and sha in refs:
                # we filter fetch using our specified refs
                fetch_refs.append('{}:{}'.format(ref, ref))
            elif not refs:
                fetch_refs.append('{}:{}'.format(ref, ref))
        log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

        if fetch_refs:
            # chunked to keep the command line length within OS limits
            for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
                fetch_refs_chunks = list(chunk)
                log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
                _out, _err = self.run_git_command(
                    wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                    fail_on_stderr=False,
                    _copts=self._remote_conf(config),
                    extra_env={'GIT_TERMINAL_PROMPT': '0'})

        return remote_refs
652 652
    @reraise_safe_exceptions
    def sync_push(self, wire, url, refs=None):
        """Mirror-push the repository to `url` using the git binary.

        No-op when the target URL does not validate. NOTE(review): the
        `refs` parameter is currently unused here.
        """
        if not self.check_url(url, wire):
            return
        config = self._wire_to_config(wire)
        self._factory.repo(wire)
        self.run_git_command(
            wire, ['push', url, '--mirror'], fail_on_stderr=False,
            _copts=self._remote_conf(config),
            extra_env={'GIT_TERMINAL_PROMPT': '0'})
663 663
    @reraise_safe_exceptions
    def get_remote_refs(self, wire, url):
        """Return the refs dict of the repository located at local path `url`."""
        repo = Repo(url)
        return repo.get_refs()
668 668
    @reraise_safe_exceptions
    def get_description(self, wire):
        """Return the repository's description file content."""
        repo = self._factory.repo(wire)
        return repo.get_description()
673 673
    @reraise_safe_exceptions
    def get_missing_revs(self, wire, rev1, rev2, path2):
        """Return commit ids reachable from `rev2` but not from `rev1`.

        Fetches both repositories into each other first so the walker can
        see all objects from either side.
        """
        repo = self._factory.repo(wire)
        LocalGitClient(thin_packs=False).fetch(path2, repo)

        wire_remote = wire.copy()
        wire_remote['path'] = path2
        repo_remote = self._factory.repo(wire_remote)
        LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)

        revs = [
            x.commit.id
            for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
        return revs
688 688
    @reraise_safe_exceptions
    def get_object(self, wire, sha):
        """Resolve `sha` (any rev-parse expression) to an object descriptor.

        Returns a dict with 'id', 'type', 'commit_id' and 'idx'; raises
        LookupException when the object is missing or dangling.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_object(_context_uid, _repo_id, _sha):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:

                missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
                try:
                    commit = repo.revparse_single(sha)
                except (KeyError, ValueError) as e:
                    raise exceptions.LookupException(e)(missing_commit_err)

                is_tag = False
                if isinstance(commit, pygit2.Tag):
                    # peel annotated tags down to the tagged commit
                    commit = repo.get(commit.target)
                    is_tag = True

                check_dangling = True
                if is_tag:
                    check_dangling = False

                # we used a reference and it parsed means we're not having a dangling commit
                if sha != commit.hex:
                    check_dangling = False

                if check_dangling:
                    # check for dangling commit
                    for branch in repo.branches.with_commit(commit.hex):
                        if branch:
                            break
                    else:
                        raise exceptions.LookupException(None)(missing_commit_err)

                commit_id = commit.hex
                type_id = commit.type

                return {
                    'id': commit_id,
                    'type': self._type_id_to_name(type_id),
                    'commit_id': commit_id,
                    'idx': 0
                }

        return _get_object(context_uid, repo_id, sha)
735 735
    @reraise_safe_exceptions
    def get_refs(self, wire):
        """Return {ref name: target sha} for all heads and tags."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_refs(_context_uid, _repo_id):

            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                regex = re.compile('^refs/(heads|tags)/')
                return {x.name: x.target.hex for x in
                        filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}

        return _get_refs(context_uid, repo_id)
749 749
    @reraise_safe_exceptions
    def get_branch_pointers(self, wire):
        """Return {head commit sha: branch short name} for all branches.

        NOTE(review): branches pointing at the same commit collapse to one
        entry because the sha is the dict key.
        """
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _get_branch_pointers(_context_uid, _repo_id):

            repo_init = self._factory.repo_libgit2(wire)
            regex = re.compile('^refs/heads')
            with repo_init as repo:
                branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
                return {x.target.hex: x.shorthand for x in branches}

        return _get_branch_pointers(context_uid, repo_id)
763 763
    @reraise_safe_exceptions
    def head(self, wire, show_exc=True):
        """Return the hex sha of HEAD; swallow errors when show_exc is False."""
        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _head(_context_uid, _repo_id, _show_exc):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                try:
                    return repo.head.peel().hex
                except Exception:
                    # e.g. unborn HEAD on an empty repository
                    if show_exc:
                        raise
        return _head(context_uid, repo_id, show_exc)
777 777
    @reraise_safe_exceptions
    def init(self, wire):
        """Initialize a new non-bare repository at wire['path']."""
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init(repo_path)
782 782
    @reraise_safe_exceptions
    def init_bare(self, wire):
        """Initialize a new bare repository at wire['path']."""
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init_bare(repo_path)
787 787
    @reraise_safe_exceptions
    def revision(self, wire, rev):
        """Return {'id': hex sha, 'tree': tree hex sha (commits only)} for `rev`."""

        cache_on, context_uid, repo_id = self._cache_on(wire)
        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _revision(_context_uid, _repo_id, _rev):
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                commit = repo[rev]
                obj_data = {
                    'id': commit.id.hex,
                }
                # tree objects itself don't have tree_id attribute
                if hasattr(commit, 'tree_id'):
                    obj_data['tree'] = commit.tree_id.hex

                return obj_data
        return _revision(context_uid, repo_id, rev)
806 806
807 807 @reraise_safe_exceptions
808 808 def date(self, wire, commit_id):
809 809 cache_on, context_uid, repo_id = self._cache_on(wire)
810 810 @self.region.conditional_cache_on_arguments(condition=cache_on)
811 811 def _date(_repo_id, _commit_id):
812 812 repo_init = self._factory.repo_libgit2(wire)
813 813 with repo_init as repo:
814 814 commit = repo[commit_id]
815 815
816 816 if hasattr(commit, 'commit_time'):
817 817 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
818 818 else:
819 819 commit = commit.get_object()
820 820 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
821 821
822 822 # TODO(marcink): check dulwich difference of offset vs timezone
823 823 return [commit_time, commit_time_offset]
824 824 return _date(repo_id, commit_id)
825 825
826 826 @reraise_safe_exceptions
827 827 def author(self, wire, commit_id):
828 828 cache_on, context_uid, repo_id = self._cache_on(wire)
829 829 @self.region.conditional_cache_on_arguments(condition=cache_on)
830 830 def _author(_repo_id, _commit_id):
831 831 repo_init = self._factory.repo_libgit2(wire)
832 832 with repo_init as repo:
833 833 commit = repo[commit_id]
834 834
835 835 if hasattr(commit, 'author'):
836 836 author = commit.author
837 837 else:
838 838 author = commit.get_object().author
839 839
840 840 if author.email:
841 841 return u"{} <{}>".format(author.name, author.email)
842 842
843 return u"{}".format(author.raw_name)
843 try:
844 return u"{}".format(author.name)
845 except Exception:
846 return u"{}".format(safe_unicode(author.raw_name))
847
844 848 return _author(repo_id, commit_id)
845 849
846 850 @reraise_safe_exceptions
847 851 def message(self, wire, commit_id):
848 852 cache_on, context_uid, repo_id = self._cache_on(wire)
849 853 @self.region.conditional_cache_on_arguments(condition=cache_on)
850 854 def _message(_repo_id, _commit_id):
851 855 repo_init = self._factory.repo_libgit2(wire)
852 856 with repo_init as repo:
853 857 commit = repo[commit_id]
854 858 return commit.message
855 859 return _message(repo_id, commit_id)
856 860
857 861 @reraise_safe_exceptions
858 862 def parents(self, wire, commit_id):
859 863 cache_on, context_uid, repo_id = self._cache_on(wire)
860 864 @self.region.conditional_cache_on_arguments(condition=cache_on)
861 865 def _parents(_repo_id, _commit_id):
862 866 repo_init = self._factory.repo_libgit2(wire)
863 867 with repo_init as repo:
864 868 commit = repo[commit_id]
865 869 if hasattr(commit, 'parent_ids'):
866 870 parent_ids = commit.parent_ids
867 871 else:
868 872 parent_ids = commit.get_object().parent_ids
869 873
870 874 return [x.hex for x in parent_ids]
871 875 return _parents(repo_id, commit_id)
872 876
873 877 @reraise_safe_exceptions
874 878 def children(self, wire, commit_id):
875 879 cache_on, context_uid, repo_id = self._cache_on(wire)
876 880 @self.region.conditional_cache_on_arguments(condition=cache_on)
877 881 def _children(_repo_id, _commit_id):
878 882 output, __ = self.run_git_command(
879 883 wire, ['rev-list', '--all', '--children'])
880 884
881 885 child_ids = []
882 886 pat = re.compile(r'^%s' % commit_id)
883 887 for l in output.splitlines():
884 888 if pat.match(l):
885 889 found_ids = l.split(' ')[1:]
886 890 child_ids.extend(found_ids)
887 891
888 892 return child_ids
889 893 return _children(repo_id, commit_id)
890 894
891 895 @reraise_safe_exceptions
892 896 def set_refs(self, wire, key, value):
893 897 repo_init = self._factory.repo_libgit2(wire)
894 898 with repo_init as repo:
895 899 repo.references.create(key, value, force=True)
896 900
897 901 @reraise_safe_exceptions
898 902 def create_branch(self, wire, branch_name, commit_id, force=False):
899 903 repo_init = self._factory.repo_libgit2(wire)
900 904 with repo_init as repo:
901 905 commit = repo[commit_id]
902 906
903 907 if force:
904 908 repo.branches.local.create(branch_name, commit, force=force)
905 909 elif not repo.branches.get(branch_name):
906 910 # create only if that branch isn't existing
907 911 repo.branches.local.create(branch_name, commit, force=force)
908 912
909 913 @reraise_safe_exceptions
910 914 def remove_ref(self, wire, key):
911 915 repo_init = self._factory.repo_libgit2(wire)
912 916 with repo_init as repo:
913 917 repo.references.delete(key)
914 918
915 919 @reraise_safe_exceptions
916 920 def tag_remove(self, wire, tag_name):
917 921 repo_init = self._factory.repo_libgit2(wire)
918 922 with repo_init as repo:
919 923 key = 'refs/tags/{}'.format(tag_name)
920 924 repo.references.delete(key)
921 925
922 926 @reraise_safe_exceptions
923 927 def tree_changes(self, wire, source_id, target_id):
924 928 # TODO(marcink): remove this seems it's only used by tests
925 929 repo = self._factory.repo(wire)
926 930 source = repo[source_id].tree if source_id else None
927 931 target = repo[target_id].tree
928 932 result = repo.object_store.tree_changes(source, target)
929 933 return list(result)
930 934
931 935 @reraise_safe_exceptions
932 936 def tree_and_type_for_path(self, wire, commit_id, path):
933 937
934 938 cache_on, context_uid, repo_id = self._cache_on(wire)
935 939 @self.region.conditional_cache_on_arguments(condition=cache_on)
936 940 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
937 941 repo_init = self._factory.repo_libgit2(wire)
938 942
939 943 with repo_init as repo:
940 944 commit = repo[commit_id]
941 945 try:
942 946 tree = commit.tree[path]
943 947 except KeyError:
944 948 return None, None, None
945 949
946 950 return tree.id.hex, tree.type, tree.filemode
947 951 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
948 952
949 953 @reraise_safe_exceptions
950 954 def tree_items(self, wire, tree_id):
951 955 cache_on, context_uid, repo_id = self._cache_on(wire)
952 956 @self.region.conditional_cache_on_arguments(condition=cache_on)
953 957 def _tree_items(_repo_id, _tree_id):
954 958
955 959 repo_init = self._factory.repo_libgit2(wire)
956 960 with repo_init as repo:
957 961 try:
958 962 tree = repo[tree_id]
959 963 except KeyError:
960 964 raise ObjectMissing('No tree with id: {}'.format(tree_id))
961 965
962 966 result = []
963 967 for item in tree:
964 968 item_sha = item.hex
965 969 item_mode = item.filemode
966 970 item_type = item.type
967 971
968 972 if item_type == 'commit':
969 973 # NOTE(marcink): submodules we translate to 'link' for backward compat
970 974 item_type = 'link'
971 975
972 976 result.append((item.name, item_mode, item_sha, item_type))
973 977 return result
974 978 return _tree_items(repo_id, tree_id)
975 979
976 980 @reraise_safe_exceptions
977 981 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
978 982 """
979 983 Old version that uses subprocess to call diff
980 984 """
981 985
982 986 flags = [
983 987 '-U%s' % context, '--patch',
984 988 '--binary',
985 989 '--find-renames',
986 990 '--no-indent-heuristic',
987 991 # '--indent-heuristic',
988 992 #'--full-index',
989 993 #'--abbrev=40'
990 994 ]
991 995
992 996 if opt_ignorews:
993 997 flags.append('--ignore-all-space')
994 998
995 999 if commit_id_1 == self.EMPTY_COMMIT:
996 1000 cmd = ['show'] + flags + [commit_id_2]
997 1001 else:
998 1002 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
999 1003
1000 1004 if file_filter:
1001 1005 cmd.extend(['--', file_filter])
1002 1006
1003 1007 diff, __ = self.run_git_command(wire, cmd)
1004 1008 # If we used 'show' command, strip first few lines (until actual diff
1005 1009 # starts)
1006 1010 if commit_id_1 == self.EMPTY_COMMIT:
1007 1011 lines = diff.splitlines()
1008 1012 x = 0
1009 1013 for line in lines:
1010 1014 if line.startswith('diff'):
1011 1015 break
1012 1016 x += 1
1013 1017 # Append new line just like 'diff' command do
1014 1018 diff = '\n'.join(lines[x:]) + '\n'
1015 1019 return diff
1016 1020
1017 1021 @reraise_safe_exceptions
1018 1022 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1019 1023 repo_init = self._factory.repo_libgit2(wire)
1020 1024 with repo_init as repo:
1021 1025 swap = True
1022 1026 flags = 0
1023 1027 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1024 1028
1025 1029 if opt_ignorews:
1026 1030 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1027 1031
1028 1032 if commit_id_1 == self.EMPTY_COMMIT:
1029 1033 comm1 = repo[commit_id_2]
1030 1034 diff_obj = comm1.tree.diff_to_tree(
1031 1035 flags=flags, context_lines=context, swap=swap)
1032 1036
1033 1037 else:
1034 1038 comm1 = repo[commit_id_2]
1035 1039 comm2 = repo[commit_id_1]
1036 1040 diff_obj = comm1.tree.diff_to_tree(
1037 1041 comm2.tree, flags=flags, context_lines=context, swap=swap)
1038 1042 similar_flags = 0
1039 1043 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1040 1044 diff_obj.find_similar(flags=similar_flags)
1041 1045
1042 1046 if file_filter:
1043 1047 for p in diff_obj:
1044 1048 if p.delta.old_file.path == file_filter:
1045 1049 return p.patch or ''
1046 1050 # fo matching path == no diff
1047 1051 return ''
1048 1052 return diff_obj.patch or ''
1049 1053
1050 1054 @reraise_safe_exceptions
1051 1055 def node_history(self, wire, commit_id, path, limit):
1052 1056 cache_on, context_uid, repo_id = self._cache_on(wire)
1053 1057 @self.region.conditional_cache_on_arguments(condition=cache_on)
1054 1058 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1055 1059 # optimize for n==1, rev-list is much faster for that use-case
1056 1060 if limit == 1:
1057 1061 cmd = ['rev-list', '-1', commit_id, '--', path]
1058 1062 else:
1059 1063 cmd = ['log']
1060 1064 if limit:
1061 1065 cmd.extend(['-n', str(safe_int(limit, 0))])
1062 1066 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1063 1067
1064 1068 output, __ = self.run_git_command(wire, cmd)
1065 1069 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1066 1070
1067 1071 return [x for x in commit_ids]
1068 1072 return _node_history(context_uid, repo_id, commit_id, path, limit)
1069 1073
1070 1074 @reraise_safe_exceptions
1071 1075 def node_annotate(self, wire, commit_id, path):
1072 1076
1073 1077 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1074 1078 # -l ==> outputs long shas (and we need all 40 characters)
1075 1079 # --root ==> doesn't put '^' character for boundaries
1076 1080 # -r commit_id ==> blames for the given commit
1077 1081 output, __ = self.run_git_command(wire, cmd)
1078 1082
1079 1083 result = []
1080 1084 for i, blame_line in enumerate(output.split('\n')[:-1]):
1081 1085 line_no = i + 1
1082 1086 commit_id, line = re.split(r' ', blame_line, 1)
1083 1087 result.append((line_no, commit_id, line))
1084 1088 return result
1085 1089
1086 1090 @reraise_safe_exceptions
1087 1091 def update_server_info(self, wire):
1088 1092 repo = self._factory.repo(wire)
1089 1093 update_server_info(repo)
1090 1094
1091 1095 @reraise_safe_exceptions
1092 1096 def get_all_commit_ids(self, wire):
1093 1097
1094 1098 cache_on, context_uid, repo_id = self._cache_on(wire)
1095 1099 @self.region.conditional_cache_on_arguments(condition=cache_on)
1096 1100 def _get_all_commit_ids(_context_uid, _repo_id):
1097 1101
1098 1102 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1099 1103 try:
1100 1104 output, __ = self.run_git_command(wire, cmd)
1101 1105 return output.splitlines()
1102 1106 except Exception:
1103 1107 # Can be raised for empty repositories
1104 1108 return []
1105 1109 return _get_all_commit_ids(context_uid, repo_id)
1106 1110
1107 1111 @reraise_safe_exceptions
1108 1112 def run_git_command(self, wire, cmd, **opts):
1109 1113 path = wire.get('path', None)
1110 1114
1111 1115 if path and os.path.isdir(path):
1112 1116 opts['cwd'] = path
1113 1117
1114 1118 if '_bare' in opts:
1115 1119 _copts = []
1116 1120 del opts['_bare']
1117 1121 else:
1118 1122 _copts = ['-c', 'core.quotepath=false', ]
1119 1123 safe_call = False
1120 1124 if '_safe' in opts:
1121 1125 # no exc on failure
1122 1126 del opts['_safe']
1123 1127 safe_call = True
1124 1128
1125 1129 if '_copts' in opts:
1126 1130 _copts.extend(opts['_copts'] or [])
1127 1131 del opts['_copts']
1128 1132
1129 1133 gitenv = os.environ.copy()
1130 1134 gitenv.update(opts.pop('extra_env', {}))
1131 1135 # need to clean fix GIT_DIR !
1132 1136 if 'GIT_DIR' in gitenv:
1133 1137 del gitenv['GIT_DIR']
1134 1138 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1135 1139 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1136 1140
1137 1141 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1138 1142 _opts = {'env': gitenv, 'shell': False}
1139 1143
1140 1144 proc = None
1141 1145 try:
1142 1146 _opts.update(opts)
1143 1147 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1144 1148
1145 1149 return ''.join(proc), ''.join(proc.error)
1146 1150 except (EnvironmentError, OSError) as err:
1147 1151 cmd = ' '.join(cmd) # human friendly CMD
1148 1152 tb_err = ("Couldn't run git command (%s).\n"
1149 1153 "Original error was:%s\n"
1150 1154 "Call options:%s\n"
1151 1155 % (cmd, err, _opts))
1152 1156 log.exception(tb_err)
1153 1157 if safe_call:
1154 1158 return '', err
1155 1159 else:
1156 1160 raise exceptions.VcsException()(tb_err)
1157 1161 finally:
1158 1162 if proc:
1159 1163 proc.close()
1160 1164
1161 1165 @reraise_safe_exceptions
1162 1166 def install_hooks(self, wire, force=False):
1163 1167 from vcsserver.hook_utils import install_git_hooks
1164 1168 bare = self.bare(wire)
1165 1169 path = wire['path']
1166 1170 return install_git_hooks(path, bare, force_create=force)
1167 1171
1168 1172 @reraise_safe_exceptions
1169 1173 def get_hooks_info(self, wire):
1170 1174 from vcsserver.hook_utils import (
1171 1175 get_git_pre_hook_version, get_git_post_hook_version)
1172 1176 bare = self.bare(wire)
1173 1177 path = wire['path']
1174 1178 return {
1175 1179 'pre_version': get_git_pre_hook_version(path, bare),
1176 1180 'post_version': get_git_post_hook_version(path, bare),
1177 1181 }
@@ -1,990 +1,1009 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23 import traceback
24 24
25 25 from hgext import largefiles, rebase, purge
26 26 from hgext.strip import strip as hgext_strip
27 27 from mercurial import commands
28 28 from mercurial import unionrepo
29 29 from mercurial import verify
30 from mercurial import repair
30 31
31 32 import vcsserver
32 33 from vcsserver import exceptions
33 34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
34 35 from vcsserver.hgcompat import (
35 36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
36 37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
37 38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
38 39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
39 40 RepoLookupError, InterventionRequired, RequirementError)
40 41 from vcsserver.vcs_base import RemoteBase
41 42
42 43 log = logging.getLogger(__name__)
43 44
44 45
45 46 def make_ui_from_config(repo_config):
46 47
47 48 class LoggingUI(ui.ui):
48 49 def status(self, *msg, **opts):
49 50 log.info(' '.join(msg).rstrip('\n'))
50 51 super(LoggingUI, self).status(*msg, **opts)
51 52
52 53 def warn(self, *msg, **opts):
53 54 log.warn(' '.join(msg).rstrip('\n'))
54 55 super(LoggingUI, self).warn(*msg, **opts)
55 56
56 57 def error(self, *msg, **opts):
57 58 log.error(' '.join(msg).rstrip('\n'))
58 59 super(LoggingUI, self).error(*msg, **opts)
59 60
60 61 def note(self, *msg, **opts):
61 62 log.info(' '.join(msg).rstrip('\n'))
62 63 super(LoggingUI, self).note(*msg, **opts)
63 64
64 65 def debug(self, *msg, **opts):
65 66 log.debug(' '.join(msg).rstrip('\n'))
66 67 super(LoggingUI, self).debug(*msg, **opts)
67 68
68 69 baseui = LoggingUI()
69 70
70 71 # clean the baseui object
71 72 baseui._ocfg = hgconfig.config()
72 73 baseui._ucfg = hgconfig.config()
73 74 baseui._tcfg = hgconfig.config()
74 75
75 76 for section, option, value in repo_config:
76 77 baseui.setconfig(section, option, value)
77 78
78 79 # make our hgweb quiet so it doesn't print output
79 80 baseui.setconfig('ui', 'quiet', 'true')
80 81
81 82 baseui.setconfig('ui', 'paginate', 'never')
82 83 # for better Error reporting of Mercurial
83 84 baseui.setconfig('ui', 'message-output', 'stderr')
84 85
85 86 # force mercurial to only use 1 thread, otherwise it may try to set a
86 87 # signal in a non-main thread, thus generating a ValueError.
87 88 baseui.setconfig('worker', 'numcpus', 1)
88 89
89 90 # If there is no config for the largefiles extension, we explicitly disable
90 91 # it here. This overrides settings from repositories hgrc file. Recent
91 92 # mercurial versions enable largefiles in hgrc on clone from largefile
92 93 # repo.
93 94 if not baseui.hasconfig('extensions', 'largefiles'):
94 95 log.debug('Explicitly disable largefiles extension for repo.')
95 96 baseui.setconfig('extensions', 'largefiles', '!')
96 97
97 98 return baseui
98 99
99 100
100 101 def reraise_safe_exceptions(func):
101 102 """Decorator for converting mercurial exceptions to something neutral."""
102 103
103 104 def wrapper(*args, **kwargs):
104 105 try:
105 106 return func(*args, **kwargs)
106 107 except (Abort, InterventionRequired) as e:
107 108 raise_from_original(exceptions.AbortException(e))
108 109 except RepoLookupError as e:
109 110 raise_from_original(exceptions.LookupException(e))
110 111 except RequirementError as e:
111 112 raise_from_original(exceptions.RequirementException(e))
112 113 except RepoError as e:
113 114 raise_from_original(exceptions.VcsException(e))
114 115 except LookupError as e:
115 116 raise_from_original(exceptions.LookupException(e))
116 117 except Exception as e:
117 118 if not hasattr(e, '_vcs_kind'):
118 119 log.exception("Unhandled exception in hg remote call")
119 120 raise_from_original(exceptions.UnhandledException(e))
120 121
121 122 raise
122 123 return wrapper
123 124
124 125
125 126 class MercurialFactory(RepoFactory):
126 127 repo_type = 'hg'
127 128
128 129 def _create_config(self, config, hooks=True):
129 130 if not hooks:
130 131 hooks_to_clean = frozenset((
131 132 'changegroup.repo_size', 'preoutgoing.pre_pull',
132 133 'outgoing.pull_logger', 'prechangegroup.pre_push'))
133 134 new_config = []
134 135 for section, option, value in config:
135 136 if section == 'hooks' and option in hooks_to_clean:
136 137 continue
137 138 new_config.append((section, option, value))
138 139 config = new_config
139 140
140 141 baseui = make_ui_from_config(config)
141 142 return baseui
142 143
143 144 def _create_repo(self, wire, create):
144 145 baseui = self._create_config(wire["config"])
145 146 return instance(baseui, wire["path"], create)
146 147
147 148 def repo(self, wire, create=False):
148 149 """
149 150 Get a repository instance for the given path.
150 151 """
151 152 return self._create_repo(wire, create)
152 153
153 154
155 def patch_ui_message_output(baseui):
156 baseui.setconfig('ui', 'quiet', 'false')
157 output = io.BytesIO()
158
159 def write(data, **unused_kwargs):
160 output.write(data)
161
162 baseui.status = write
163 baseui.write = write
164 baseui.warn = write
165 baseui.debug = write
166
167 return baseui, output
168
169
154 170 class HgRemote(RemoteBase):
155 171
156 172 def __init__(self, factory):
157 173 self._factory = factory
158 174 self._bulk_methods = {
159 175 "affected_files": self.ctx_files,
160 176 "author": self.ctx_user,
161 177 "branch": self.ctx_branch,
162 178 "children": self.ctx_children,
163 179 "date": self.ctx_date,
164 180 "message": self.ctx_description,
165 181 "parents": self.ctx_parents,
166 182 "status": self.ctx_status,
167 183 "obsolete": self.ctx_obsolete,
168 184 "phase": self.ctx_phase,
169 185 "hidden": self.ctx_hidden,
170 186 "_file_paths": self.ctx_list,
171 187 }
172 188
173 189 def _get_ctx(self, repo, ref):
174 190 return get_ctx(repo, ref)
175 191
176 192 @reraise_safe_exceptions
177 193 def discover_hg_version(self):
178 194 from mercurial import util
179 195 return util.version()
180 196
181 197 @reraise_safe_exceptions
182 198 def is_empty(self, wire):
183 199 repo = self._factory.repo(wire)
184 200
185 201 try:
186 202 return len(repo) == 0
187 203 except Exception:
188 204 log.exception("failed to read object_store")
189 205 return False
190 206
191 207 @reraise_safe_exceptions
192 208 def archive_repo(self, archive_path, mtime, file_info, kind):
193 209 if kind == "tgz":
194 210 archiver = archival.tarit(archive_path, mtime, "gz")
195 211 elif kind == "tbz2":
196 212 archiver = archival.tarit(archive_path, mtime, "bz2")
197 213 elif kind == 'zip':
198 214 archiver = archival.zipit(archive_path, mtime)
199 215 else:
200 216 raise exceptions.ArchiveException()(
201 217 'Remote does not support: "%s".' % kind)
202 218
203 219 for f_path, f_mode, f_is_link, f_content in file_info:
204 220 archiver.addfile(f_path, f_mode, f_is_link, f_content)
205 221 archiver.done()
206 222
207 223 @reraise_safe_exceptions
208 224 def bookmarks(self, wire):
209 225 cache_on, context_uid, repo_id = self._cache_on(wire)
210 226 @self.region.conditional_cache_on_arguments(condition=cache_on)
211 227 def _bookmarks(_context_uid, _repo_id):
212 228 repo = self._factory.repo(wire)
213 229 return dict(repo._bookmarks)
214 230
215 231 return _bookmarks(context_uid, repo_id)
216 232
217 233 @reraise_safe_exceptions
218 234 def branches(self, wire, normal, closed):
219 235 cache_on, context_uid, repo_id = self._cache_on(wire)
220 236 @self.region.conditional_cache_on_arguments(condition=cache_on)
221 237 def _branches(_context_uid, _repo_id, _normal, _closed):
222 238 repo = self._factory.repo(wire)
223 239 iter_branches = repo.branchmap().iterbranches()
224 240 bt = {}
225 241 for branch_name, _heads, tip, is_closed in iter_branches:
226 242 if normal and not is_closed:
227 243 bt[branch_name] = tip
228 244 if closed and is_closed:
229 245 bt[branch_name] = tip
230 246
231 247 return bt
232 248
233 249 return _branches(context_uid, repo_id, normal, closed)
234 250
235 251 @reraise_safe_exceptions
236 252 def bulk_request(self, wire, commit_id, pre_load):
237 253 cache_on, context_uid, repo_id = self._cache_on(wire)
238 254 @self.region.conditional_cache_on_arguments(condition=cache_on)
239 255 def _bulk_request(_repo_id, _commit_id, _pre_load):
240 256 result = {}
241 257 for attr in pre_load:
242 258 try:
243 259 method = self._bulk_methods[attr]
244 260 result[attr] = method(wire, commit_id)
245 261 except KeyError as e:
246 262 raise exceptions.VcsException(e)(
247 263 'Unknown bulk attribute: "%s"' % attr)
248 264 return result
249 265
250 266 return _bulk_request(repo_id, commit_id, sorted(pre_load))
251 267
252 268 @reraise_safe_exceptions
253 269 def ctx_branch(self, wire, commit_id):
254 270 cache_on, context_uid, repo_id = self._cache_on(wire)
255 271 @self.region.conditional_cache_on_arguments(condition=cache_on)
256 272 def _ctx_branch(_repo_id, _commit_id):
257 273 repo = self._factory.repo(wire)
258 274 ctx = self._get_ctx(repo, commit_id)
259 275 return ctx.branch()
260 276 return _ctx_branch(repo_id, commit_id)
261 277
262 278 @reraise_safe_exceptions
263 279 def ctx_date(self, wire, commit_id):
264 280 cache_on, context_uid, repo_id = self._cache_on(wire)
265 281 @self.region.conditional_cache_on_arguments(condition=cache_on)
266 282 def _ctx_date(_repo_id, _commit_id):
267 283 repo = self._factory.repo(wire)
268 284 ctx = self._get_ctx(repo, commit_id)
269 285 return ctx.date()
270 286 return _ctx_date(repo_id, commit_id)
271 287
272 288 @reraise_safe_exceptions
273 289 def ctx_description(self, wire, revision):
274 290 repo = self._factory.repo(wire)
275 291 ctx = self._get_ctx(repo, revision)
276 292 return ctx.description()
277 293
278 294 @reraise_safe_exceptions
279 295 def ctx_files(self, wire, commit_id):
280 296 cache_on, context_uid, repo_id = self._cache_on(wire)
281 297 @self.region.conditional_cache_on_arguments(condition=cache_on)
282 298 def _ctx_files(_repo_id, _commit_id):
283 299 repo = self._factory.repo(wire)
284 300 ctx = self._get_ctx(repo, commit_id)
285 301 return ctx.files()
286 302
287 303 return _ctx_files(repo_id, commit_id)
288 304
289 305 @reraise_safe_exceptions
290 306 def ctx_list(self, path, revision):
291 307 repo = self._factory.repo(path)
292 308 ctx = self._get_ctx(repo, revision)
293 309 return list(ctx)
294 310
295 311 @reraise_safe_exceptions
296 312 def ctx_parents(self, wire, commit_id):
297 313 cache_on, context_uid, repo_id = self._cache_on(wire)
298 314 @self.region.conditional_cache_on_arguments(condition=cache_on)
299 315 def _ctx_parents(_repo_id, _commit_id):
300 316 repo = self._factory.repo(wire)
301 317 ctx = self._get_ctx(repo, commit_id)
302 318 return [parent.hex() for parent in ctx.parents()
303 319 if not (parent.hidden() or parent.obsolete())]
304 320
305 321 return _ctx_parents(repo_id, commit_id)
306 322
307 323 @reraise_safe_exceptions
308 324 def ctx_children(self, wire, commit_id):
309 325 cache_on, context_uid, repo_id = self._cache_on(wire)
310 326 @self.region.conditional_cache_on_arguments(condition=cache_on)
311 327 def _ctx_children(_repo_id, _commit_id):
312 328 repo = self._factory.repo(wire)
313 329 ctx = self._get_ctx(repo, commit_id)
314 330 return [child.hex() for child in ctx.children()
315 331 if not (child.hidden() or child.obsolete())]
316 332
317 333 return _ctx_children(repo_id, commit_id)
318 334
319 335 @reraise_safe_exceptions
320 336 def ctx_phase(self, wire, commit_id):
321 337 cache_on, context_uid, repo_id = self._cache_on(wire)
322 338 @self.region.conditional_cache_on_arguments(condition=cache_on)
323 339 def _ctx_phase(_context_uid, _repo_id, _commit_id):
324 340 repo = self._factory.repo(wire)
325 341 ctx = self._get_ctx(repo, commit_id)
326 342 # public=0, draft=1, secret=3
327 343 return ctx.phase()
328 344 return _ctx_phase(context_uid, repo_id, commit_id)
329 345
330 346 @reraise_safe_exceptions
331 347 def ctx_obsolete(self, wire, commit_id):
332 348 cache_on, context_uid, repo_id = self._cache_on(wire)
333 349 @self.region.conditional_cache_on_arguments(condition=cache_on)
334 350 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
335 351 repo = self._factory.repo(wire)
336 352 ctx = self._get_ctx(repo, commit_id)
337 353 return ctx.obsolete()
338 354 return _ctx_obsolete(context_uid, repo_id, commit_id)
339 355
340 356 @reraise_safe_exceptions
341 357 def ctx_hidden(self, wire, commit_id):
342 358 cache_on, context_uid, repo_id = self._cache_on(wire)
343 359 @self.region.conditional_cache_on_arguments(condition=cache_on)
344 360 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
345 361 repo = self._factory.repo(wire)
346 362 ctx = self._get_ctx(repo, commit_id)
347 363 return ctx.hidden()
348 364 return _ctx_hidden(context_uid, repo_id, commit_id)
349 365
350 366 @reraise_safe_exceptions
351 367 def ctx_substate(self, wire, revision):
352 368 repo = self._factory.repo(wire)
353 369 ctx = self._get_ctx(repo, revision)
354 370 return ctx.substate
355 371
356 372 @reraise_safe_exceptions
357 373 def ctx_status(self, wire, revision):
358 374 repo = self._factory.repo(wire)
359 375 ctx = self._get_ctx(repo, revision)
360 376 status = repo[ctx.p1().node()].status(other=ctx.node())
361 377 # object of status (odd, custom named tuple in mercurial) is not
362 378 # correctly serializable, we make it a list, as the underling
363 379 # API expects this to be a list
364 380 return list(status)
365 381
366 382 @reraise_safe_exceptions
367 383 def ctx_user(self, wire, revision):
368 384 repo = self._factory.repo(wire)
369 385 ctx = self._get_ctx(repo, revision)
370 386 return ctx.user()
371 387
372 388 @reraise_safe_exceptions
373 389 def check_url(self, url, config):
374 390 _proto = None
375 391 if '+' in url[:url.find('://')]:
376 392 _proto = url[0:url.find('+')]
377 393 url = url[url.find('+') + 1:]
378 394 handlers = []
379 395 url_obj = url_parser(url)
380 396 test_uri, authinfo = url_obj.authinfo()
381 397 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
382 398 url_obj.query = obfuscate_qs(url_obj.query)
383 399
384 400 cleaned_uri = str(url_obj)
385 401 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
386 402
387 403 if authinfo:
388 404 # create a password manager
389 405 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
390 406 passmgr.add_password(*authinfo)
391 407
392 408 handlers.extend((httpbasicauthhandler(passmgr),
393 409 httpdigestauthhandler(passmgr)))
394 410
395 411 o = urllib2.build_opener(*handlers)
396 412 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
397 413 ('Accept', 'application/mercurial-0.1')]
398 414
399 415 q = {"cmd": 'between'}
400 416 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
401 417 qs = '?%s' % urllib.urlencode(q)
402 418 cu = "%s%s" % (test_uri, qs)
403 419 req = urllib2.Request(cu, None, {})
404 420
405 421 try:
406 422 log.debug("Trying to open URL %s", cleaned_uri)
407 423 resp = o.open(req)
408 424 if resp.code != 200:
409 425 raise exceptions.URLError()('Return Code is not 200')
410 426 except Exception as e:
411 427 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
412 428 # means it cannot be cloned
413 429 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
414 430
415 431 # now check if it's a proper hg repo, but don't do it for svn
416 432 try:
417 433 if _proto == 'svn':
418 434 pass
419 435 else:
420 436 # check for pure hg repos
421 437 log.debug(
422 438 "Verifying if URL is a Mercurial repository: %s",
423 439 cleaned_uri)
424 440 ui = make_ui_from_config(config)
425 441 peer_checker = makepeer(ui, url)
426 442 peer_checker.lookup('tip')
427 443 except Exception as e:
428 444 log.warning("URL is not a valid Mercurial repository: %s",
429 445 cleaned_uri)
430 446 raise exceptions.URLError(e)(
431 447 "url [%s] does not look like an hg repo org_exc: %s"
432 448 % (cleaned_uri, e))
433 449
434 450 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
435 451 return True
436 452
437 453 @reraise_safe_exceptions
438 454 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
439 455 repo = self._factory.repo(wire)
440 456
441 457 if file_filter:
442 458 match_filter = match(file_filter[0], '', [file_filter[1]])
443 459 else:
444 460 match_filter = file_filter
445 461 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
446 462
447 463 try:
448 464 return "".join(patch.diff(
449 465 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
450 466 except RepoLookupError as e:
451 467 raise exceptions.LookupException(e)()
452 468
453 469 @reraise_safe_exceptions
454 470 def node_history(self, wire, revision, path, limit):
455 471 cache_on, context_uid, repo_id = self._cache_on(wire)
456 472 @self.region.conditional_cache_on_arguments(condition=cache_on)
457 473 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
458 474 repo = self._factory.repo(wire)
459 475
460 476 ctx = self._get_ctx(repo, revision)
461 477 fctx = ctx.filectx(path)
462 478
463 479 def history_iter():
464 480 limit_rev = fctx.rev()
465 481 for obj in reversed(list(fctx.filelog())):
466 482 obj = fctx.filectx(obj)
467 483 ctx = obj.changectx()
468 484 if ctx.hidden() or ctx.obsolete():
469 485 continue
470 486
471 487 if limit_rev >= obj.rev():
472 488 yield obj
473 489
474 490 history = []
475 491 for cnt, obj in enumerate(history_iter()):
476 492 if limit and cnt >= limit:
477 493 break
478 494 history.append(hex(obj.node()))
479 495
480 496 return [x for x in history]
481 497 return _node_history(context_uid, repo_id, revision, path, limit)
482 498
483 499 @reraise_safe_exceptions
484 500 def node_history_untill(self, wire, revision, path, limit):
485 501 cache_on, context_uid, repo_id = self._cache_on(wire)
486 502 @self.region.conditional_cache_on_arguments(condition=cache_on)
487 503 def _node_history_until(_context_uid, _repo_id):
488 504 repo = self._factory.repo(wire)
489 505 ctx = self._get_ctx(repo, revision)
490 506 fctx = ctx.filectx(path)
491 507
492 508 file_log = list(fctx.filelog())
493 509 if limit:
494 510 # Limit to the last n items
495 511 file_log = file_log[-limit:]
496 512
497 513 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
498 514 return _node_history_until(context_uid, repo_id, revision, path, limit)
499 515
500 516 @reraise_safe_exceptions
501 517 def fctx_annotate(self, wire, revision, path):
502 518 repo = self._factory.repo(wire)
503 519 ctx = self._get_ctx(repo, revision)
504 520 fctx = ctx.filectx(path)
505 521
506 522 result = []
507 523 for i, annotate_obj in enumerate(fctx.annotate(), 1):
508 524 ln_no = i
509 525 sha = hex(annotate_obj.fctx.node())
510 526 content = annotate_obj.text
511 527 result.append((ln_no, sha, content))
512 528 return result
513 529
514 530 @reraise_safe_exceptions
515 531 def fctx_node_data(self, wire, revision, path):
516 532 repo = self._factory.repo(wire)
517 533 ctx = self._get_ctx(repo, revision)
518 534 fctx = ctx.filectx(path)
519 535 return fctx.data()
520 536
521 537 @reraise_safe_exceptions
522 538 def fctx_flags(self, wire, commit_id, path):
523 539 cache_on, context_uid, repo_id = self._cache_on(wire)
524 540 @self.region.conditional_cache_on_arguments(condition=cache_on)
525 541 def _fctx_flags(_repo_id, _commit_id, _path):
526 542 repo = self._factory.repo(wire)
527 543 ctx = self._get_ctx(repo, commit_id)
528 544 fctx = ctx.filectx(path)
529 545 return fctx.flags()
530 546
531 547 return _fctx_flags(repo_id, commit_id, path)
532 548
533 549 @reraise_safe_exceptions
534 550 def fctx_size(self, wire, commit_id, path):
535 551 cache_on, context_uid, repo_id = self._cache_on(wire)
536 552 @self.region.conditional_cache_on_arguments(condition=cache_on)
537 553 def _fctx_size(_repo_id, _revision, _path):
538 554 repo = self._factory.repo(wire)
539 555 ctx = self._get_ctx(repo, commit_id)
540 556 fctx = ctx.filectx(path)
541 557 return fctx.size()
542 558 return _fctx_size(repo_id, commit_id, path)
543 559
544 560 @reraise_safe_exceptions
545 561 def get_all_commit_ids(self, wire, name):
546 562 cache_on, context_uid, repo_id = self._cache_on(wire)
547 563 @self.region.conditional_cache_on_arguments(condition=cache_on)
548 564 def _get_all_commit_ids(_context_uid, _repo_id, _name):
549 565 repo = self._factory.repo(wire)
550 566 repo = repo.filtered(name)
551 567 revs = map(lambda x: hex(x[7]), repo.changelog.index)
552 568 return revs
553 569 return _get_all_commit_ids(context_uid, repo_id, name)
554 570
555 571 @reraise_safe_exceptions
556 572 def get_config_value(self, wire, section, name, untrusted=False):
557 573 repo = self._factory.repo(wire)
558 574 return repo.ui.config(section, name, untrusted=untrusted)
559 575
560 576 @reraise_safe_exceptions
561 577 def is_large_file(self, wire, commit_id, path):
562 578 cache_on, context_uid, repo_id = self._cache_on(wire)
563 579 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 580 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
565 581 return largefiles.lfutil.isstandin(path)
566 582
567 583 return _is_large_file(context_uid, repo_id, commit_id, path)
568 584
569 585 @reraise_safe_exceptions
570 586 def is_binary(self, wire, revision, path):
571 587 cache_on, context_uid, repo_id = self._cache_on(wire)
572 588
573 589 @self.region.conditional_cache_on_arguments(condition=cache_on)
574 590 def _is_binary(_repo_id, _sha, _path):
575 591 repo = self._factory.repo(wire)
576 592 ctx = self._get_ctx(repo, revision)
577 593 fctx = ctx.filectx(path)
578 594 return fctx.isbinary()
579 595
580 596 return _is_binary(repo_id, revision, path)
581 597
582 598 @reraise_safe_exceptions
583 599 def in_largefiles_store(self, wire, sha):
584 600 repo = self._factory.repo(wire)
585 601 return largefiles.lfutil.instore(repo, sha)
586 602
587 603 @reraise_safe_exceptions
588 604 def in_user_cache(self, wire, sha):
589 605 repo = self._factory.repo(wire)
590 606 return largefiles.lfutil.inusercache(repo.ui, sha)
591 607
592 608 @reraise_safe_exceptions
593 609 def store_path(self, wire, sha):
594 610 repo = self._factory.repo(wire)
595 611 return largefiles.lfutil.storepath(repo, sha)
596 612
597 613 @reraise_safe_exceptions
598 614 def link(self, wire, sha, path):
599 615 repo = self._factory.repo(wire)
600 616 largefiles.lfutil.link(
601 617 largefiles.lfutil.usercachepath(repo.ui, sha), path)
602 618
    @reraise_safe_exceptions
    def localrepository(self, wire, create=False):
        # Instantiate (and, when ``create`` is true, create on disk) the
        # local repository described by ``wire``; return value is discarded.
        self._factory.repo(wire, create=create)
606 622
607 623 @reraise_safe_exceptions
608 624 def lookup(self, wire, revision, both):
609 625 cache_on, context_uid, repo_id = self._cache_on(wire)
610 626 @self.region.conditional_cache_on_arguments(condition=cache_on)
611 627 def _lookup(_context_uid, _repo_id, _revision, _both):
612 628
613 629 repo = self._factory.repo(wire)
614 630 rev = _revision
615 631 if isinstance(rev, int):
616 632 # NOTE(marcink):
617 633 # since Mercurial doesn't support negative indexes properly
618 634 # we need to shift accordingly by one to get proper index, e.g
619 635 # repo[-1] => repo[-2]
620 636 # repo[0] => repo[-1]
621 637 if rev <= 0:
622 638 rev = rev + -1
623 639 try:
624 640 ctx = self._get_ctx(repo, rev)
625 641 except (TypeError, RepoLookupError) as e:
626 642 e._org_exc_tb = traceback.format_exc()
627 643 raise exceptions.LookupException(e)(rev)
628 644 except LookupError as e:
629 645 e._org_exc_tb = traceback.format_exc()
630 646 raise exceptions.LookupException(e)(e.name)
631 647
632 648 if not both:
633 649 return ctx.hex()
634 650
635 651 ctx = repo[ctx.hex()]
636 652 return ctx.hex(), ctx.rev()
637 653
638 654 return _lookup(context_uid, repo_id, revision, both)
639 655
640 656 @reraise_safe_exceptions
641 657 def sync_push(self, wire, url):
642 658 if not self.check_url(url, wire['config']):
643 659 return
644 660
645 661 repo = self._factory.repo(wire)
646 662
647 663 # Disable any prompts for this repo
648 664 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
649 665
650 666 bookmarks = dict(repo._bookmarks).keys()
651 667 remote = peer(repo, {}, url)
652 668 # Disable any prompts for this remote
653 669 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
654 670
655 671 return exchange.push(
656 672 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
657 673
658 674 @reraise_safe_exceptions
659 675 def revision(self, wire, rev):
660 676 repo = self._factory.repo(wire)
661 677 ctx = self._get_ctx(repo, rev)
662 678 return ctx.rev()
663 679
    @reraise_safe_exceptions
    def rev_range(self, wire, commit_filter):
        """Resolve the revset ``commit_filter`` into a list of numeric revisions."""
        cache_on, context_uid, repo_id = self._cache_on(wire)

        @self.region.conditional_cache_on_arguments(condition=cache_on)
        def _rev_range(_context_uid, _repo_id, _filter):
            repo = self._factory.repo(wire)
            # NOTE: intentionally uses the closed-over ``commit_filter`` in its
            # original order, while the cache key below uses the sorted copy
            revisions = [rev for rev in revrange(repo, commit_filter)]
            return revisions

        # sorted() makes the cache key order-independent for equivalent filters
        return _rev_range(context_uid, repo_id, sorted(commit_filter))
675 691
676 692 @reraise_safe_exceptions
677 693 def rev_range_hash(self, wire, node):
678 694 repo = self._factory.repo(wire)
679 695
680 696 def get_revs(repo, rev_opt):
681 697 if rev_opt:
682 698 revs = revrange(repo, rev_opt)
683 699 if len(revs) == 0:
684 700 return (nullrev, nullrev)
685 701 return max(revs), min(revs)
686 702 else:
687 703 return len(repo) - 1, 0
688 704
689 705 stop, start = get_revs(repo, [node + ':'])
690 706 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
691 707 return revs
692 708
693 709 @reraise_safe_exceptions
694 710 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
695 711 other_path = kwargs.pop('other_path', None)
696 712
697 713 # case when we want to compare two independent repositories
698 714 if other_path and other_path != wire["path"]:
699 715 baseui = self._factory._create_config(wire["config"])
700 716 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
701 717 else:
702 718 repo = self._factory.repo(wire)
703 719 return list(repo.revs(rev_spec, *args))
704 720
    @reraise_safe_exceptions
    def verify(self, wire,):
        # Run mercurial's built-in repository integrity verification and
        # return the captured ui output as a string.
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])

        # redirect ui messages into a buffer instead of stdout
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui
        verify.verify(repo)
        return output.getvalue()
719 731
    @reraise_safe_exceptions
    def hg_update_cache(self, wire,):
        # Force a full refresh of mercurial's internal caches; returns the
        # captured ui output.
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        # redirect ui messages into a buffer instead of stdout
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui
        # both the working-dir lock and the store lock are taken for the rebuild
        with repo.wlock(), repo.lock():
            repo.updatecaches(full=True)

        return output.getvalue()
736 743
    @reraise_safe_exceptions
    def hg_rebuild_fn_cache(self, wire,):
        # Rebuild the repository's fncache via mercurial's repair module;
        # returns the captured ui output.
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        baseui, output = patch_ui_message_output(baseui)

        repo.ui = baseui

        repair.rebuildfncache(baseui, repo)

        return output.getvalue()
755
756 @reraise_safe_exceptions
738 757 def tags(self, wire):
739 758 cache_on, context_uid, repo_id = self._cache_on(wire)
740 759 @self.region.conditional_cache_on_arguments(condition=cache_on)
741 760 def _tags(_context_uid, _repo_id):
742 761 repo = self._factory.repo(wire)
743 762 return repo.tags()
744 763
745 764 return _tags(context_uid, repo_id)
746 765
747 766 @reraise_safe_exceptions
748 767 def update(self, wire, node=None, clean=False):
749 768 repo = self._factory.repo(wire)
750 769 baseui = self._factory._create_config(wire['config'])
751 770 commands.update(baseui, repo, node=node, clean=clean)
752 771
753 772 @reraise_safe_exceptions
754 773 def identify(self, wire):
755 774 repo = self._factory.repo(wire)
756 775 baseui = self._factory._create_config(wire['config'])
757 776 output = io.BytesIO()
758 777 baseui.write = output.write
759 778 # This is required to get a full node id
760 779 baseui.debugflag = True
761 780 commands.identify(baseui, repo, id=True)
762 781
763 782 return output.getvalue()
764 783
765 784 @reraise_safe_exceptions
766 785 def heads(self, wire, branch=None):
767 786 repo = self._factory.repo(wire)
768 787 baseui = self._factory._create_config(wire['config'])
769 788 output = io.BytesIO()
770 789
771 790 def write(data, **unused_kwargs):
772 791 output.write(data)
773 792
774 793 baseui.write = write
775 794 if branch:
776 795 args = [branch]
777 796 else:
778 797 args = []
779 798 commands.heads(baseui, repo, template='{node} ', *args)
780 799
781 800 return output.getvalue()
782 801
783 802 @reraise_safe_exceptions
784 803 def ancestor(self, wire, revision1, revision2):
785 804 repo = self._factory.repo(wire)
786 805 changelog = repo.changelog
787 806 lookup = repo.lookup
788 807 a = changelog.ancestor(lookup(revision1), lookup(revision2))
789 808 return hex(a)
790 809
791 810 @reraise_safe_exceptions
792 811 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
793 812 baseui = self._factory._create_config(wire["config"], hooks=hooks)
794 813 clone(baseui, source, dest, noupdate=not update_after_clone)
795 814
    @reraise_safe_exceptions
    def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
        """
        Create a commit fully in memory (no working copy) from the given
        file changes and metadata; return the new commit's hex id.
        """

        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        # new commits inherit the repo's publish setting as their phase
        publishing = baseui.configbool('phases', 'publish')
        if publishing:
            new_commit = 'public'
        else:
            new_commit = 'draft'

        def _filectxfn(_repo, ctx, path):
            """
            Marks given path as added/changed/removed in a given _repo. This is
            for internal mercurial commit function.
            """

            # check if this path is removed
            if path in removed:
                # returning None is a way to mark node for removal
                return None

            # check if this path is added
            for node in updated:
                if node['path'] == path:
                    return memfilectx(
                        _repo,
                        changectx=ctx,
                        path=node['path'],
                        data=node['content'],
                        islink=False,
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copysource=False)

            # a path listed in ``files`` must appear in ``removed`` or ``updated``
            raise exceptions.AbortException()(
                "Given path haven't been marked as added, "
                "changed or removed (%s)" % path)

        with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):

            commit_ctx = memctx(
                repo=repo,
                parents=parents,
                text=message,
                files=files,
                filectxfn=_filectxfn,
                user=user,
                date=(commit_time, commit_timezone),
                extra=extra)

            n = repo.commitctx(commit_ctx)
            new_id = hex(n)

        return new_id
850 869
851 870 @reraise_safe_exceptions
852 871 def pull(self, wire, url, commit_ids=None):
853 872 repo = self._factory.repo(wire)
854 873 # Disable any prompts for this repo
855 874 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
856 875
857 876 remote = peer(repo, {}, url)
858 877 # Disable any prompts for this remote
859 878 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
860 879
861 880 if commit_ids:
862 881 commit_ids = [bin(commit_id) for commit_id in commit_ids]
863 882
864 883 return exchange.pull(
865 884 repo, remote, heads=commit_ids, force=None).cgresult
866 885
867 886 @reraise_safe_exceptions
868 887 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
869 888 repo = self._factory.repo(wire)
870 889 baseui = self._factory._create_config(wire['config'], hooks=hooks)
871 890
872 891 # Mercurial internally has a lot of logic that checks ONLY if
873 892 # option is defined, we just pass those if they are defined then
874 893 opts = {}
875 894 if bookmark:
876 895 opts['bookmark'] = bookmark
877 896 if branch:
878 897 opts['branch'] = branch
879 898 if revision:
880 899 opts['rev'] = revision
881 900
882 901 commands.pull(baseui, repo, source, **opts)
883 902
884 903 @reraise_safe_exceptions
885 904 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
886 905 repo = self._factory.repo(wire)
887 906 baseui = self._factory._create_config(wire['config'], hooks=hooks)
888 907 commands.push(baseui, repo, dest=dest_path, rev=revisions,
889 908 new_branch=push_branches)
890 909
891 910 @reraise_safe_exceptions
892 911 def strip(self, wire, revision, update, backup):
893 912 repo = self._factory.repo(wire)
894 913 ctx = self._get_ctx(repo, revision)
895 914 hgext_strip(
896 915 repo.baseui, repo, ctx.node(), update=update, backup=backup)
897 916
898 917 @reraise_safe_exceptions
899 918 def get_unresolved_files(self, wire):
900 919 repo = self._factory.repo(wire)
901 920
902 921 log.debug('Calculating unresolved files for repo: %s', repo)
903 922 output = io.BytesIO()
904 923
905 924 def write(data, **unused_kwargs):
906 925 output.write(data)
907 926
908 927 baseui = self._factory._create_config(wire['config'])
909 928 baseui.write = write
910 929
911 930 commands.resolve(baseui, repo, list=True)
912 931 unresolved = output.getvalue().splitlines(0)
913 932 return unresolved
914 933
915 934 @reraise_safe_exceptions
916 935 def merge(self, wire, revision):
917 936 repo = self._factory.repo(wire)
918 937 baseui = self._factory._create_config(wire['config'])
919 938 repo.ui.setconfig('ui', 'merge', 'internal:dump')
920 939
921 940 # In case of sub repositories are used mercurial prompts the user in
922 941 # case of merge conflicts or different sub repository sources. By
923 942 # setting the interactive flag to `False` mercurial doesn't prompt the
924 943 # used but instead uses a default value.
925 944 repo.ui.setconfig('ui', 'interactive', False)
926 945 commands.merge(baseui, repo, rev=revision)
927 946
928 947 @reraise_safe_exceptions
929 948 def merge_state(self, wire):
930 949 repo = self._factory.repo(wire)
931 950 repo.ui.setconfig('ui', 'merge', 'internal:dump')
932 951
933 952 # In case of sub repositories are used mercurial prompts the user in
934 953 # case of merge conflicts or different sub repository sources. By
935 954 # setting the interactive flag to `False` mercurial doesn't prompt the
936 955 # used but instead uses a default value.
937 956 repo.ui.setconfig('ui', 'interactive', False)
938 957 ms = hg_merge.mergestate(repo)
939 958 return [x for x in ms.unresolved()]
940 959
941 960 @reraise_safe_exceptions
942 961 def commit(self, wire, message, username, close_branch=False):
943 962 repo = self._factory.repo(wire)
944 963 baseui = self._factory._create_config(wire['config'])
945 964 repo.ui.setconfig('ui', 'username', username)
946 965 commands.commit(baseui, repo, message=message, close_branch=close_branch)
947 966
948 967 @reraise_safe_exceptions
949 968 def rebase(self, wire, source=None, dest=None, abort=False):
950 969 repo = self._factory.repo(wire)
951 970 baseui = self._factory._create_config(wire['config'])
952 971 repo.ui.setconfig('ui', 'merge', 'internal:dump')
953 972 # In case of sub repositories are used mercurial prompts the user in
954 973 # case of merge conflicts or different sub repository sources. By
955 974 # setting the interactive flag to `False` mercurial doesn't prompt the
956 975 # used but instead uses a default value.
957 976 repo.ui.setconfig('ui', 'interactive', False)
958 977 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
959 978
960 979 @reraise_safe_exceptions
961 980 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
962 981 repo = self._factory.repo(wire)
963 982 ctx = self._get_ctx(repo, revision)
964 983 node = ctx.node()
965 984
966 985 date = (tag_time, tag_timezone)
967 986 try:
968 987 hg_tag.tag(repo, name, node, message, local, user, date)
969 988 except Abort as e:
970 989 log.exception("Tag operation aborted")
971 990 # Exception can contain unicode which we convert
972 991 raise exceptions.AbortException(e)(repr(e))
973 992
974 993 @reraise_safe_exceptions
975 994 def bookmark(self, wire, bookmark, revision=None):
976 995 repo = self._factory.repo(wire)
977 996 baseui = self._factory._create_config(wire['config'])
978 997 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
979 998
    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        """Intentionally a no-op: no special hooks are needed for Mercurial."""
        # we don't need any special hooks for Mercurial
        pass
984 1003
985 1004 @reraise_safe_exceptions
986 1005 def get_hooks_info(self, wire):
987 1006 return {
988 1007 'pre_version': vcsserver.__version__,
989 1008 'post_version': vcsserver.__version__,
990 1009 }
@@ -1,75 +1,79 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Mercurial libs compatibility
20 20 """
21 21
22 22 import mercurial
23 23 from mercurial import demandimport
# patch demandimport, due to a bug in mercurial where it always triggers
# demandimport.enable()
26 26 demandimport.enable = lambda *args, **kwargs: 1
27 27
28 28 from mercurial import ui
29 29 from mercurial import patch
30 30 from mercurial import config
31 31 from mercurial import extensions
32 32 from mercurial import scmutil
33 33 from mercurial import archival
34 34 from mercurial import discovery
35 35 from mercurial import unionrepo
36 36 from mercurial import localrepo
37 37 from mercurial import merge as hg_merge
38 38 from mercurial import subrepo
39 39 from mercurial import subrepoutil
40 40 from mercurial import tags as hg_tag
41 41
42 42 from mercurial.commands import clone, nullid, pull
43 43 from mercurial.context import memctx, memfilectx
44 44 from mercurial.error import (
45 45 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
46 46 RequirementError, ProgrammingError)
47 47 from mercurial.hgweb import hgweb_mod
48 48 from mercurial.localrepo import instance
49 49 from mercurial.match import match
50 50 from mercurial.mdiff import diffopts
51 51 from mercurial.node import bin, hex
52 52 from mercurial.encoding import tolocal
53 53 from mercurial.discovery import findcommonoutgoing
54 54 from mercurial.hg import peer
55 55 from mercurial.httppeer import makepeer
56 56 from mercurial.util import url as hg_url
57 57 from mercurial.scmutil import revrange, revsymbol
58 58 from mercurial.node import nullrev
59 59 from mercurial import exchange
60 60 from hgext import largefiles
61 61
# those auth handlers are patched for a python 2.6.5 bug causing
# infinite looping when given invalid resources
64 64 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
65 65
66 66
def get_ctx(repo, ref):
    """
    Resolve ``ref`` to a changectx, falling back to the slower but
    backward-compatible ``revsymbol`` lookup when the direct ``repo[ref]``
    access fails.
    """
    try:
        ctx = repo[ref]
    except ProgrammingError:
        # we're unable to find the rev using a regular lookup, we fallback
        # to slower, but backward compat revsymbol usage
        ctx = revsymbol(repo, ref)
    except (LookupError, RepoLookupError):
        # Similar case as above but only for refs that are not numeric
        if isinstance(ref, (int, long)):
            raise
        ctx = revsymbol(repo, ref)
    return ctx
@@ -1,722 +1,729 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2019 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 23 import logging
24 24 import collections
25 25 import importlib
26 26 import base64
27 27
28 28 from httplib import HTTPConnection
29 29
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33 import simplejson as json
34 34
35 35 from vcsserver import exceptions, subprocessio, settings
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
class HooksHttpClient(object):
    """Invokes RhodeCode hooks by POSTing a json payload to ``hooks_uri``."""
    connection = None

    def __init__(self, hooks_uri):
        self.hooks_uri = hooks_uri

    def __call__(self, method, extras):
        """POST the serialized call, then decode and return the json reply."""
        connection = HTTPConnection(self.hooks_uri)
        body = self._serialize(method, extras)
        try:
            connection.request('POST', '/', body)
        except Exception:
            log.error('Connection failed on %s', connection)
            raise
        response = connection.getresponse()
        response_data = response.read()
        try:
            return json.loads(response_data)
        except Exception:
            log.exception('Failed to decode hook response json data. '
                          'response_code:%s, raw_data:%s',
                          response.status, response_data)
            raise

    def _serialize(self, hook_name, extras):
        """Build the json request body for a hook call."""
        payload = {
            'method': hook_name,
            'extras': extras,
        }
        return json.dumps(payload)
72 72
73 73
class HooksDummyClient(object):
    """Resolves hooks from an importable python module instead of over HTTP."""

    def __init__(self, hooks_module):
        # the module is imported once and reused for every hook call
        self._hooks_module = importlib.import_module(hooks_module)

    def __call__(self, hook_name, extras):
        with self._hooks_module.Hooks() as hooks:
            return getattr(hooks, hook_name)(extras)
81 81
82 82
class HooksShadowRepoClient(object):
    """No-op hook client used for shadow repositories: always reports success."""

    def __call__(self, hook_name, extras):
        return {'output': '', 'status': 0}
87 87
88 88
class RemoteMessageWriter(object):
    """Writer base class."""
    def write(self, message):
        # subclasses must deliver ``message`` to their specific vcs client
        raise NotImplementedError()
93 93
94 94
class HgMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to mercurial clients."""

    def __init__(self, ui):
        self.ui = ui

    def write(self, message):
        # TODO: Check why the quiet flag is set by default.
        previous_quiet = self.ui.quiet
        self.ui.quiet = False
        self.ui.status(message.encode('utf-8'))
        self.ui.quiet = previous_quiet
107 107
108 108
class GitMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to git clients."""

    def __init__(self, stdout=None):
        # fall back to the process stdout when no stream is supplied
        self.stdout = stdout or sys.stdout

    def write(self, message):
        encoded = message.encode('utf-8')
        self.stdout.write(encoded)
117 117
118 118
class SvnMessageWriter(RemoteMessageWriter):
    """Writer that knows how to send messages to svn clients."""

    def __init__(self, stderr=None):
        # SVN needs data sent to stderr for back-to-client messaging
        self.stderr = stderr or sys.stderr

    def write(self, message):
        encoded = message.encode('utf-8')
        self.stderr.write(encoded)
128 128
129 129
130 130 def _handle_exception(result):
131 131 exception_class = result.get('exception')
132 132 exception_traceback = result.get('exception_traceback')
133 133
134 134 if exception_traceback:
135 135 log.error('Got traceback from remote call:%s', exception_traceback)
136 136
137 137 if exception_class == 'HTTPLockedRC':
138 138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
139 139 elif exception_class == 'HTTPBranchProtected':
140 140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
141 141 elif exception_class == 'RepositoryError':
142 142 raise exceptions.VcsException()(*result['exception_args'])
143 143 elif exception_class:
144 144 raise Exception('Got remote exception "%s" with args "%s"' %
145 145 (exception_class, result['exception_args']))
146 146
147 147
def _get_hooks_client(extras):
    """Pick the hook transport (http, shadow no-op, or local module) from ``extras``."""
    if extras.get('hooks_uri'):
        return HooksHttpClient(extras['hooks_uri'])
    if extras.get('is_shadow_repo'):
        return HooksShadowRepoClient()
    return HooksDummyClient(extras['hooks_module'])
157 157
158 158
def _call_hook(hook_name, extras, writer):
    """Invoke ``hook_name`` through the configured client, emit its output, return its status."""
    client = _get_hooks_client(extras)
    log.debug('Hooks, using client:%s', client)
    result = client(hook_name, extras)
    log.debug('Hooks got result: %s', result)

    # raises locally when the remote side reported a failure
    _handle_exception(result)
    writer.write(result['output'])

    return result['status']
169 169
170 170
171 171 def _extras_from_ui(ui):
172 172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
173 173 if not hook_data:
174 174 # maybe it's inside environ ?
175 175 env_hook_data = os.environ.get('RC_SCM_DATA')
176 176 if env_hook_data:
177 177 hook_data = env_hook_data
178 178
179 179 extras = {}
180 180 if hook_data:
181 181 extras = json.loads(hook_data)
182 182 return extras
183 183
184 184
def _rev_range_hash(repo, node, check_heads=False):
    """
    Collect ``(commit_id, branch)`` for every revision from ``node`` up to
    the end of the changelog; optionally also compute the multiple-heads
    report via ``_check_heads``.
    """
    from vcsserver.hgcompat import get_ctx

    commits = []
    revs = []
    start = get_ctx(repo, node).rev()
    end = len(repo)
    for rev in range(start, end):
        revs.append(rev)
        ctx = get_ctx(repo, rev)
        commit_id = mercurial.node.hex(ctx.node())
        branch = ctx.branch()
        commits.append((commit_id, branch))

    parent_heads = []
    if check_heads:
        parent_heads = _check_heads(repo, start, end, revs)
    return commits, parent_heads
203 203
204 204
def _check_heads(repo, start, end, commits):
    """
    For the incoming revisions ``commits`` (numeric revs in [start, end)),
    detect whether any touched branch would end up with more than one head;
    return the offending heads, or [] when every branch stays single-headed.
    """
    from vcsserver.hgcompat import get_ctx
    changelog = repo.changelog
    parents = set()

    # collect pre-existing parents (revs below ``start``) of the new commits
    for new_rev in commits:
        for p in changelog.parentrevs(new_rev):
            if p == mercurial.node.nullrev:
                continue
            if p < start:
                parents.add(p)

    for p in parents:
        branch = get_ctx(repo, p).branch()
        # The heads descending from that parent, on the same branch
        parent_heads = set([p])
        reachable = set([p])
        for x in xrange(p + 1, end):
            if get_ctx(repo, x).branch() != branch:
                continue
            for pp in changelog.parentrevs(x):
                if pp in reachable:
                    reachable.add(x)
                    parent_heads.discard(pp)
                    parent_heads.add(x)
        # More than one head? Suggest merging
        if len(parent_heads) > 1:
            return list(parent_heads)

    return []
235 235
236 236
237 237 def _get_git_env():
238 238 env = {}
239 239 for k, v in os.environ.items():
240 240 if k.startswith('GIT'):
241 241 env[k] = v
242 242
243 243 # serialized version
244 244 return [(k, v) for k, v in env.items()]
245 245
246 246
247 247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
248 248 env = {}
249 249 for k, v in os.environ.items():
250 250 if k.startswith('HG'):
251 251 env[k] = v
252 252
253 253 env['HG_NODE'] = old_rev
254 254 env['HG_NODE_LAST'] = new_rev
255 255 env['HG_TXNID'] = txnid
256 256 env['HG_PENDING'] = repo_path
257 257
258 258 return [(k, v) for k, v in env.items()]
259 259
260 260
def repo_size(ui, repo, **kwargs):
    """Trigger the server-side repo_size hook."""
    return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
264 264
265 265
def pre_pull(ui, repo, **kwargs):
    """Mercurial pre-pull hook: forward to the RhodeCode ``pre_pull`` hook."""
    return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
269 269
270 270
def pre_pull_ssh(ui, repo, **kwargs):
    """Run ``pre_pull`` only for SSH-initiated pulls; otherwise succeed."""
    extras = _extras_from_ui(ui)
    if not (extras and extras.get('SSH')):
        return 0
    return pre_pull(ui, repo, **kwargs)
276 276
277 277
def post_pull(ui, repo, **kwargs):
    """Mercurial post-pull hook: forward to the RhodeCode ``post_pull`` hook."""
    return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
281 281
282 282
def post_pull_ssh(ui, repo, **kwargs):
    """Run ``post_pull`` only for SSH-initiated pulls; otherwise succeed."""
    extras = _extras_from_ui(ui)
    if not (extras and extras.get('SSH')):
        return 0
    return post_pull(ui, repo, **kwargs)
288 288
289 289
def pre_push(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook.

    For ``pretxnchangegroup`` invocations, groups the incoming commits by
    branch, builds one ref entry per branch (with force-push head info and
    a serialized HG environment), and forwards everything to the RhodeCode
    ``pre_push`` hook.
    """
    extras = _extras_from_ui(ui)
    detect_force_push = extras.get('detect_force_push')

    rev_data = []
    if node and kwargs.get('hooktype') == 'pretxnchangegroup':
        branches = collections.defaultdict(list)
        commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
        for commit_id, branch in commits:
            branches[branch].append(commit_id)

        for branch, branch_commits in branches.items():
            # node_last is only set on newer mercurial versions; fall back
            # to the first incoming commit of the branch otherwise.
            old_rev = kwargs.get('node_last') or branch_commits[0]
            rev_data.append({
                'total_commits': len(branch_commits),
                'old_rev': old_rev,
                'new_rev': branch_commits[-1],
                'ref': '',
                'type': 'branch',
                'name': branch,
            })

        for push_ref in rev_data:
            push_ref['multiple_heads'] = _heads

            repo_path = os.path.join(
                extras.get('repo_store', ''), extras.get('repository', ''))
            push_ref['hg_env'] = _get_hg_env(
                old_rev=push_ref['old_rev'],
                new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
                repo_path=repo_path)

    extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
    extras['commit_ids'] = rev_data

    return _call_hook('pre_push', extras, HgMessageWriter(ui))
329 329
330 330
def pre_push_ssh(ui, repo, node=None, **kwargs):
    """Run ``pre_push`` only for SSH-initiated pushes; otherwise succeed."""
    if _extras_from_ui(ui).get('SSH'):
        return pre_push(ui, repo, node, **kwargs)
    return 0
337 337
338 338
def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
    """
    Mercurial pre_push hook for SSH.

    Allows the push only when the SSH session carries write or admin
    permission on the repository; returns a non-zero code otherwise.
    """
    extras = _extras_from_ui(ui)
    if not extras.get('SSH'):
        return 0

    permission = extras['SSH_PERMISSIONS']
    if permission in ('repository.write', 'repository.admin'):
        return 0

    # non-zero ret code denies the push
    return 1
354 354
355 355
def post_push(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook.

    Collects the pushed commit ids, touched branches, and any bookmarks
    stashed on ``ui`` by ``key_push``, then forwards them to the RhodeCode
    ``post_push`` hook.
    """
    extras = _extras_from_ui(ui)

    commit_ids = []
    branches = []
    tags = []

    commits, _heads = _rev_range_hash(repo, node)
    for commit_id, branch in commits:
        commit_ids.append(commit_id)
        if branch not in branches:
            branches.append(branch)

    # key_push stores pushed bookmarks on the ui object for us to pick up here
    bookmarks = getattr(ui, '_rc_pushkey_branches', [])

    extras['hook_type'] = kwargs.get('hooktype', 'post_push')
    extras['commit_ids'] = commit_ids
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': bookmarks,
        'tags': tags
    }

    return _call_hook('post_push', extras, HgMessageWriter(ui))
385 385
386 386
def post_push_ssh(ui, repo, node, **kwargs):
    """Run ``post_push`` only for SSH-initiated pushes; otherwise succeed."""
    extras = _extras_from_ui(ui)
    if extras.get('SSH'):
        return post_push(ui, repo, node, **kwargs)
    return 0
394 394
395 395
def key_push(ui, repo, **kwargs):
    """Mercurial pushkey hook: stash pushed bookmarks on ``ui`` for post_push."""
    from vcsserver.hgcompat import get_ctx
    if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
        # store new bookmarks in our UI object propagated later to post_push
        ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
    return
402 402
403 403
# Backward-compatible aliases: already-installed hook scripts may still
# reference the old log_* names, which map onto the post_* implementations.
log_pull_action = post_pull
log_push_action = post_push
409 409
410 410
def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name kept for backward compatibility; intentionally a no-op.

    Only needed while installed git hooks have not yet been upgraded.
    """
418 418
419 419
def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name kept for backward compatibility; intentionally a no-op.

    Only needed while installed git hooks have not yet been upgraded.
    """
427 427
428 428
429 429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
430 430
431 431
def git_pre_pull(extras):
    """
    Pre pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        return HookResponse(0, '')

    output = io.BytesIO()
    try:
        status = _call_hook('pre_pull', extras, GitMessageWriter(output))
    except Exception as error:
        # 128 mirrors git's "fatal" exit code convention
        status = 128
        output.write('ERROR: %s\n' % str(error))

    return HookResponse(status, output.getvalue())
453 453
454 454
def git_post_pull(extras):
    """
    Post pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        return HookResponse(0, '')

    output = io.BytesIO()
    try:
        status = _call_hook('post_pull', extras, GitMessageWriter(output))
    except Exception as error:
        # 128 mirrors git's "fatal" exit code convention
        status = 128
        output.write('ERROR: %s\n' % error)

    return HookResponse(status, output.getvalue())
476 476
477 477
478 478 def _parse_git_ref_lines(revision_lines):
479 479 rev_data = []
480 480 for revision_line in revision_lines or []:
481 481 old_rev, new_rev, ref = revision_line.strip().split(' ')
482 482 ref_data = ref.split('/', 2)
483 483 if ref_data[1] in ('tags', 'heads'):
484 484 rev_data.append({
485 485 # NOTE(marcink):
486 486 # we're unable to tell total_commits for git at this point
487 487 # but we set the variable for consistency with GIT
488 488 'total_commits': -1,
489 489 'old_rev': old_rev,
490 490 'new_rev': new_rev,
491 491 'ref': ref,
492 492 'type': ref_data[1],
493 493 'name': ref_data[2],
494 494 })
495 495 return rev_data
496 496
497 497
def git_pre_receive(unused_repo_path, revision_lines, env):
    """
    Pre push hook.

    :param revision_lines: raw ``<old> <new> <ref>`` lines from git stdin
    :param env: process environment carrying serialized RC_SCM_DATA extras

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    rev_data = _parse_git_ref_lines(revision_lines)
    if 'push' not in extras['hooks']:
        return 0

    empty_commit_id = '0' * 40
    detect_force_push = extras.get('detect_force_push')

    for push_ref in rev_data:
        # store our git-env which holds the temp store
        push_ref['git_env'] = _get_git_env()
        push_ref['pruned_sha'] = ''

        is_new_branch = push_ref['old_rev'] == empty_commit_id
        is_delete_branch = push_ref['new_rev'] == empty_commit_id
        check_force_push = (
            detect_force_push
            and push_ref['type'] == 'heads'
            and not (is_new_branch or is_delete_branch))
        if check_force_push:
            # commits reachable from old_rev but not from new_rev indicate
            # history was rewritten, i.e. this was a forced push
            cmd = [settings.GIT_EXECUTABLE, 'rev-list',
                   push_ref['old_rev'], '^{}'.format(push_ref['new_rev'])]
            stdout, stderr = subprocessio.run_command(
                cmd, env=os.environ.copy())
            if stdout:
                push_ref['pruned_sha'] = stdout.splitlines()

    extras['hook_type'] = 'pre_receive'
    extras['commit_ids'] = rev_data
    return _call_hook('pre_push', extras, GitMessageWriter())
540 540
541 541
def git_post_receive(unused_repo_path, revision_lines, env):
    """
    Post push hook.

    Resolves the pushed refs into concrete commit ids (handling new,
    updated and deleted branches as well as tags), optionally repairs the
    repository HEAD, and forwards everything to the RhodeCode
    ``post_push`` hook.

    :param revision_lines: raw ``<old> <new> <ref>`` lines from git stdin
    :param env: process environment carrying serialized RC_SCM_DATA extras

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    if 'push' not in extras['hooks']:
        return 0

    rev_data = _parse_git_ref_lines(revision_lines)

    git_revs = []

    # N.B.(skreft): it is ok to just call git, as git before calling a
    # subcommand sets the PATH environment variable so that it point to the
    # correct version of the git executable.
    empty_commit_id = '0' * 40
    branches = []
    tags = []
    for push_ref in rev_data:
        type_ = push_ref['type']

        if type_ == 'heads':
            if push_ref['old_rev'] == empty_commit_id:
                # starting new branch case
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                # Fix up head revision if needed: a bare repo without a
                # valid HEAD gets its default branch pointed at this ref
                cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
                try:
                    subprocessio.run_command(cmd, env=os.environ.copy())
                except Exception:
                    cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
                           'refs/heads/%s' % push_ref['name']]
                    print("Setting default branch to %s" % push_ref['name'])
                    subprocessio.run_command(cmd, env=os.environ.copy())

                # list commits reachable from the new ref but from no other
                # head, so only genuinely new commits are reported
                cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
                       '--format=%(refname)', 'refs/heads/*']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                heads = stdout
                heads = heads.replace(push_ref['ref'], '')
                heads = ' '.join(head for head
                                 in heads.splitlines() if head) or '.'
                cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
                       '--pretty=format:%H', '--', push_ref['new_rev'],
                       '--not', heads]
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
            elif push_ref['new_rev'] == empty_commit_id:
                # delete branch case
                git_revs.append('delete_branch=>%s' % push_ref['name'])
            else:
                # regular branch update: every commit in old..new
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                cmd = [settings.GIT_EXECUTABLE, 'log',
                       '{old_rev}..{new_rev}'.format(**push_ref),
                       '--reverse', '--pretty=format:%H']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
        elif type_ == 'tags':
            if push_ref['name'] not in tags:
                tags.append(push_ref['name'])
            git_revs.append('tag=>%s' % push_ref['name'])

    extras['hook_type'] = 'post_receive'
    extras['commit_ids'] = git_revs
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, GitMessageWriter())
        except Exception:
            # repo_size is best-effort; was a bare ``except:`` which also
            # swallowed SystemExit/KeyboardInterrupt — narrowed to Exception
            # (consistent with svn_post_commit)
            pass

    return _call_hook('post_push', extras, GitMessageWriter())
632 632
633 633
def _get_extras_from_txn_id(path, txn_id):
    """
    Read RhodeCode extras stored as the ``rc-scm-extras`` revprop of an
    in-flight SVN transaction; returns {} when extraction fails.
    """
    extras = {}
    cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
           '-t', txn_id,
           '--revprop', path, 'rc-scm-extras']
    try:
        stdout, stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        # the property is stored urlsafe-base64-encoded JSON
        extras = json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from txn_id')

    return extras
647 647
648 648
def _get_extras_from_commit_id(commit_id, path):
    """
    Read RhodeCode extras stored as the ``rc-scm-extras`` revprop of a
    committed SVN revision; returns {} when extraction fails.
    """
    extras = {}
    cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
           '-r', commit_id,
           '--revprop', path, 'rc-scm-extras']
    try:
        stdout, stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        # the property is stored urlsafe-base64-encoded JSON
        extras = json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from commit_id')

    return extras
662 662
663 663
def svn_pre_commit(repo_path, commit_data, env):
    """
    SVN pre-commit hook: forward the pending transaction to the RhodeCode
    ``pre_push`` hook. Returns 0 when no extras can be resolved.
    """
    path, txn_id = commit_data
    branches = []
    tags = []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_txn_id(path, txn_id)
    if not extras:
        return 0

    extras['hook_type'] = 'pre_commit'
    extras['commit_ids'] = [txn_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'total_commits': 1,
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    return _call_hook('pre_push', extras, SvnMessageWriter())
688 688
689 689
def svn_post_commit(repo_path, commit_data, env):
    """
    SVN post-commit hook: forward the committed revision to the RhodeCode
    ``post_push`` hook.

    ``commit_data`` is ``(path, rev, txn_id)``; older hook installs pass a
    legacy 2-tuple ``(path, rev)`` without the transaction id.
    """
    if len(commit_data) == 3:
        path, commit_id, txn_id = commit_data
    elif len(commit_data) == 2:
        log.error('Failed to extract txn_id from commit_data using legacy method. '
                  'Some functionality might be limited')
        path, commit_id = commit_data
        txn_id = None

    branches = []
    tags = []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_commit_id(commit_id, path)
    if not extras:
        return 0

    extras['hook_type'] = 'post_commit'
    extras['commit_ids'] = [commit_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
        'total_commits': 1,
    }

    if 'repo_size' in extras['hooks']:
        try:
            _call_hook('repo_size', extras, SvnMessageWriter())
        except Exception:
            # repo_size reporting is best-effort
            pass

    return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,89 +1,110 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2019 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import logging
18 18 import hashlib
19 19
20 20 log = logging.getLogger(__name__)
21 21
22 22
def safe_int(val, default=None):
    """
    Coerce ``val`` to int; return ``default`` when conversion fails.

    :param val: value to convert
    :param default: fallback returned on ValueError/TypeError
    """
    try:
        return int(val)
    except (ValueError, TypeError):
        return default
38 38
39 39
def safe_str(unicode_, to_encoding=None):
    """
    safe str function. Does few trick to turn unicode_ into string

    :param unicode_: unicode to encode
    :param to_encoding: encode to this type UTF8 default
    :rtype: str
    :returns: str object
    """
    # NOTE: Python 2 semantics (basestring); default kept out of the
    # signature to avoid a shared mutable default argument.
    to_encoding = to_encoding or ['utf8']

    # non-string inputs are simply stringified
    if not isinstance(unicode_, basestring):
        return str(unicode_)

    # already a byte string: nothing to do
    if isinstance(unicode_, str):
        return unicode_

    if not isinstance(to_encoding, (list, tuple)):
        to_encoding = [to_encoding]

    # try each candidate encoding in order
    for enc in to_encoding:
        try:
            return unicode_.encode(enc)
        except UnicodeEncodeError:
            pass

    # last resort: first encoding with unencodable characters replaced
    return unicode_.encode(to_encoding[0], 'replace')
67
68
def safe_unicode(str_, from_encoding=None):
    """
    safe unicode function. Does few trick to turn str_ into unicode

    :param str_: string to decode
    :param from_encoding: encode from this type UTF8 default
    :rtype: unicode
    :returns: unicode object
    """
    # NOTE: Python 2 semantics (unicode builtin); default kept out of the
    # signature to avoid a shared mutable default argument.
    from_encoding = from_encoding or ['utf8']

    # already unicode: nothing to do
    if isinstance(str_, unicode):
        return str_

    if not isinstance(from_encoding, (list, tuple)):
        from_encoding = [from_encoding]

    # cheap attempt first: plain ascii-compatible conversion
    try:
        return unicode(str_)
    except UnicodeDecodeError:
        pass

    # try each candidate encoding in order
    for enc in from_encoding:
        try:
            return unicode(str_, enc)
        except UnicodeDecodeError:
            pass

    # last resort: first encoding with undecodable bytes replaced
    return unicode(str_, from_encoding[0], 'replace')
77 98
78 99
class AttributeDict(dict):
    """dict subclass exposing keys as attributes; missing keys read as None."""

    def __getattr__(self, attr):
        # attribute access falls back to None instead of raising KeyError
        return self.get(attr, None)

    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
84 105
85 106
def sha1(val):
    """Return the hex SHA-1 digest of ``val``."""
    return hashlib.sha1(val).hexdigest()
88 109
89 110
General Comments 0
You need to be logged in to leave comments. Login now