@@ -1,732 +1,742 b''
|
1 | 1 | # RhodeCode VCSServer provides access to different vcs backends via network. |
|
2 | 2 | # Copyright (C) 2014-2019 RhodeCode GmbH |
|
3 | 3 | # |
|
4 | 4 | # This program is free software; you can redistribute it and/or modify |
|
5 | 5 | # it under the terms of the GNU General Public License as published by |
|
6 | 6 | # the Free Software Foundation; either version 3 of the License, or |
|
7 | 7 | # (at your option) any later version. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU General Public License |
|
15 | 15 | # along with this program; if not, write to the Free Software Foundation, |
|
16 | 16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
|
17 | 17 | import collections |
|
18 | 18 | import logging |
|
19 | 19 | import os |
|
20 | 20 | import posixpath as vcspath |
|
21 | 21 | import re |
|
22 | 22 | import stat |
|
23 | 23 | import traceback |
|
24 | 24 | import urllib |
|
25 | 25 | import urllib2 |
|
26 | 26 | from functools import wraps |
|
27 | 27 | |
|
28 | 28 | import more_itertools |
|
29 | 29 | from dulwich import index, objects |
|
30 | 30 | from dulwich.client import HttpGitClient, LocalGitClient |
|
31 | 31 | from dulwich.errors import ( |
|
32 | 32 | NotGitRepository, ChecksumMismatch, WrongObjectException, |
|
33 | 33 | MissingCommitError, ObjectMissing, HangupException, |
|
34 | 34 | UnexpectedCommandError) |
|
35 | 35 | from dulwich.repo import Repo as DulwichRepo, Tag |
|
36 | 36 | from dulwich.server import update_server_info |
|
37 | 37 | |
|
38 | 38 | from vcsserver import exceptions, settings, subprocessio |
|
39 | 39 | from vcsserver.utils import safe_str |
|
40 | 40 | from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original |
|
41 | 41 | from vcsserver.hgcompat import ( |
|
42 | 42 | hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler) |
|
43 | 43 | from vcsserver.git_lfs.lib import LFSOidStore |
|
44 | 44 | |
|
45 | 45 | DIR_STAT = stat.S_IFDIR |
|
46 | 46 | FILE_MODE = stat.S_IFMT |
|
47 | 47 | GIT_LINK = objects.S_IFGITLINK |
|
48 | 48 | |
|
49 | 49 | log = logging.getLogger(__name__) |
|
50 | 50 | |
|
51 | 51 | |
|
52 | 52 | def reraise_safe_exceptions(func): |
|
53 | 53 | """Converts Dulwich exceptions to something neutral.""" |
|
54 | 54 | @wraps(func) |
|
55 | 55 | def wrapper(*args, **kwargs): |
|
56 | 56 | try: |
|
57 | 57 | return func(*args, **kwargs) |
|
58 | 58 | except (ChecksumMismatch, WrongObjectException, MissingCommitError, |
|
59 | 59 | ObjectMissing) as e: |
|
60 | 60 | exc = exceptions.LookupException(e) |
|
61 | 61 | raise exc(e) |
|
62 | 62 | except (HangupException, UnexpectedCommandError) as e: |
|
63 | 63 | exc = exceptions.VcsException(e) |
|
64 | 64 | raise exc(e) |
|
65 | 65 | except Exception as e: |
|
66 | 66 | # NOTE(marcink): because of how dulwich handles some exceptions |

67 | 67 | # (KeyError on empty repos), we cannot track this and catch all |

68 | 68 | # exceptions; these are exceptions from other handlers |
|
69 | 69 | #if not hasattr(e, '_vcs_kind'): |
|
70 | 70 | #log.exception("Unhandled exception in git remote call") |
|
71 | 71 | #raise_from_original(exceptions.UnhandledException) |
|
72 | 72 | raise |
|
73 | 73 | return wrapper |
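Worth noting for readers of this wrapper: the helpers in vcsserver.exceptions are factories, hence the double call above. `exceptions.LookupException(e)` binds the original error and returns an exception class carrying a `_vcs_kind` marker (the Mercurial wrapper later in this diff checks for it); that class is then called with a message. A minimal sketch, with `org_exc` and `sha` as placeholders:

    from vcsserver import exceptions

    exc_class = exceptions.LookupException(org_exc)  # bind the original error
    raise exc_class('unknown object: %s' % sha)      # raise with a message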
|
74 | 74 | |
|
75 | 75 | |
|
76 | 76 | class Repo(DulwichRepo): |
|
77 | 77 | """ |
|
78 | 78 | A wrapper for dulwich Repo class. |
|
79 | 79 | |
|
80 | 80 | Since dulwich sometimes keeps .idx file descriptors open, this leads to |
|
81 | 81 | "Too many open files" error. We need to close all opened file descriptors |
|
82 | 82 | once the repo object is destroyed. |
|
83 | 83 | |
|
84 | 84 | TODO: mikhail: please check if we need this wrapper after updating dulwich |
|
85 | 85 | to 0.12.0+ |
|
86 | 86 | """ |
|
87 | 87 | def __del__(self): |
|
88 | 88 | if hasattr(self, 'object_store'): |
|
89 | 89 | self.close() |
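To make the docstring's concern concrete: each opened packfile keeps a `.idx` descriptor on the object store, so a long-lived process that creates many repo objects can hit the fd limit. A rough, Linux-only illustration of what `__del__` guards against; the repo path is a placeholder and the fd counting is an assumption for the sketch:

    import os
    from dulwich.repo import Repo as DulwichRepo

    def open_fds():
        # count this process's open descriptors (Linux-only helper)
        return len(os.listdir('/proc/self/fd'))

    before = open_fds()
    for _ in range(50):
        repo = DulwichRepo('/srv/repos/packed-repo')  # placeholder path
        repo.object_store.packs  # touching packs may open .idx files
        # without repo.close() (which __del__ above guarantees) fds linger
    print open_fds() - before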
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | class GitFactory(RepoFactory): |
|
93 | 93 | repo_type = 'git' |
|
94 | 94 | |
|
95 | 95 | def _create_repo(self, wire, create): |
|
96 | 96 | repo_path = str_to_dulwich(wire['path']) |
|
97 | 97 | return Repo(repo_path) |
|
98 | 98 | |
|
99 | 99 | |
|
100 | 100 | class GitRemote(object): |
|
101 | 101 | |
|
102 | 102 | def __init__(self, factory): |
|
103 | 103 | self._factory = factory |
|
104 | 104 | self.peeled_ref_marker = '^{}' |
|
105 | 105 | self._bulk_methods = { |
|
106 | 106 | "author": self.commit_attribute, |
|
107 | 107 | "date": self.get_object_attrs, |
|
108 | 108 | "message": self.commit_attribute, |
|
109 | 109 | "parents": self.commit_attribute, |
|
110 | 110 | "_commit": self.revision, |
|
111 | 111 | } |
|
112 | 112 | |
|
113 | 113 | def _wire_to_config(self, wire): |
|
114 | 114 | if 'config' in wire: |
|
115 | 115 | return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']]) |
|
116 | 116 | return {} |
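`wire['config']` arrives as (section, option, value) triples; this helper flattens them into `section_option` keys, which is how lookups such as `conf.get('vcs_ssl_dir')` below find their values. For example (values assumed):

    wire = {'config': [
        ('vcs', 'ssl_dir', '/etc/ssl/certs'),
        ('vcs_git_lfs', 'store_location', '/var/lfs-store'),
    ]}
    # _wire_to_config(wire) ->
    # {'vcs_ssl_dir': '/etc/ssl/certs',
    #  'vcs_git_lfs_store_location': '/var/lfs-store'}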
|
117 | 117 | |
|
118 | 118 | def _assign_ref(self, wire, ref, commit_id): |
|
119 | 119 | repo = self._factory.repo(wire) |
|
120 | 120 | repo[ref] = commit_id |
|
121 | 121 | |
|
122 | 122 | def _remote_conf(self, config): |
|
123 | 123 | params = [ |
|
124 | 124 | '-c', 'core.askpass=""', |
|
125 | 125 | ] |
|
126 | 126 | ssl_cert_dir = config.get('vcs_ssl_dir') |
|
127 | 127 | if ssl_cert_dir: |
|
128 | 128 | params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)]) |
|
129 | 129 | return params |
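Continuing that example, `_remote_conf` turns the flattened config into `git -c` flags that prefix the remote git invocations further down:

    # _remote_conf({'vcs_ssl_dir': '/etc/ssl/certs'}) ->
    # ['-c', 'core.askpass=""', '-c', 'http.sslCAinfo=/etc/ssl/certs']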
|
130 | 130 | |
|
131 | 131 | @reraise_safe_exceptions |
|
132 | 132 | def add_object(self, wire, content): |
|
133 | 133 | repo = self._factory.repo(wire) |
|
134 | 134 | blob = objects.Blob() |
|
135 | 135 | blob.set_raw_string(content) |
|
136 | 136 | repo.object_store.add_object(blob) |
|
137 | 137 | return blob.id |
|
138 | 138 | |
|
139 | 139 | @reraise_safe_exceptions |
|
140 | 140 | def assert_correct_path(self, wire): |
|
141 | 141 | path = wire.get('path') |
|
142 | 142 | try: |
|
143 | 143 | self._factory.repo(wire) |
|
144 | 144 | except NotGitRepository as e: |
|
145 | 145 | tb = traceback.format_exc() |
|
146 | 146 | log.debug("Invalid Git path `%s`, tb: %s", path, tb) |
|
147 | 147 | return False |
|
148 | 148 | |
|
149 | 149 | return True |
|
150 | 150 | |
|
151 | 151 | @reraise_safe_exceptions |
|
152 | 152 | def bare(self, wire): |
|
153 | 153 | repo = self._factory.repo(wire) |
|
154 | 154 | return repo.bare |
|
155 | 155 | |
|
156 | 156 | @reraise_safe_exceptions |
|
157 | 157 | def blob_as_pretty_string(self, wire, sha): |
|
158 | 158 | repo = self._factory.repo(wire) |
|
159 | 159 | return repo[sha].as_pretty_string() |
|
160 | 160 | |
|
161 | 161 | @reraise_safe_exceptions |
|
162 | 162 | def blob_raw_length(self, wire, sha): |
|
163 | 163 | repo = self._factory.repo(wire) |
|
164 | 164 | blob = repo[sha] |
|
165 | 165 | return blob.raw_length() |
|
166 | 166 | |
|
167 | 167 | def _parse_lfs_pointer(self, raw_content): |
|
168 | 168 | |
|
169 | 169 | spec_string = 'version https://git-lfs.github.com/spec' |
|
170 | 170 | if raw_content and raw_content.startswith(spec_string): |
|
171 | 171 | pattern = re.compile(r""" |
|
172 | 172 | (?:\n)? |
|
173 | 173 | ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n |
|
174 | 174 | ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n |
|
175 | 175 | ^size[ ](?P<oid_size>[0-9]+)\n |
|
176 | 176 | (?:\n)? |
|
177 | 177 | """, re.VERBOSE | re.MULTILINE) |
|
178 | 178 | match = pattern.match(raw_content) |
|
179 | 179 | if match: |
|
180 | 180 | return match.groupdict() |
|
181 | 181 | |
|
182 | 182 | return {} |
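The verbose regex targets the standard Git LFS pointer layout. Given a pointer like the following (hash and size invented), the returned groupdict looks like this:

    pointer = ('version https://git-lfs.github.com/spec/v1\n'
               'oid sha256:' + 'ab' * 32 + '\n'
               'size 123\n')
    # _parse_lfs_pointer(pointer) ->
    # {'spec_ver': 'v1', 'oid_hash': 'abab...ab' (64 hex chars),
    #  'oid_size': '123'}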
|
183 | 183 | |
|
184 | 184 | @reraise_safe_exceptions |
|
185 | 185 | def is_large_file(self, wire, sha): |
|
186 | 186 | repo = self._factory.repo(wire) |
|
187 | 187 | blob = repo[sha] |
|
188 | 188 | return self._parse_lfs_pointer(blob.as_raw_string()) |
|
189 | 189 | |
|
190 | 190 | @reraise_safe_exceptions |
|
191 | 191 | def in_largefiles_store(self, wire, oid): |
|
192 | 192 | repo = self._factory.repo(wire) |
|
193 | 193 | conf = self._wire_to_config(wire) |
|
194 | 194 | |
|
195 | 195 | store_location = conf.get('vcs_git_lfs_store_location') |
|
196 | 196 | if store_location: |
|
197 | 197 | repo_name = repo.path |
|
198 | 198 | store = LFSOidStore( |
|
199 | 199 | oid=oid, repo=repo_name, store_location=store_location) |
|
200 | 200 | return store.has_oid() |
|
201 | 201 | |
|
202 | 202 | return False |
|
203 | 203 | |
|
204 | 204 | @reraise_safe_exceptions |
|
205 | 205 | def store_path(self, wire, oid): |
|
206 | 206 | repo = self._factory.repo(wire) |
|
207 | 207 | conf = self._wire_to_config(wire) |
|
208 | 208 | |
|
209 | 209 | store_location = conf.get('vcs_git_lfs_store_location') |
|
210 | 210 | if store_location: |
|
211 | 211 | repo_name = repo.path |
|
212 | 212 | store = LFSOidStore( |
|
213 | 213 | oid=oid, repo=repo_name, store_location=store_location) |
|
214 | 214 | return store.oid_path |
|
215 | 215 | raise ValueError('Unable to fetch oid with path {}'.format(oid)) |
|
216 | 216 | |
|
217 | 217 | @reraise_safe_exceptions |
|
218 | 218 | def bulk_request(self, wire, rev, pre_load): |
|
219 | 219 | result = {} |
|
220 | 220 | for attr in pre_load: |
|
221 | 221 | try: |
|
222 | 222 | method = self._bulk_methods[attr] |
|
223 | 223 | args = [wire, rev] |
|
224 | 224 | if attr == "date": |
|
225 | 225 | args.extend(["commit_time", "commit_timezone"]) |
|
226 | 226 | elif attr in ["author", "message", "parents"]: |
|
227 | 227 | args.append(attr) |
|
228 | 228 | result[attr] = method(*args) |
|
229 | 229 | except KeyError as e: |
|
230 | 230 | raise exceptions.VcsException(e)( |
|
231 | 231 | "Unknown bulk attribute: %s" % attr) |
|
232 | 232 | return result |
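`bulk_request` fans a single revision out over `_bulk_methods`, adjusting the positional arguments per attribute. A sketch of a call, with `remote`, `wire` and `rev` as placeholders:

    result = remote.bulk_request(wire, rev, pre_load=['author', 'date', 'message'])
    # 'date' routes through get_object_attrs(wire, rev, 'commit_time',
    # 'commit_timezone'), so result['date'] is a two-element list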
|
233 | 233 | |
|
234 | 234 | def _build_opener(self, url): |
|
235 | 235 | handlers = [] |
|
236 | 236 | url_obj = url_parser(url) |
|
237 | 237 | _, authinfo = url_obj.authinfo() |
|
238 | 238 | |
|
239 | 239 | if authinfo: |
|
240 | 240 | # create a password manager |
|
241 | 241 | passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm() |
|
242 | 242 | passmgr.add_password(*authinfo) |
|
243 | 243 | |
|
244 | 244 | handlers.extend((httpbasicauthhandler(passmgr), |
|
245 | 245 | httpdigestauthhandler(passmgr))) |
|
246 | 246 | |
|
247 | 247 | return urllib2.build_opener(*handlers) |
|
248 | 248 | |
|
249 | 249 | @reraise_safe_exceptions |
|
250 | 250 | def check_url(self, url, config): |
|
251 | 251 | url_obj = url_parser(url) |
|
252 | 252 | test_uri, _ = url_obj.authinfo() |
|
253 | 253 | url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd |
|
254 | 254 | url_obj.query = obfuscate_qs(url_obj.query) |
|
255 | 255 | cleaned_uri = str(url_obj) |
|
256 | 256 | log.info("Checking URL for remote cloning/import: %s", cleaned_uri) |
|
257 | 257 | |
|
258 | 258 | if not test_uri.endswith('info/refs'): |
|
259 | 259 | test_uri = test_uri.rstrip('/') + '/info/refs' |
|
260 | 260 | |
|
261 | 261 | o = self._build_opener(url) |
|
262 | 262 | o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git |
|
263 | 263 | |
|
264 | 264 | q = {"service": 'git-upload-pack'} |
|
265 | 265 | qs = '?%s' % urllib.urlencode(q) |
|
266 | 266 | cu = "%s%s" % (test_uri, qs) |
|
267 | 267 | req = urllib2.Request(cu, None, {}) |
|
268 | 268 | |
|
269 | 269 | try: |
|
270 | 270 | log.debug("Trying to open URL %s", cleaned_uri) |
|
271 | 271 | resp = o.open(req) |
|
272 | 272 | if resp.code != 200: |
|
273 | 273 | raise exceptions.URLError()('Return Code is not 200') |
|
274 | 274 | except Exception as e: |
|
275 | 275 | log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True) |
|
276 | 276 | # means it cannot be cloned |
|
277 | 277 | raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e)) |
|
278 | 278 | |
|
279 | 279 | # now detect if it's proper git repo |
|
280 | 280 | gitdata = resp.read() |
|
281 | 281 | if 'service=git-upload-pack' in gitdata: |
|
282 | 282 | pass |
|
283 | 283 | elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata): |
|
284 | 284 | # old-style git can return some other format! |
|
285 | 285 | pass |
|
286 | 286 | else: |
|
287 | 287 | raise exceptions.URLError()( |
|
288 | 288 | "url [%s] does not look like a git repository" % (cleaned_uri,)) |
|
289 | 289 | |
|
290 | 290 | return True |
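For reference, the check above exercises git's smart-HTTP discovery endpoint; for a repository at https://example.com/repo (a made-up URL) the request issued is:

    # GET https://example.com/repo/info/refs?service=git-upload-pack
    # smart servers answer with a body containing 'service=git-upload-pack';
    # old dumb-protocol servers answer with '<40-hex-sha>  refs/...' lines,
    # which the re.findall() fallback above accepts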
|
291 | 291 | |
|
292 | 292 | @reraise_safe_exceptions |
|
293 | 293 | def clone(self, wire, url, deferred, valid_refs, update_after_clone): |
|
294 | 294 | # TODO(marcink): deprecate this method. Last i checked we don't use it anymore |
|
295 | 295 | remote_refs = self.pull(wire, url, apply_refs=False) |
|
296 | 296 | repo = self._factory.repo(wire) |
|
297 | 297 | if isinstance(valid_refs, list): |
|
298 | 298 | valid_refs = tuple(valid_refs) |
|
299 | 299 | |
|
300 | 300 | for k in remote_refs: |
|
302 | 302 | # only parse heads/tags and skip so-called deferred tags |
|
302 | 302 | if k.startswith(valid_refs) and not k.endswith(deferred): |
|
303 | 303 | repo[k] = remote_refs[k] |
|
304 | 304 | |
|
305 | 305 | if update_after_clone: |
|
306 | 306 | # we want to checkout HEAD |
|
307 | 307 | repo["HEAD"] = remote_refs["HEAD"] |
|
308 | 308 | index.build_index_from_tree(repo.path, repo.index_path(), |
|
309 | 309 | repo.object_store, repo["HEAD"].tree) |
|
310 | 310 | |
|
311 | 311 | # TODO: this is quite complex, check if that can be simplified |
|
312 | 312 | @reraise_safe_exceptions |
|
313 | 313 | def commit(self, wire, commit_data, branch, commit_tree, updated, removed): |
|
314 | 314 | repo = self._factory.repo(wire) |
|
315 | 315 | object_store = repo.object_store |
|
316 | 316 | |
|
317 | 317 | # Create tree and populate it with blobs |
|
318 | 318 | commit_tree = commit_tree and repo[commit_tree] or objects.Tree() |
|
319 | 319 | |
|
320 | 320 | for node in updated: |
|
321 | 321 | # Compute subdirs if needed |
|
322 | 322 | dirpath, nodename = vcspath.split(node['path']) |
|
323 | 323 | dirnames = map(safe_str, dirpath and dirpath.split('/') or []) |
|
324 | 324 | parent = commit_tree |
|
325 | 325 | ancestors = [('', parent)] |
|
326 | 326 | |
|
327 | 327 | # Tries to dig for the deepest existing tree |
|
328 | 328 | while dirnames: |
|
329 | 329 | curdir = dirnames.pop(0) |
|
330 | 330 | try: |
|
331 | 331 | dir_id = parent[curdir][1] |
|
332 | 332 | except KeyError: |
|
333 | 333 | # put curdir back into dirnames and stop |
|
334 | 334 | dirnames.insert(0, curdir) |
|
335 | 335 | break |
|
336 | 336 | else: |
|
337 | 337 | # If found, updates parent |
|
338 | 338 | parent = repo[dir_id] |
|
339 | 339 | ancestors.append((curdir, parent)) |
|
340 | 340 | # Now parent is deepest existing tree and we need to create |
|
341 | 341 | # subtrees for dirnames (in reverse order) |
|
342 | 342 | # [this only applies for nodes from added] |
|
343 | 343 | new_trees = [] |
|
344 | 344 | |
|
345 | 345 | blob = objects.Blob.from_string(node['content']) |
|
346 | 346 | |
|
347 | 347 | if dirnames: |
|
348 | 348 | # If there are trees which should be created we need to build |
|
349 | 349 | # them now (in reverse order) |
|
350 | 350 | reversed_dirnames = list(reversed(dirnames)) |
|
351 | 351 | curtree = objects.Tree() |
|
352 | 352 | curtree[node['node_path']] = node['mode'], blob.id |
|
353 | 353 | new_trees.append(curtree) |
|
354 | 354 | for dirname in reversed_dirnames[:-1]: |
|
355 | 355 | newtree = objects.Tree() |
|
356 | 356 | newtree[dirname] = (DIR_STAT, curtree.id) |
|
357 | 357 | new_trees.append(newtree) |
|
358 | 358 | curtree = newtree |
|
359 | 359 | parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id) |
|
360 | 360 | else: |
|
361 | 361 | parent.add( |
|
362 | 362 | name=node['node_path'], mode=node['mode'], hexsha=blob.id) |
|
363 | 363 | |
|
364 | 364 | new_trees.append(parent) |
|
365 | 365 | # Update ancestors |
|
366 | 366 | reversed_ancestors = reversed( |
|
367 | 367 | [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])]) |
|
368 | 368 | for parent, tree, path in reversed_ancestors: |
|
369 | 369 | parent[path] = (DIR_STAT, tree.id) |
|
370 | 370 | object_store.add_object(tree) |
|
371 | 371 | |
|
372 | 372 | object_store.add_object(blob) |
|
373 | 373 | for tree in new_trees: |
|
374 | 374 | object_store.add_object(tree) |
|
375 | 375 | |
|
376 | 376 | for node_path in removed: |
|
377 | 377 | paths = node_path.split('/') |
|
378 | 378 | tree = commit_tree |
|
379 | 379 | trees = [tree] |
|
380 | 380 | # Traverse deep into the forest... |
|
381 | 381 | for path in paths: |
|
382 | 382 | try: |
|
383 | 383 | obj = repo[tree[path][1]] |
|
384 | 384 | if isinstance(obj, objects.Tree): |
|
385 | 385 | trees.append(obj) |
|
386 | 386 | tree = obj |
|
387 | 387 | except KeyError: |
|
388 | 388 | break |
|
389 | 389 | # Cut down the blob and all rotten trees on the way back... |
|
390 | 390 | for path, tree in reversed(zip(paths, trees)): |
|
391 | 391 | del tree[path] |
|
392 | 392 | if tree: |
|
393 | 393 | # This tree still has elements - don't remove it or any |
|
394 | 394 | # of its parents |
|
395 | 395 | break |
|
396 | 396 | |
|
397 | 397 | object_store.add_object(commit_tree) |
|
398 | 398 | |
|
399 | 399 | # Create commit |
|
400 | 400 | commit = objects.Commit() |
|
401 | 401 | commit.tree = commit_tree.id |
|
402 | 402 | for k, v in commit_data.iteritems(): |
|
403 | 403 | setattr(commit, k, v) |
|
404 | 404 | object_store.add_object(commit) |
|
405 | 405 | |
|
406 | 406 | ref = 'refs/heads/%s' % branch |
|
407 | 407 | repo.refs[ref] = commit.id |
|
408 | 408 | |
|
409 | 409 | return commit.id |
|
410 | 410 | |
|
411 | 411 | @reraise_safe_exceptions |
|
412 | 412 | def pull(self, wire, url, apply_refs=True, refs=None, update_after=False): |
|
413 | 413 | if url != 'default' and '://' not in url: |
|
414 | 414 | client = LocalGitClient(url) |
|
415 | 415 | else: |
|
416 | 416 | url_obj = url_parser(url) |
|
417 | 417 | o = self._build_opener(url) |
|
418 | 418 | url, _ = url_obj.authinfo() |
|
419 | 419 | client = HttpGitClient(base_url=url, opener=o) |
|
420 | 420 | repo = self._factory.repo(wire) |
|
421 | 421 | |
|
422 | 422 | determine_wants = repo.object_store.determine_wants_all |
|
423 | 423 | if refs: |
|
424 | 424 | def determine_wants_requested(references): |
|
425 | 425 | return [references[r] for r in references if r in refs] |
|
426 | 426 | determine_wants = determine_wants_requested |
|
427 | 427 | |
|
428 | 428 | try: |
|
429 | 429 | remote_refs = client.fetch( |
|
430 | 430 | path=url, target=repo, determine_wants=determine_wants) |
|
431 | 431 | except NotGitRepository as e: |
|
432 | 432 | log.warning( |
|
433 | 433 | 'Trying to fetch from "%s" failed, not a Git repository.', url) |
|
434 | 434 | # Exception can contain unicode which we convert |
|
435 | 435 | raise exceptions.AbortException(e)(repr(e)) |
|
436 | 436 | |
|
437 | 437 | # mikhail: client.fetch() returns all the remote refs, but fetches only |
|
438 | 438 | # refs filtered by the `determine_wants` function. We need to filter the |

439 | 439 | # result as well |
|
440 | 440 | if refs: |
|
441 | 441 | remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs} |
|
442 | 442 | |
|
443 | 443 | if apply_refs: |
|
444 | 444 | # TODO: johbo: Needs proper test coverage with a git repository |
|
445 | 445 | # that contains a tag object, so that we would end up with |
|
446 | 446 | # a peeled ref at this point. |
|
447 | 447 | for k in remote_refs: |
|
448 | 448 | if k.endswith(self.peeled_ref_marker): |
|
449 | 449 | log.debug("Skipping peeled reference %s", k) |
|
450 | 450 | continue |
|
451 | 451 | repo[k] = remote_refs[k] |
|
452 | 452 | |
|
453 | 453 | if refs and not update_after: |
|
454 | 454 | # mikhail: explicitly set the head to the last ref. |
|
455 | 455 | repo['HEAD'] = remote_refs[refs[-1]] |
|
456 | 456 | |
|
457 | 457 | if update_after: |
|
458 | 458 | # we want to checkout HEAD |
|
459 | 459 | repo["HEAD"] = remote_refs["HEAD"] |
|
460 | 460 | index.build_index_from_tree(repo.path, repo.index_path(), |
|
461 | 461 | repo.object_store, repo["HEAD"].tree) |
|
462 | 462 | return remote_refs |
|
463 | 463 | |
|
464 | 464 | @reraise_safe_exceptions |
|
465 | 465 | def sync_fetch(self, wire, url, refs=None): |
|
466 | 466 | repo = self._factory.repo(wire) |
|
467 | 467 | if refs and not isinstance(refs, (list, tuple)): |
|
468 | 468 | refs = [refs] |
|
469 | 469 | config = self._wire_to_config(wire) |
|
470 | 470 | # get all remote refs we'll use to fetch later |
|
471 | 471 | output, __ = self.run_git_command( |
|
472 | 472 | wire, ['ls-remote', url], fail_on_stderr=False, |
|
473 | 473 | _copts=self._remote_conf(config), |
|
474 | 474 | extra_env={'GIT_TERMINAL_PROMPT': '0'}) |
|
475 | 475 | |
|
476 | 476 | remote_refs = collections.OrderedDict() |
|
477 | 477 | fetch_refs = [] |
|
478 | 478 | |
|
479 | 479 | for ref_line in output.splitlines(): |
|
480 | 480 | sha, ref = ref_line.split('\t') |
|
481 | 481 | sha = sha.strip() |
|
482 | 482 | if ref in remote_refs: |
|
483 | 483 | # duplicate, skip |
|
484 | 484 | continue |
|
485 | 485 | if ref.endswith(self.peeled_ref_marker): |
|
486 | 486 | log.debug("Skipping peeled reference %s", ref) |
|
487 | 487 | continue |
|
488 | 488 | # don't sync HEAD |
|
489 | 489 | if ref in ['HEAD']: |
|
490 | 490 | continue |
|
491 | 491 | |
|
492 | 492 | remote_refs[ref] = sha |
|
493 | 493 | |
|
494 | 494 | if refs and sha in refs: |
|
495 | 495 | # we filter fetch using our specified refs |
|
496 | 496 | fetch_refs.append('{}:{}'.format(ref, ref)) |
|
497 | 497 | elif not refs: |
|
498 | 498 | fetch_refs.append('{}:{}'.format(ref, ref)) |
|
499 | 499 | log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs)) |
|
500 | 500 | if fetch_refs: |
|
501 | 501 | for chunk in more_itertools.chunked(fetch_refs, 1024 * 4): |
|
502 | 502 | fetch_refs_chunks = list(chunk) |
|
503 | 503 | log.debug('Fetching %s refs from import url', len(fetch_refs_chunks)) |
|
504 | 504 | _out, _err = self.run_git_command( |
|
505 | 505 | wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks, |
|
506 | 506 | fail_on_stderr=False, |
|
507 | 507 | _copts=self._remote_conf(config), |
|
508 | 508 | extra_env={'GIT_TERMINAL_PROMPT': '0'}) |
|
509 | 509 | |
|
510 | 510 | return remote_refs |
|
511 | 511 | |
|
512 | 512 | @reraise_safe_exceptions |
|
513 | 513 | def sync_push(self, wire, url, refs=None): |
|
514 | 514 | if not self.check_url(url, wire): |
|
515 | 515 | return |
|
516 | 516 | config = self._wire_to_config(wire) |
|
517 | 517 | repo = self._factory.repo(wire) |
|
518 | 518 | self.run_git_command( |
|
519 | 519 | wire, ['push', url, '--mirror'], fail_on_stderr=False, |
|
520 | 520 | _copts=self._remote_conf(config), |
|
521 | 521 | extra_env={'GIT_TERMINAL_PROMPT': '0'}) |
|
522 | 522 | |
|
523 | 523 | @reraise_safe_exceptions |
|
524 | 524 | def get_remote_refs(self, wire, url): |
|
525 | 525 | repo = Repo(url) |
|
526 | 526 | return repo.get_refs() |
|
527 | 527 | |
|
528 | 528 | @reraise_safe_exceptions |
|
529 | 529 | def get_description(self, wire): |
|
530 | 530 | repo = self._factory.repo(wire) |
|
531 | 531 | return repo.get_description() |
|
532 | 532 | |
|
533 | 533 | @reraise_safe_exceptions |
|
534 | 534 | def get_missing_revs(self, wire, rev1, rev2, path2): |
|
535 | 535 | repo = self._factory.repo(wire) |
|
536 | 536 | LocalGitClient(thin_packs=False).fetch(path2, repo) |
|
537 | 537 | |
|
538 | 538 | wire_remote = wire.copy() |
|
539 | 539 | wire_remote['path'] = path2 |
|
540 | 540 | repo_remote = self._factory.repo(wire_remote) |
|
541 | 541 | LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote) |
|
542 | 542 | |
|
543 | 543 | revs = [ |
|
544 | 544 | x.commit.id |
|
545 | 545 | for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])] |
|
546 | 546 | return revs |
|
547 | 547 | |
|
548 | 548 | @reraise_safe_exceptions |
|
549 | 549 | def get_object(self, wire, sha): |
|
550 | 550 | repo = self._factory.repo(wire) |
|
551 | 551 | obj = repo.get_object(sha) |
|
552 | 552 | commit_id = obj.id |
|
553 | 553 | |
|
554 | 554 | if isinstance(obj, Tag): |
|
555 | 555 | commit_id = obj.object[1] |
|
556 | 556 | |
|
557 | 557 | return { |
|
558 | 558 | 'id': obj.id, |
|
559 | 559 | 'type': obj.type_name, |
|
560 | 560 | 'commit_id': commit_id |
|
561 | 561 | } |
|
562 | 562 | |
|
563 | 563 | @reraise_safe_exceptions |
|
564 | 564 | def get_object_attrs(self, wire, sha, *attrs): |
|
565 | 565 | repo = self._factory.repo(wire) |
|
566 | 566 | obj = repo.get_object(sha) |
|
567 | 567 | return list(getattr(obj, a) for a in attrs) |
|
568 | 568 | |
|
569 | 569 | @reraise_safe_exceptions |
|
570 | 570 | def get_refs(self, wire): |
|
571 | 571 | repo = self._factory.repo(wire) |
|
572 | 572 | result = {} |
|
573 | 573 | for ref, sha in repo.refs.as_dict().items(): |
|
574 | 574 | peeled_sha = repo.get_peeled(ref) |
|
575 | 575 | result[ref] = peeled_sha |
|
576 | 576 | return result |
|
577 | 577 | |
|
578 | 578 | @reraise_safe_exceptions |
|
579 | 579 | def get_refs_path(self, wire): |
|
580 | 580 | repo = self._factory.repo(wire) |
|
581 | 581 | return repo.refs.path |
|
582 | 582 | |
|
583 | 583 | @reraise_safe_exceptions |
|
584 | 584 | def head(self, wire, show_exc=True): |
|
585 | 585 | repo = self._factory.repo(wire) |
|
586 | 586 | try: |
|
587 | 587 | return repo.head() |
|
588 | 588 | except Exception: |
|
589 | 589 | if show_exc: |
|
590 | 590 | raise |
|
591 | 591 | |
|
592 | 592 | @reraise_safe_exceptions |
|
593 | 593 | def init(self, wire): |
|
594 | 594 | repo_path = str_to_dulwich(wire['path']) |
|
595 | 595 | self.repo = Repo.init(repo_path) |
|
596 | 596 | |
|
597 | 597 | @reraise_safe_exceptions |
|
598 | 598 | def init_bare(self, wire): |
|
599 | 599 | repo_path = str_to_dulwich(wire['path']) |
|
600 | 600 | self.repo = Repo.init_bare(repo_path) |
|
601 | 601 | |
|
602 | 602 | @reraise_safe_exceptions |
|
603 | 603 | def revision(self, wire, rev): |
|
604 | 604 | repo = self._factory.repo(wire) |
|
605 | 605 | obj = repo[rev] |
|
606 | 606 | obj_data = { |
|
607 | 607 | 'id': obj.id, |
|
608 | 608 | } |
|
609 | 609 | try: |
|
610 | 610 | obj_data['tree'] = obj.tree |
|
611 | 611 | except AttributeError: |
|
612 | 612 | pass |
|
613 | 613 | return obj_data |
|
614 | 614 | |
|
615 | 615 | @reraise_safe_exceptions |
|
616 | 616 | def commit_attribute(self, wire, rev, attr): |
|
617 | 617 | repo = self._factory.repo(wire) |
|
618 | 618 | obj = repo[rev] |
|
619 | 619 | return getattr(obj, attr) |
|
620 | 620 | |
|
621 | 621 | @reraise_safe_exceptions |
|
622 | 622 | def set_refs(self, wire, key, value): |
|
623 | 623 | repo = self._factory.repo(wire) |
|
624 | 624 | repo.refs[key] = value |
|
625 | 625 | |
|
626 | 626 | @reraise_safe_exceptions |
|
627 | 627 | def remove_ref(self, wire, key): |
|
628 | 628 | repo = self._factory.repo(wire) |
|
629 | 629 | del repo.refs[key] |
|
630 | 630 | |
|
631 | 631 | @reraise_safe_exceptions |
|
632 | 632 | def tree_changes(self, wire, source_id, target_id): |
|
633 | 633 | repo = self._factory.repo(wire) |
|
634 | 634 | source = repo[source_id].tree if source_id else None |
|
635 | 635 | target = repo[target_id].tree |
|
636 | 636 | result = repo.object_store.tree_changes(source, target) |
|
637 | 637 | return list(result) |
|
638 | 638 | |
|
639 | 639 | @reraise_safe_exceptions |
|
640 | 640 | def tree_items(self, wire, tree_id): |
|
641 | 641 | repo = self._factory.repo(wire) |
|
642 | 642 | tree = repo[tree_id] |
|
643 | 643 | |
|
644 | 644 | result = [] |
|
645 | 645 | for item in tree.iteritems(): |
|
646 | 646 | item_sha = item.sha |
|
647 | 647 | item_mode = item.mode |
|
648 | 648 | |
|
649 | 649 | if FILE_MODE(item_mode) == GIT_LINK: |
|
650 | 650 | item_type = "link" |
|
651 | 651 | else: |
|
652 | 652 | item_type = repo[item_sha].type_name |
|
653 | 653 | |
|
654 | 654 | result.append((item.path, item_mode, item_sha, item_type)) |
|
655 | 655 | return result |
|
656 | 656 | |
|
657 | 657 | @reraise_safe_exceptions |
|
658 | 658 | def update_server_info(self, wire): |
|
659 | 659 | repo = self._factory.repo(wire) |
|
660 | 660 | update_server_info(repo) |
|
661 | 661 | |
|
662 | 662 | @reraise_safe_exceptions |
|
663 | 663 | def discover_git_version(self): |
|
664 | 664 | stdout, _ = self.run_git_command( |
|
665 | 665 | {}, ['--version'], _bare=True, _safe=True) |
|
666 | 666 | prefix = 'git version' |
|
667 | 667 | if stdout.startswith(prefix): |
|
668 | 668 | stdout = stdout[len(prefix):] |
|
669 | 669 | return stdout.strip() |
|
670 | 670 | |
|
671 | 671 | @reraise_safe_exceptions |
|
672 | 672 | def run_git_command(self, wire, cmd, **opts): |
|
673 | 673 | path = wire.get('path', None) |
|
674 | 674 | |
|
675 | 675 | if path and os.path.isdir(path): |
|
676 | 676 | opts['cwd'] = path |
|
677 | 677 | |
|
678 | 678 | if '_bare' in opts: |
|
679 | 679 | _copts = [] |
|
680 | 680 | del opts['_bare'] |
|
681 | 681 | else: |
|
682 | 682 | _copts = ['-c', 'core.quotepath=false', ] |
|
683 | 683 | safe_call = False |
|
684 | 684 | if '_safe' in opts: |
|
685 | 685 | # no exc on failure |
|
686 | 686 | del opts['_safe'] |
|
687 | 687 | safe_call = True |
|
688 | 688 | |
|
689 | 689 | if '_copts' in opts: |
|
690 | 690 | _copts.extend(opts['_copts'] or []) |
|
691 | 691 | del opts['_copts'] |
|
692 | 692 | |
|
693 | 693 | gitenv = os.environ.copy() |
|
694 | 694 | gitenv.update(opts.pop('extra_env', {})) |
|
695 | 695 | # need to clean/fix GIT_DIR! |
|
696 | 696 | if 'GIT_DIR' in gitenv: |
|
697 | 697 | del gitenv['GIT_DIR'] |
|
698 | 698 | gitenv['GIT_CONFIG_NOGLOBAL'] = '1' |
|
699 | 699 | gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1' |
|
700 | 700 | |
|
701 | 701 | cmd = [settings.GIT_EXECUTABLE] + _copts + cmd |
|
702 | 702 | _opts = {'env': gitenv, 'shell': False} |
|
703 | 703 | |
|
704 | 704 | try: |
|
705 | 705 | _opts.update(opts) |
|
706 | 706 | p = subprocessio.SubprocessIOChunker(cmd, **_opts) |
|
707 | 707 | |
|
708 | 708 | return ''.join(p), ''.join(p.error) |
|
709 | 709 | except (EnvironmentError, OSError) as err: |
|
710 | 710 | cmd = ' '.join(cmd) # human friendly CMD |
|
711 | 711 | tb_err = ("Couldn't run git command (%s).\n" |
|
712 | 712 | "Original error was: %s\n" |

713 | 713 | "Call options: %s\n" |
|
714 | 714 | % (cmd, err, _opts)) |
|
715 | 715 | log.exception(tb_err) |
|
716 | 716 | if safe_call: |
|
717 | 717 | return '', err |
|
718 | 718 | else: |
|
719 | 719 | raise exceptions.VcsException()(tb_err) |
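The underscore-prefixed options form a small calling convention: `_bare` drops the default `-c core.quotepath=false`, `_safe` returns the error instead of raising, `_copts` prepends extra `-c` settings, and remaining options reach the subprocess chunker. Example calls mirroring usage elsewhere in this file; `remote`, `wire`, `url` and the paths are placeholders:

    stdout, _ = remote.run_git_command(
        {'path': '/srv/repos/example'}, ['rev-parse', 'HEAD'])
    version, _ = remote.run_git_command({}, ['--version'], _bare=True, _safe=True)
    out, _ = remote.run_git_command(
        wire, ['ls-remote', url], fail_on_stderr=False,
        _copts=['-c', 'http.sslCAinfo=/etc/ssl/certs'])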
|
720 | 720 | |
|
721 | 721 | @reraise_safe_exceptions |
|
722 | 722 | def install_hooks(self, wire, force=False): |
|
723 | 723 | from vcsserver.hook_utils import install_git_hooks |
|
724 | 724 | repo = self._factory.repo(wire) |
|
725 | 725 | return install_git_hooks(repo.path, repo.bare, force_create=force) |
|
726 | 726 | |
|
727 | @reraise_safe_exceptions | |
|
728 | def get_hooks_info(self, wire): | |
|
729 | from vcsserver.hook_utils import ( | |
|
730 | get_git_pre_hook_version, get_git_post_hook_version) | |
|
731 | repo = self._factory.repo(wire) | |
|
732 | return { | |
|
733 | 'pre_version': get_git_pre_hook_version(repo.path, repo.bare), | |
|
734 | 'post_version': get_git_post_hook_version(repo.path, repo.bare), | |
|
735 | } | |
|
736 | ||
|
727 | 737 | |
|
728 | 738 | def str_to_dulwich(value): |
|
729 | 739 | """ |
|
730 | 740 | Dulwich 0.10.1a requires `unicode` objects to be passed in. |
|
731 | 741 | """ |
|
732 | 742 | return value.decode(settings.WIRE_ENCODING) |
@@ -1,795 +1,803 b''
|
1 | 1 | # RhodeCode VCSServer provides access to different vcs backends via network. |
|
2 | 2 | # Copyright (C) 2014-2019 RhodeCode GmbH |
|
3 | 3 | # |
|
4 | 4 | # This program is free software; you can redistribute it and/or modify |
|
5 | 5 | # it under the terms of the GNU General Public License as published by |
|
6 | 6 | # the Free Software Foundation; either version 3 of the License, or |
|
7 | 7 | # (at your option) any later version. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU General Public License |
|
15 | 15 | # along with this program; if not, write to the Free Software Foundation, |
|
16 | 16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
|
17 | 17 | |
|
18 | 18 | import io |
|
19 | 19 | import logging |
|
20 | 20 | import stat |
|
21 | 21 | import urllib |
|
22 | 22 | import urllib2 |
|
23 | 23 | |
|
24 | 24 | from hgext import largefiles, rebase |
|
25 | 25 | from hgext.strip import strip as hgext_strip |
|
26 | 26 | from mercurial import commands |
|
27 | 27 | from mercurial import unionrepo |
|
28 | 28 | from mercurial import verify |
|
29 | 29 | |
|
30 | import vcsserver | |
|
30 | 31 | from vcsserver import exceptions |
|
31 | 32 | from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original |
|
32 | 33 | from vcsserver.hgcompat import ( |
|
33 | 34 | archival, bin, clone, config as hgconfig, diffopts, hex, |
|
34 | 35 | hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler, |
|
35 | 36 | makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev, |
|
36 | 37 | patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError, |
|
37 | 38 | RepoLookupError, InterventionRequired, RequirementError) |
|
38 | 39 | |
|
39 | 40 | log = logging.getLogger(__name__) |
|
40 | 41 | |
|
41 | 42 | |
|
42 | 43 | def make_ui_from_config(repo_config): |
|
43 | 44 | baseui = ui.ui() |
|
44 | 45 | |
|
45 | 46 | # clean the baseui object |
|
46 | 47 | baseui._ocfg = hgconfig.config() |
|
47 | 48 | baseui._ucfg = hgconfig.config() |
|
48 | 49 | baseui._tcfg = hgconfig.config() |
|
49 | 50 | |
|
50 | 51 | for section, option, value in repo_config: |
|
51 | 52 | baseui.setconfig(section, option, value) |
|
52 | 53 | |
|
53 | 54 | # make our hgweb quiet so it doesn't print output |
|
54 | 55 | baseui.setconfig('ui', 'quiet', 'true') |
|
55 | 56 | |
|
56 | 57 | baseui.setconfig('ui', 'paginate', 'never') |
|
57 | 58 | # force mercurial to only use 1 thread, otherwise it may try to set a |
|
58 | 59 | # signal in a non-main thread, thus generating a ValueError. |
|
59 | 60 | baseui.setconfig('worker', 'numcpus', 1) |
|
60 | 61 | |
|
61 | 62 | # If there is no config for the largefiles extension, we explicitly disable |
|
62 | 63 | # it here. This overrides settings from repositories hgrc file. Recent |
|
63 | 64 | # mercurial versions enable largefiles in hgrc on clone from largefile |
|
64 | 65 | # repo. |
|
65 | 66 | if not baseui.hasconfig('extensions', 'largefiles'): |
|
66 | 67 | log.debug('Explicitly disable largefiles extension for repo.') |
|
67 | 68 | baseui.setconfig('extensions', 'largefiles', '!') |
|
68 | 69 | |
|
69 | 70 | return baseui |
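A sketch of the `repo_config` shape this function expects, an iterable of (section, option, value) triples; the concrete values here are invented:

    baseui = make_ui_from_config([
        ('ui', 'username', 'RhodeCode <system@example.com>'),
        ('phases', 'publish', 'True'),
    ])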
|
70 | 71 | |
|
71 | 72 | |
|
72 | 73 | def reraise_safe_exceptions(func): |
|
73 | 74 | """Decorator for converting mercurial exceptions to something neutral.""" |
|
74 | 75 | def wrapper(*args, **kwargs): |
|
75 | 76 | try: |
|
76 | 77 | return func(*args, **kwargs) |
|
77 | 78 | except (Abort, InterventionRequired) as e: |
|
78 | 79 | raise_from_original(exceptions.AbortException(e)) |
|
79 | 80 | except RepoLookupError as e: |
|
80 | 81 | raise_from_original(exceptions.LookupException(e)) |
|
81 | 82 | except RequirementError as e: |
|
82 | 83 | raise_from_original(exceptions.RequirementException(e)) |
|
83 | 84 | except RepoError as e: |
|
84 | 85 | raise_from_original(exceptions.VcsException(e)) |
|
85 | 86 | except LookupError as e: |
|
86 | 87 | raise_from_original(exceptions.LookupException(e)) |
|
87 | 88 | except Exception as e: |
|
88 | 89 | if not hasattr(e, '_vcs_kind'): |
|
89 | 90 | log.exception("Unhandled exception in hg remote call") |
|
90 | 91 | raise_from_original(exceptions.UnhandledException(e)) |
|
91 | 92 | |
|
92 | 93 | raise |
|
93 | 94 | return wrapper |
|
94 | 95 | |
|
95 | 96 | |
|
96 | 97 | class MercurialFactory(RepoFactory): |
|
97 | 98 | repo_type = 'hg' |
|
98 | 99 | |
|
99 | 100 | def _create_config(self, config, hooks=True): |
|
100 | 101 | if not hooks: |
|
101 | 102 | hooks_to_clean = frozenset(( |
|
102 | 103 | 'changegroup.repo_size', 'preoutgoing.pre_pull', |
|
103 | 104 | 'outgoing.pull_logger', 'prechangegroup.pre_push')) |
|
104 | 105 | new_config = [] |
|
105 | 106 | for section, option, value in config: |
|
106 | 107 | if section == 'hooks' and option in hooks_to_clean: |
|
107 | 108 | continue |
|
108 | 109 | new_config.append((section, option, value)) |
|
109 | 110 | config = new_config |
|
110 | 111 | |
|
111 | 112 | baseui = make_ui_from_config(config) |
|
112 | 113 | return baseui |
|
113 | 114 | |
|
114 | 115 | def _create_repo(self, wire, create): |
|
115 | 116 | baseui = self._create_config(wire["config"]) |
|
116 | 117 | return localrepository(baseui, wire["path"], create) |
|
117 | 118 | |
|
118 | 119 | |
|
119 | 120 | class HgRemote(object): |
|
120 | 121 | |
|
121 | 122 | def __init__(self, factory): |
|
122 | 123 | self._factory = factory |
|
123 | 124 | |
|
124 | 125 | self._bulk_methods = { |
|
125 | 126 | "affected_files": self.ctx_files, |
|
126 | 127 | "author": self.ctx_user, |
|
127 | 128 | "branch": self.ctx_branch, |
|
128 | 129 | "children": self.ctx_children, |
|
129 | 130 | "date": self.ctx_date, |
|
130 | 131 | "message": self.ctx_description, |
|
131 | 132 | "parents": self.ctx_parents, |
|
132 | 133 | "status": self.ctx_status, |
|
133 | 134 | "obsolete": self.ctx_obsolete, |
|
134 | 135 | "phase": self.ctx_phase, |
|
135 | 136 | "hidden": self.ctx_hidden, |
|
136 | 137 | "_file_paths": self.ctx_list, |
|
137 | 138 | } |
|
138 | 139 | |
|
139 | 140 | @reraise_safe_exceptions |
|
140 | 141 | def discover_hg_version(self): |
|
141 | 142 | from mercurial import util |
|
142 | 143 | return util.version() |
|
143 | 144 | |
|
144 | 145 | @reraise_safe_exceptions |
|
145 | 146 | def archive_repo(self, archive_path, mtime, file_info, kind): |
|
146 | 147 | if kind == "tgz": |
|
147 | 148 | archiver = archival.tarit(archive_path, mtime, "gz") |
|
148 | 149 | elif kind == "tbz2": |
|
149 | 150 | archiver = archival.tarit(archive_path, mtime, "bz2") |
|
150 | 151 | elif kind == 'zip': |
|
151 | 152 | archiver = archival.zipit(archive_path, mtime) |
|
152 | 153 | else: |
|
153 | 154 | raise exceptions.ArchiveException()( |
|
154 | 155 | 'Remote does not support: "%s".' % kind) |
|
155 | 156 | |
|
156 | 157 | for f_path, f_mode, f_is_link, f_content in file_info: |
|
157 | 158 | archiver.addfile(f_path, f_mode, f_is_link, f_content) |
|
158 | 159 | archiver.done() |
|
159 | 160 | |
|
160 | 161 | @reraise_safe_exceptions |
|
161 | 162 | def bookmarks(self, wire): |
|
162 | 163 | repo = self._factory.repo(wire) |
|
163 | 164 | return dict(repo._bookmarks) |
|
164 | 165 | |
|
165 | 166 | @reraise_safe_exceptions |
|
166 | 167 | def branches(self, wire, normal, closed): |
|
167 | 168 | repo = self._factory.repo(wire) |
|
168 | 169 | iter_branches = repo.branchmap().iterbranches() |
|
169 | 170 | bt = {} |
|
170 | 171 | for branch_name, _heads, tip, is_closed in iter_branches: |
|
171 | 172 | if normal and not is_closed: |
|
172 | 173 | bt[branch_name] = tip |
|
173 | 174 | if closed and is_closed: |
|
174 | 175 | bt[branch_name] = tip |
|
175 | 176 | |
|
176 | 177 | return bt |
|
177 | 178 | |
|
178 | 179 | @reraise_safe_exceptions |
|
179 | 180 | def bulk_request(self, wire, rev, pre_load): |
|
180 | 181 | result = {} |
|
181 | 182 | for attr in pre_load: |
|
182 | 183 | try: |
|
183 | 184 | method = self._bulk_methods[attr] |
|
184 | 185 | result[attr] = method(wire, rev) |
|
185 | 186 | except KeyError as e: |
|
186 | 187 | raise exceptions.VcsException(e)( |
|
187 | 188 | 'Unknown bulk attribute: "%s"' % attr) |
|
188 | 189 | return result |
|
189 | 190 | |
|
190 | 191 | @reraise_safe_exceptions |
|
191 | 192 | def clone(self, wire, source, dest, update_after_clone=False, hooks=True): |
|
192 | 193 | baseui = self._factory._create_config(wire["config"], hooks=hooks) |
|
193 | 194 | clone(baseui, source, dest, noupdate=not update_after_clone) |
|
194 | 195 | |
|
195 | 196 | @reraise_safe_exceptions |
|
196 | 197 | def commitctx( |
|
197 | 198 | self, wire, message, parents, commit_time, commit_timezone, |
|
198 | 199 | user, files, extra, removed, updated): |
|
199 | 200 | |
|
200 | 201 | def _filectxfn(_repo, memctx, path): |
|
201 | 202 | """ |
|
202 | 203 | Marks given path as added/changed/removed in a given _repo. This is |
|
203 | 204 | for internal mercurial commit function. |
|
204 | 205 | """ |
|
205 | 206 | |
|
206 | 207 | # check if this path is removed |
|
207 | 208 | if path in removed: |
|
208 | 209 | # returning None is a way to mark node for removal |
|
209 | 210 | return None |
|
210 | 211 | |
|
211 | 212 | # check if this path is added |
|
212 | 213 | for node in updated: |
|
213 | 214 | if node['path'] == path: |
|
214 | 215 | return memfilectx( |
|
215 | 216 | _repo, |
|
216 | 217 | changectx=memctx, |
|
217 | 218 | path=node['path'], |
|
218 | 219 | data=node['content'], |
|
219 | 220 | islink=False, |
|
220 | 221 | isexec=bool(node['mode'] & stat.S_IXUSR), |
|
221 | 222 | copied=False) |
|
222 | 223 | |
|
223 | 224 | raise exceptions.AbortException()( |
|
224 | 225 | "Given path hasn't been marked as added, " |
|
225 | 226 | "changed or removed (%s)" % path) |
|
226 | 227 | |
|
227 | 228 | repo = self._factory.repo(wire) |
|
228 | 229 | |
|
229 | 230 | commit_ctx = memctx( |
|
230 | 231 | repo=repo, |
|
231 | 232 | parents=parents, |
|
232 | 233 | text=message, |
|
233 | 234 | files=files, |
|
234 | 235 | filectxfn=_filectxfn, |
|
235 | 236 | user=user, |
|
236 | 237 | date=(commit_time, commit_timezone), |
|
237 | 238 | extra=extra) |
|
238 | 239 | |
|
239 | 240 | n = repo.commitctx(commit_ctx) |
|
240 | 241 | new_id = hex(n) |
|
241 | 242 | |
|
242 | 243 | return new_id |
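A sketch of the payload `commitctx` expects: `updated` entries carry `path`, `content` and `mode` (only the exec bit is honoured via `stat.S_IXUSR`), while `removed` is a plain list of paths. All concrete values below, including the parent `p1`, are placeholders:

    updated = [{'path': 'docs/readme.rst', 'content': 'hello', 'mode': 0100644}]
    removed = ['old/file.txt']
    new_id = remote.commitctx(
        wire, message='add readme, drop old file', parents=[p1, None],
        commit_time=0, commit_timezone=0, user='dev <dev@example.com>',
        files=['docs/readme.rst', 'old/file.txt'], extra={},
        removed=removed, updated=updated)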
|
243 | 244 | |
|
244 | 245 | @reraise_safe_exceptions |
|
245 | 246 | def ctx_branch(self, wire, revision): |
|
246 | 247 | repo = self._factory.repo(wire) |
|
247 | 248 | ctx = repo[revision] |
|
248 | 249 | return ctx.branch() |
|
249 | 250 | |
|
250 | 251 | @reraise_safe_exceptions |
|
251 | 252 | def ctx_children(self, wire, revision): |
|
252 | 253 | repo = self._factory.repo(wire) |
|
253 | 254 | ctx = repo[revision] |
|
254 | 255 | return [child.rev() for child in ctx.children()] |
|
255 | 256 | |
|
256 | 257 | @reraise_safe_exceptions |
|
257 | 258 | def ctx_date(self, wire, revision): |
|
258 | 259 | repo = self._factory.repo(wire) |
|
259 | 260 | ctx = repo[revision] |
|
260 | 261 | return ctx.date() |
|
261 | 262 | |
|
262 | 263 | @reraise_safe_exceptions |
|
263 | 264 | def ctx_description(self, wire, revision): |
|
264 | 265 | repo = self._factory.repo(wire) |
|
265 | 266 | ctx = repo[revision] |
|
266 | 267 | return ctx.description() |
|
267 | 268 | |
|
268 | 269 | @reraise_safe_exceptions |
|
269 | 270 | def ctx_diff( |
|
270 | 271 | self, wire, revision, git=True, ignore_whitespace=True, context=3): |
|
271 | 272 | repo = self._factory.repo(wire) |
|
272 | 273 | ctx = repo[revision] |
|
273 | 274 | result = ctx.diff( |
|
274 | 275 | git=git, ignore_whitespace=ignore_whitespace, context=context) |
|
275 | 276 | return list(result) |
|
276 | 277 | |
|
277 | 278 | @reraise_safe_exceptions |
|
278 | 279 | def ctx_files(self, wire, revision): |
|
279 | 280 | repo = self._factory.repo(wire) |
|
280 | 281 | ctx = repo[revision] |
|
281 | 282 | return ctx.files() |
|
282 | 283 | |
|
283 | 284 | @reraise_safe_exceptions |
|
284 | 285 | def ctx_list(self, path, revision): |
|
285 | 286 | repo = self._factory.repo(path) |
|
286 | 287 | ctx = repo[revision] |
|
287 | 288 | return list(ctx) |
|
288 | 289 | |
|
289 | 290 | @reraise_safe_exceptions |
|
290 | 291 | def ctx_parents(self, wire, revision): |
|
291 | 292 | repo = self._factory.repo(wire) |
|
292 | 293 | ctx = repo[revision] |
|
293 | 294 | return [parent.rev() for parent in ctx.parents()] |
|
294 | 295 | |
|
295 | 296 | @reraise_safe_exceptions |
|
296 | 297 | def ctx_phase(self, wire, revision): |
|
297 | 298 | repo = self._factory.repo(wire) |
|
298 | 299 | ctx = repo[revision] |
|
299 | 300 | # public=0, draft=1, secret=3 |
|
300 | 301 | return ctx.phase() |
|
301 | 302 | |
|
302 | 303 | @reraise_safe_exceptions |
|
303 | 304 | def ctx_obsolete(self, wire, revision): |
|
304 | 305 | repo = self._factory.repo(wire) |
|
305 | 306 | ctx = repo[revision] |
|
306 | 307 | return ctx.obsolete() |
|
307 | 308 | |
|
308 | 309 | @reraise_safe_exceptions |
|
309 | 310 | def ctx_hidden(self, wire, revision): |
|
310 | 311 | repo = self._factory.repo(wire) |
|
311 | 312 | ctx = repo[revision] |
|
312 | 313 | return ctx.hidden() |
|
313 | 314 | |
|
314 | 315 | @reraise_safe_exceptions |
|
315 | 316 | def ctx_substate(self, wire, revision): |
|
316 | 317 | repo = self._factory.repo(wire) |
|
317 | 318 | ctx = repo[revision] |
|
318 | 319 | return ctx.substate |
|
319 | 320 | |
|
320 | 321 | @reraise_safe_exceptions |
|
321 | 322 | def ctx_status(self, wire, revision): |
|
322 | 323 | repo = self._factory.repo(wire) |
|
323 | 324 | ctx = repo[revision] |
|
324 | 325 | status = repo[ctx.p1().node()].status(other=ctx.node()) |
|
325 | 326 | # object of status (odd, custom named tuple in mercurial) is not |
|
326 | 327 | # correctly serializable, we make it a list, as the underlying |
|
327 | 328 | # API expects this to be a list |
|
328 | 329 | return list(status) |
|
329 | 330 | |
|
330 | 331 | @reraise_safe_exceptions |
|
331 | 332 | def ctx_user(self, wire, revision): |
|
332 | 333 | repo = self._factory.repo(wire) |
|
333 | 334 | ctx = repo[revision] |
|
334 | 335 | return ctx.user() |
|
335 | 336 | |
|
336 | 337 | @reraise_safe_exceptions |
|
337 | 338 | def check_url(self, url, config): |
|
338 | 339 | _proto = None |
|
339 | 340 | if '+' in url[:url.find('://')]: |
|
340 | 341 | _proto = url[0:url.find('+')] |
|
341 | 342 | url = url[url.find('+') + 1:] |
|
342 | 343 | handlers = [] |
|
343 | 344 | url_obj = url_parser(url) |
|
344 | 345 | test_uri, authinfo = url_obj.authinfo() |
|
345 | 346 | url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd |
|
346 | 347 | url_obj.query = obfuscate_qs(url_obj.query) |
|
347 | 348 | |
|
348 | 349 | cleaned_uri = str(url_obj) |
|
349 | 350 | log.info("Checking URL for remote cloning/import: %s", cleaned_uri) |
|
350 | 351 | |
|
351 | 352 | if authinfo: |
|
352 | 353 | # create a password manager |
|
353 | 354 | passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm() |
|
354 | 355 | passmgr.add_password(*authinfo) |
|
355 | 356 | |
|
356 | 357 | handlers.extend((httpbasicauthhandler(passmgr), |
|
357 | 358 | httpdigestauthhandler(passmgr))) |
|
358 | 359 | |
|
359 | 360 | o = urllib2.build_opener(*handlers) |
|
360 | 361 | o.addheaders = [('Content-Type', 'application/mercurial-0.1'), |
|
361 | 362 | ('Accept', 'application/mercurial-0.1')] |
|
362 | 363 | |
|
363 | 364 | q = {"cmd": 'between'} |
|
364 | 365 | q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)}) |
|
365 | 366 | qs = '?%s' % urllib.urlencode(q) |
|
366 | 367 | cu = "%s%s" % (test_uri, qs) |
|
367 | 368 | req = urllib2.Request(cu, None, {}) |
|
368 | 369 | |
|
369 | 370 | try: |
|
370 | 371 | log.debug("Trying to open URL %s", cleaned_uri) |
|
371 | 372 | resp = o.open(req) |
|
372 | 373 | if resp.code != 200: |
|
373 | 374 | raise exceptions.URLError()('Return Code is not 200') |
|
374 | 375 | except Exception as e: |
|
375 | 376 | log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True) |
|
376 | 377 | # means it cannot be cloned |
|
377 | 378 | raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e)) |
|
378 | 379 | |
|
379 | 380 | # now check if it's a proper hg repo, but don't do it for svn |
|
380 | 381 | try: |
|
381 | 382 | if _proto == 'svn': |
|
382 | 383 | pass |
|
383 | 384 | else: |
|
384 | 385 | # check for pure hg repos |
|
385 | 386 | log.debug( |
|
386 | 387 | "Verifying if URL is a Mercurial repository: %s", |
|
387 | 388 | cleaned_uri) |
|
388 | 389 | ui = make_ui_from_config(config) |
|
389 | 390 | peer_checker = makepeer(ui, url) |
|
390 | 391 | peer_checker.lookup('tip') |
|
391 | 392 | except Exception as e: |
|
392 | 393 | log.warning("URL is not a valid Mercurial repository: %s", |
|
393 | 394 | cleaned_uri) |
|
394 | 395 | raise exceptions.URLError(e)( |
|
395 | 396 | "url [%s] does not look like an hg repo org_exc: %s" |
|
396 | 397 | % (cleaned_uri, e)) |
|
397 | 398 | |
|
398 | 399 | log.info("URL is a valid Mercurial repository: %s", cleaned_uri) |
|
399 | 400 | return True |
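For reference, the first probe above issues a minimal Mercurial wire-protocol request; for https://example.com/repo (a made-up URL) that is:

    # GET https://example.com/repo?cmd=between&pairs=<40 zeros>-<40 zeros>
    # a 200 response only proves reachability; peer_checker.lookup('tip')
    # is what confirms the endpoint actually speaks the hg protocol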
|
400 | 401 | |
|
401 | 402 | @reraise_safe_exceptions |
|
402 | 403 | def diff( |
|
403 | 404 | self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews, |
|
404 | 405 | context): |
|
405 | 406 | repo = self._factory.repo(wire) |
|
406 | 407 | |
|
407 | 408 | if file_filter: |
|
408 | 409 | match_filter = match(file_filter[0], '', [file_filter[1]]) |
|
409 | 410 | else: |
|
410 | 411 | match_filter = file_filter |
|
411 | 412 | opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context) |
|
412 | 413 | |
|
413 | 414 | try: |
|
414 | 415 | return "".join(patch.diff( |
|
415 | 416 | repo, node1=rev1, node2=rev2, match=match_filter, opts=opts)) |
|
416 | 417 | except RepoLookupError as e: |
|
417 | 418 | raise exceptions.LookupException(e)() |
|
418 | 419 | |
|
419 | 420 | @reraise_safe_exceptions |
|
420 | 421 | def node_history(self, wire, revision, path, limit): |
|
421 | 422 | repo = self._factory.repo(wire) |
|
422 | 423 | |
|
423 | 424 | ctx = repo[revision] |
|
424 | 425 | fctx = ctx.filectx(path) |
|
425 | 426 | |
|
426 | 427 | def history_iter(): |
|
427 | 428 | limit_rev = fctx.rev() |
|
428 | 429 | for obj in reversed(list(fctx.filelog())): |
|
429 | 430 | obj = fctx.filectx(obj) |
|
430 | 431 | if limit_rev >= obj.rev(): |
|
431 | 432 | yield obj |
|
432 | 433 | |
|
433 | 434 | history = [] |
|
434 | 435 | for cnt, obj in enumerate(history_iter()): |
|
435 | 436 | if limit and cnt >= limit: |
|
436 | 437 | break |
|
437 | 438 | history.append(hex(obj.node())) |
|
438 | 439 | |
|
439 | 440 | return history |
|
440 | 441 | |
|
441 | 442 | @reraise_safe_exceptions |
|
442 | 443 | def node_history_untill(self, wire, revision, path, limit): |
|
443 | 444 | repo = self._factory.repo(wire) |
|
444 | 445 | ctx = repo[revision] |
|
445 | 446 | fctx = ctx.filectx(path) |
|
446 | 447 | |
|
447 | 448 | file_log = list(fctx.filelog()) |
|
448 | 449 | if limit: |
|
449 | 450 | # Limit to the last n items |
|
450 | 451 | file_log = file_log[-limit:] |
|
451 | 452 | |
|
452 | 453 | return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)] |
|
453 | 454 | |
|
454 | 455 | @reraise_safe_exceptions |
|
455 | 456 | def fctx_annotate(self, wire, revision, path): |
|
456 | 457 | repo = self._factory.repo(wire) |
|
457 | 458 | ctx = repo[revision] |
|
458 | 459 | fctx = ctx.filectx(path) |
|
459 | 460 | |
|
460 | 461 | result = [] |
|
461 | 462 | for i, annotate_obj in enumerate(fctx.annotate(), 1): |
|
462 | 463 | ln_no = i |
|
463 | 464 | sha = hex(annotate_obj.fctx.node()) |
|
464 | 465 | content = annotate_obj.text |
|
465 | 466 | result.append((ln_no, sha, content)) |
|
466 | 467 | return result |
|
467 | 468 | |
|
468 | 469 | @reraise_safe_exceptions |
|
469 | 470 | def fctx_data(self, wire, revision, path): |
|
470 | 471 | repo = self._factory.repo(wire) |
|
471 | 472 | ctx = repo[revision] |
|
472 | 473 | fctx = ctx.filectx(path) |
|
473 | 474 | return fctx.data() |
|
474 | 475 | |
|
475 | 476 | @reraise_safe_exceptions |
|
476 | 477 | def fctx_flags(self, wire, revision, path): |
|
477 | 478 | repo = self._factory.repo(wire) |
|
478 | 479 | ctx = repo[revision] |
|
479 | 480 | fctx = ctx.filectx(path) |
|
480 | 481 | return fctx.flags() |
|
481 | 482 | |
|
482 | 483 | @reraise_safe_exceptions |
|
483 | 484 | def fctx_size(self, wire, revision, path): |
|
484 | 485 | repo = self._factory.repo(wire) |
|
485 | 486 | ctx = repo[revision] |
|
486 | 487 | fctx = ctx.filectx(path) |
|
487 | 488 | return fctx.size() |
|
488 | 489 | |
|
489 | 490 | @reraise_safe_exceptions |
|
490 | 491 | def get_all_commit_ids(self, wire, name): |
|
491 | 492 | repo = self._factory.repo(wire) |
|
492 | 493 | revs = repo.filtered(name).changelog.index |
|
493 | 494 | return map(lambda x: hex(x[7]), revs)[:-1] |
|
494 | 495 | |
|
495 | 496 | @reraise_safe_exceptions |
|
496 | 497 | def get_config_value(self, wire, section, name, untrusted=False): |
|
497 | 498 | repo = self._factory.repo(wire) |
|
498 | 499 | return repo.ui.config(section, name, untrusted=untrusted) |
|
499 | 500 | |
|
500 | 501 | @reraise_safe_exceptions |
|
501 | 502 | def get_config_bool(self, wire, section, name, untrusted=False): |
|
502 | 503 | repo = self._factory.repo(wire) |
|
503 | 504 | return repo.ui.configbool(section, name, untrusted=untrusted) |
|
504 | 505 | |
|
505 | 506 | @reraise_safe_exceptions |
|
506 | 507 | def get_config_list(self, wire, section, name, untrusted=False): |
|
507 | 508 | repo = self._factory.repo(wire) |
|
508 | 509 | return repo.ui.configlist(section, name, untrusted=untrusted) |
|
509 | 510 | |
|
510 | 511 | @reraise_safe_exceptions |
|
511 | 512 | def is_large_file(self, wire, path): |
|
512 | 513 | return largefiles.lfutil.isstandin(path) |
|
513 | 514 | |
|
514 | 515 | @reraise_safe_exceptions |
|
515 | 516 | def in_largefiles_store(self, wire, sha): |
|
516 | 517 | repo = self._factory.repo(wire) |
|
517 | 518 | return largefiles.lfutil.instore(repo, sha) |
|
518 | 519 | |
|
519 | 520 | @reraise_safe_exceptions |
|
520 | 521 | def in_user_cache(self, wire, sha): |
|
521 | 522 | repo = self._factory.repo(wire) |
|
522 | 523 | return largefiles.lfutil.inusercache(repo.ui, sha) |
|
523 | 524 | |
|
524 | 525 | @reraise_safe_exceptions |
|
525 | 526 | def store_path(self, wire, sha): |
|
526 | 527 | repo = self._factory.repo(wire) |
|
527 | 528 | return largefiles.lfutil.storepath(repo, sha) |
|
528 | 529 | |
|
529 | 530 | @reraise_safe_exceptions |
|
530 | 531 | def link(self, wire, sha, path): |
|
531 | 532 | repo = self._factory.repo(wire) |
|
532 | 533 | largefiles.lfutil.link( |
|
533 | 534 | largefiles.lfutil.usercachepath(repo.ui, sha), path) |
|
534 | 535 | |
|
535 | 536 | @reraise_safe_exceptions |
|
536 | 537 | def localrepository(self, wire, create=False): |
|
537 | 538 | self._factory.repo(wire, create=create) |
|
538 | 539 | |
|
539 | 540 | @reraise_safe_exceptions |
|
540 | 541 | def lookup(self, wire, revision, both): |
|
541 | 542 | |
|
542 | 543 | repo = self._factory.repo(wire) |
|
543 | 544 | |
|
544 | 545 | if isinstance(revision, int): |
|
545 | 546 | # NOTE(marcink): |
|
546 | 547 | # since Mercurial doesn't support indexes properly |
|
547 | 548 | # we need to shift accordingly by one to get a proper index, e.g. |
|
548 | 549 | # repo[-1] => repo[-2] |
|
549 | 550 | # repo[0] => repo[-1] |
|
550 | 551 | # repo[1] => repo[2] we also never call repo[0] because |
|
551 | 552 | # it's actually second commit |
|
552 | 553 | if revision <= 0: |
|
553 | 554 | revision = revision + -1 |
|
554 | 555 | else: |
|
555 | 556 | revision = revision + 1 |
|
556 | 557 | |
|
557 | 558 | try: |
|
558 | 559 | ctx = repo[revision] |
|
559 | 560 | except RepoLookupError as e: |
|
560 | 561 | raise exceptions.LookupException(e)(revision) |
|
561 | 562 | except LookupError as e: |
|
562 | 563 | raise exceptions.LookupException(e)(e.name) |
|
563 | 564 | |
|
564 | 565 | if not both: |
|
565 | 566 | return ctx.hex() |
|
566 | 567 | |
|
567 | 568 | ctx = repo[ctx.hex()] |
|
568 | 569 | return ctx.hex(), ctx.rev() |
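A worked example of the shift described in the NOTE, pulled out as a standalone helper (not part of the remote API):

    def _shift(revision):
        # mirrors the branch inside lookup() above
        return revision + -1 if revision <= 0 else revision + 1

    assert _shift(-1) == -2   # repo[-1] => repo[-2]
    assert _shift(0) == -1    # repo[0]  => repo[-1]
    assert _shift(1) == 2     # repo[1]  => repo[2]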
|
569 | 570 | |
|
570 | 571 | @reraise_safe_exceptions |
|
571 | 572 | def pull(self, wire, url, commit_ids=None): |
|
572 | 573 | repo = self._factory.repo(wire) |
|
573 | 574 | # Disable any prompts for this repo |
|
574 | 575 | repo.ui.setconfig('ui', 'interactive', 'off', '-y') |
|
575 | 576 | |
|
576 | 577 | remote = peer(repo, {}, url) |
|
577 | 578 | # Disable any prompts for this remote |
|
578 | 579 | remote.ui.setconfig('ui', 'interactive', 'off', '-y') |
|
579 | 580 | |
|
580 | 581 | if commit_ids: |
|
581 | 582 | commit_ids = [bin(commit_id) for commit_id in commit_ids] |
|
582 | 583 | |
|
583 | 584 | return exchange.pull( |
|
584 | 585 | repo, remote, heads=commit_ids, force=None).cgresult |
|
585 | 586 | |
|
586 | 587 | @reraise_safe_exceptions |
|
587 | 588 | def sync_push(self, wire, url): |
|
588 | 589 | if not self.check_url(url, wire['config']): |
|
589 | 590 | return |
|
590 | 591 | |
|
591 | 592 | repo = self._factory.repo(wire) |
|
592 | 593 | |
|
593 | 594 | # Disable any prompts for this repo |
|
594 | 595 | repo.ui.setconfig('ui', 'interactive', 'off', '-y') |
|
595 | 596 | |
|
596 | 597 | bookmarks = dict(repo._bookmarks).keys() |
|
597 | 598 | remote = peer(repo, {}, url) |
|
598 | 599 | # Disable any prompts for this remote |
|
599 | 600 | remote.ui.setconfig('ui', 'interactive', 'off', '-y') |
|
600 | 601 | |
|
601 | 602 | return exchange.push( |
|
602 | 603 | repo, remote, newbranch=True, bookmarks=bookmarks).cgresult |
|
603 | 604 | |
|
604 | 605 | @reraise_safe_exceptions |
|
605 | 606 | def revision(self, wire, rev): |
|
606 | 607 | repo = self._factory.repo(wire) |
|
607 | 608 | ctx = repo[rev] |
|
608 | 609 | return ctx.rev() |
|
609 | 610 | |
|
610 | 611 | @reraise_safe_exceptions |
|
611 | 612 | def rev_range(self, wire, filter): |
|
612 | 613 | repo = self._factory.repo(wire) |
|
613 | 614 | revisions = [rev for rev in revrange(repo, filter)] |
|
614 | 615 | return revisions |
|
615 | 616 | |
|
616 | 617 | @reraise_safe_exceptions |
|
617 | 618 | def rev_range_hash(self, wire, node): |
|
618 | 619 | repo = self._factory.repo(wire) |
|
619 | 620 | |
|
620 | 621 | def get_revs(repo, rev_opt): |
|
621 | 622 | if rev_opt: |
|
622 | 623 | revs = revrange(repo, rev_opt) |
|
623 | 624 | if len(revs) == 0: |
|
624 | 625 | return (nullrev, nullrev) |
|
625 | 626 | return max(revs), min(revs) |
|
626 | 627 | else: |
|
627 | 628 | return len(repo) - 1, 0 |
|
628 | 629 | |
|
629 | 630 | stop, start = get_revs(repo, [node + ':']) |
|
630 | 631 | revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)] |
|
631 | 632 | return revs |
|
632 | 633 | |
|
633 | 634 | @reraise_safe_exceptions |
|
634 | 635 | def revs_from_revspec(self, wire, rev_spec, *args, **kwargs): |
|
635 | 636 | other_path = kwargs.pop('other_path', None) |
|
636 | 637 | |
|
637 | 638 | # case when we want to compare two independent repositories |
|
638 | 639 | if other_path and other_path != wire["path"]: |
|
639 | 640 | baseui = self._factory._create_config(wire["config"]) |
|
640 | 641 | repo = unionrepo.unionrepository(baseui, other_path, wire["path"]) |
|
641 | 642 | else: |
|
642 | 643 | repo = self._factory.repo(wire) |
|
643 | 644 | return list(repo.revs(rev_spec, *args)) |
|
644 | 645 | |
|
645 | 646 | @reraise_safe_exceptions |
|
646 | 647 | def strip(self, wire, revision, update, backup): |
|
647 | 648 | repo = self._factory.repo(wire) |
|
648 | 649 | ctx = repo[revision] |
|
649 | 650 | hgext_strip( |
|
650 | 651 | repo.baseui, repo, ctx.node(), update=update, backup=backup) |
|
651 | 652 | |
|
652 | 653 | @reraise_safe_exceptions |
|
653 | 654 | def verify(self, wire,): |
|
654 | 655 | repo = self._factory.repo(wire) |
|
655 | 656 | baseui = self._factory._create_config(wire['config']) |
|
656 | 657 | baseui.setconfig('ui', 'quiet', 'false') |
|
657 | 658 | output = io.BytesIO() |
|
658 | 659 | |
|
659 | 660 | def write(data, **unused_kwargs): |
|
660 | 661 | output.write(data) |
|
661 | 662 | baseui.write = write |
|
662 | 663 | |
|
663 | 664 | repo.ui = baseui |
|
664 | 665 | verify.verify(repo) |
|
665 | 666 | return output.getvalue() |
|
666 | 667 | |
|
667 | 668 | @reraise_safe_exceptions |
|
668 | 669 | def tag(self, wire, name, revision, message, local, user, |
|
669 | 670 | tag_time, tag_timezone): |
|
670 | 671 | repo = self._factory.repo(wire) |
|
671 | 672 | ctx = repo[revision] |
|
672 | 673 | node = ctx.node() |
|
673 | 674 | |
|
674 | 675 | date = (tag_time, tag_timezone) |
|
675 | 676 | try: |
|
676 | 677 | hg_tag.tag(repo, name, node, message, local, user, date) |
|
677 | 678 | except Abort as e: |
|
678 | 679 | log.exception("Tag operation aborted") |
|
679 | 680 | # Exception can contain unicode which we convert |
|
680 | 681 | raise exceptions.AbortException(e)(repr(e)) |
|
681 | 682 | |
|
682 | 683 | @reraise_safe_exceptions |
|
683 | 684 | def tags(self, wire): |
|
684 | 685 | repo = self._factory.repo(wire) |
|
685 | 686 | return repo.tags() |
|
686 | 687 | |
|
687 | 688 | @reraise_safe_exceptions |
|
688 | 689 | def update(self, wire, node=None, clean=False): |
|
689 | 690 | repo = self._factory.repo(wire) |
|
690 | 691 | baseui = self._factory._create_config(wire['config']) |
|
691 | 692 | commands.update(baseui, repo, node=node, clean=clean) |
|
692 | 693 | |
|
693 | 694 | @reraise_safe_exceptions |
|
694 | 695 | def identify(self, wire): |
|
695 | 696 | repo = self._factory.repo(wire) |
|
696 | 697 | baseui = self._factory._create_config(wire['config']) |
|
697 | 698 | output = io.BytesIO() |
|
698 | 699 | baseui.write = output.write |
|
699 | 700 | # This is required to get a full node id |
|
700 | 701 | baseui.debugflag = True |
|
701 | 702 | commands.identify(baseui, repo, id=True) |
|
702 | 703 | |
|
703 | 704 | return output.getvalue() |
|
704 | 705 | |
|
705 | 706 | @reraise_safe_exceptions |
|
706 | 707 | def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, |
|
707 | 708 | hooks=True): |
|
708 | 709 | repo = self._factory.repo(wire) |
|
709 | 710 | baseui = self._factory._create_config(wire['config'], hooks=hooks) |
|
710 | 711 | |
|
711 | 712 | # Mercurial internally has a lot of logic that checks ONLY if an |

712 | 713 | # option is defined; we pass an option only when it is defined |
|
713 | 714 | opts = {} |
|
714 | 715 | if bookmark: |
|
715 | 716 | opts['bookmark'] = bookmark |
|
716 | 717 | if branch: |
|
717 | 718 | opts['branch'] = branch |
|
718 | 719 | if revision: |
|
719 | 720 | opts['rev'] = revision |
|
720 | 721 | |
|
721 | 722 | commands.pull(baseui, repo, source, **opts) |
|
722 | 723 | |
|
723 | 724 | @reraise_safe_exceptions |
|
724 | 725 | def heads(self, wire, branch=None): |
|
725 | 726 | repo = self._factory.repo(wire) |
|
726 | 727 | baseui = self._factory._create_config(wire['config']) |
|
727 | 728 | output = io.BytesIO() |
|
728 | 729 | |
|
729 | 730 | def write(data, **unused_kwargs): |
|
730 | 731 | output.write(data) |
|
731 | 732 | |
|
732 | 733 | baseui.write = write |
|
733 | 734 | if branch: |
|
734 | 735 | args = [branch] |
|
735 | 736 | else: |
|
736 | 737 | args = [] |
|
737 | 738 | commands.heads(baseui, repo, template='{node} ', *args) |
|
738 | 739 | |
|
739 | 740 | return output.getvalue() |
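
Because `heads()` renders each head through the `'{node} '` template, the return value is a single space-separated string of full node ids. A hedged parsing sketch (`remote` is assumed to be an instance of this class):

```python
raw = remote.heads(wire)            # e.g. 'deadbeef... cafebabe... '
head_ids = raw.strip().split(' ')   # list of 40-char node ids
```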
|
740 | 741 | |
|
741 | 742 | @reraise_safe_exceptions |
|
742 | 743 | def ancestor(self, wire, revision1, revision2): |
|
743 | 744 | repo = self._factory.repo(wire) |
|
744 | 745 | changelog = repo.changelog |
|
745 | 746 | lookup = repo.lookup |
|
746 | 747 | a = changelog.ancestor(lookup(revision1), lookup(revision2)) |
|
747 | 748 | return hex(a) |
|
748 | 749 | |
|
749 | 750 | @reraise_safe_exceptions |
|
750 | 751 | def push(self, wire, revisions, dest_path, hooks=True, |
|
751 | 752 | push_branches=False): |
|
752 | 753 | repo = self._factory.repo(wire) |
|
753 | 754 | baseui = self._factory._create_config(wire['config'], hooks=hooks) |
|
754 | 755 | commands.push(baseui, repo, dest=dest_path, rev=revisions, |
|
755 | 756 | new_branch=push_branches) |
|
756 | 757 | |
|
757 | 758 | @reraise_safe_exceptions |
|
758 | 759 | def merge(self, wire, revision): |
|
759 | 760 | repo = self._factory.repo(wire) |
|
760 | 761 | baseui = self._factory._create_config(wire['config']) |
|
761 | 762 | repo.ui.setconfig('ui', 'merge', 'internal:dump') |
|
762 | 763 | |
|
763 | 764 | # When sub repositories are used, mercurial prompts the user in |

764 | 765 | # case of merge conflicts or different sub repository sources. By |

765 | 766 | # setting the interactive flag to `False` mercurial doesn't prompt the |

766 | 767 | # user but instead uses a default value. |
|
767 | 768 | repo.ui.setconfig('ui', 'interactive', False) |
|
768 | 769 | |
|
769 | 770 | commands.merge(baseui, repo, rev=revision) |
|
770 | 771 | |
|
771 | 772 | @reraise_safe_exceptions |
|
772 | 773 | def commit(self, wire, message, username, close_branch=False): |
|
773 | 774 | repo = self._factory.repo(wire) |
|
774 | 775 | baseui = self._factory._create_config(wire['config']) |
|
775 | 776 | repo.ui.setconfig('ui', 'username', username) |
|
776 | 777 | commands.commit(baseui, repo, message=message, close_branch=close_branch) |
|
777 | 778 | |
|
778 | 779 | @reraise_safe_exceptions |
|
779 | 780 | def rebase(self, wire, source=None, dest=None, abort=False): |
|
780 | 781 | repo = self._factory.repo(wire) |
|
781 | 782 | baseui = self._factory._create_config(wire['config']) |
|
782 | 783 | repo.ui.setconfig('ui', 'merge', 'internal:dump') |
|
783 | 784 | rebase.rebase( |
|
784 | 785 | baseui, repo, base=source, dest=dest, abort=abort, keep=not abort) |
|
785 | 786 | |
|
786 | 787 | @reraise_safe_exceptions |
|
787 | 788 | def bookmark(self, wire, bookmark, revision=None): |
|
788 | 789 | repo = self._factory.repo(wire) |
|
789 | 790 | baseui = self._factory._create_config(wire['config']) |
|
790 | 791 | commands.bookmark(baseui, repo, bookmark, rev=revision, force=True) |
|
791 | 792 | |
|
792 | 793 | @reraise_safe_exceptions |
|
793 | 794 | def install_hooks(self, wire, force=False): |
|
794 | 795 | # we don't need any special hooks for Mercurial |
|
795 | 796 | pass |
|
797 | ||
|
798 | @reraise_safe_exceptions | |
|
799 | def get_hooks_info(self, wire): | |
|
800 | return { | |
|
801 | 'pre_version': vcsserver.__version__, | |
|
802 | 'post_version': vcsserver.__version__, | |
|
803 | } |
@@ -1,154 +1,203 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | |
|
3 | 3 | # RhodeCode VCSServer provides access to different vcs backends via network. |
|
4 | 4 | # Copyright (C) 2014-2019 RhodeCode GmbH |
|
5 | 5 | # |
|
6 | 6 | # This program is free software; you can redistribute it and/or modify |
|
7 | 7 | # it under the terms of the GNU General Public License as published by |
|
8 | 8 | # the Free Software Foundation; either version 3 of the License, or |
|
9 | 9 | # (at your option) any later version. |
|
10 | 10 | # |
|
11 | 11 | # This program is distributed in the hope that it will be useful, |
|
12 | 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
13 | 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
14 | 14 | # GNU General Public License for more details. |
|
15 | 15 | # |
|
16 | 16 | # You should have received a copy of the GNU General Public License |
|
17 | 17 | # along with this program; if not, write to the Free Software Foundation, |
|
18 | 18 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
|
19 | 19 | |
|
20 | 20 | import re |
|
21 | 21 | import os |
|
22 | 22 | import sys |
|
23 | 23 | import datetime |
|
24 | 24 | import logging |
|
25 | 25 | import pkg_resources |
|
26 | 26 | |
|
27 | 27 | import vcsserver |
|
28 | 28 | |
|
29 | 29 | log = logging.getLogger(__name__) |
|
30 | 30 | |
|
31 | 31 | |
|
32 | def get_git_hooks_path(repo_path, bare): | |
|
33 | hooks_path = os.path.join(repo_path, 'hooks') | |
|
34 | if not bare: | |
|
35 | hooks_path = os.path.join(repo_path, '.git', 'hooks') | |
|
36 | ||
|
37 | return hooks_path | |
|
38 | ||
|
39 | ||
|
32 | 40 | def install_git_hooks(repo_path, bare, executable=None, force_create=False): |
|
33 | 41 | """ |
|
34 | 42 | Creates a RhodeCode hook inside a git repository |
|
35 | 43 | |
|
36 | 44 | :param repo_path: path to repository |
|
37 | 45 | :param executable: binary executable to put in the hooks |
|
38 | 46 | :param force_create: Create even if same name hook exists |
|
39 | 47 | """ |
|
40 | 48 | executable = executable or sys.executable |
|
41 | hooks_path = os.path.join(repo_path, 'hooks') |
|
42 | if not bare: | |
|
43 | hooks_path = os.path.join(repo_path, '.git', 'hooks') | |
|
49 | hooks_path = get_git_hooks_path(repo_path, bare) | |
|
50 | ||
|
44 | 51 | if not os.path.isdir(hooks_path): |
|
45 | 52 | os.makedirs(hooks_path, mode=0o777) |
|
46 | 53 | |
|
47 | 54 | tmpl_post = pkg_resources.resource_string( |
|
48 | 55 | 'vcsserver', '/'.join( |
|
49 | 56 | ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl'))) |
|
50 | 57 | tmpl_pre = pkg_resources.resource_string( |
|
51 | 58 | 'vcsserver', '/'.join( |
|
52 | 59 | ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl'))) |
|
53 | 60 | |
|
54 | 61 | path = '' # not used for now |
|
55 | 62 | timestamp = datetime.datetime.utcnow().isoformat() |
|
56 | 63 | |
|
57 | 64 | for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]: |
|
58 | 65 | log.debug('Installing git hook in repo %s', repo_path) |
|
59 | 66 | _hook_file = os.path.join(hooks_path, '%s-receive' % h_type) |
|
60 | 67 | _rhodecode_hook = check_rhodecode_hook(_hook_file) |
|
61 | 68 | |
|
62 | 69 | if _rhodecode_hook or force_create: |
|
63 | 70 | log.debug('writing git %s hook file at %s !', h_type, _hook_file) |
|
64 | 71 | try: |
|
65 | 72 | with open(_hook_file, 'wb') as f: |
|
66 | 73 | template = template.replace( |
|
67 | 74 | '_TMPL_', vcsserver.__version__) |
|
68 | 75 | template = template.replace('_DATE_', timestamp) |
|
69 | 76 | template = template.replace('_ENV_', executable) |
|
70 | 77 | template = template.replace('_PATH_', path) |
|
71 | 78 | f.write(template) |
|
72 | 79 | os.chmod(_hook_file, 0o755) |
|
73 | 80 | except IOError: |
|
74 | 81 | log.exception('error writing hook file %s', _hook_file) |
|
75 | 82 | else: |
|
76 | 83 | log.debug('skipping writing hook file') |
|
77 | 84 | |
|
78 | 85 | return True |
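
The `_TMPL_`, `_DATE_`, `_ENV_` and `_PATH_` markers are plain placeholders substituted via `str.replace`; a sketch with an illustrative template string (the real templates ship as `.py.tmpl` package resources):

```python
template = "#!_ENV_\nRC_HOOK_VER = '_TMPL_'  # generated _DATE_\n"
template = template.replace('_TMPL_', '4.17.0')               # vcsserver.__version__
template = template.replace('_DATE_', '2019-01-01T00:00:00')  # utcnow().isoformat()
template = template.replace('_ENV_', '/usr/bin/python')       # hook executable
template = template.replace('_PATH_', '')                     # not used for now
```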
|
79 | 86 | |
|
80 | 87 | |
|
88 | def get_svn_hooks_path(repo_path): | |
|
89 | hooks_path = os.path.join(repo_path, 'hooks') | |
|
90 | ||
|
91 | return hooks_path | |
|
92 | ||
|
93 | ||
|
81 | 94 | def install_svn_hooks(repo_path, executable=None, force_create=False): |
|
82 | 95 | """ |
|
83 | 96 | Creates RhodeCode hooks inside a svn repository |
|
84 | 97 | |
|
85 | 98 | :param repo_path: path to repository |
|
86 | 99 | :param executable: binary executable to put in the hooks |
|
87 | 100 | :param force_create: Create even if same name hook exists |
|
88 | 101 | """ |
|
89 | 102 | executable = executable or sys.executable |
|
90 | hooks_path = os.path.join(repo_path, 'hooks') |
|
103 | hooks_path = get_svn_hooks_path(repo_path) | |
|
91 | 104 | if not os.path.isdir(hooks_path): |
|
92 | 105 | os.makedirs(hooks_path, mode=0o777) |
|
93 | 106 | |
|
94 | 107 | tmpl_post = pkg_resources.resource_string( |
|
95 | 108 | 'vcsserver', '/'.join( |
|
96 | 109 | ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl'))) |
|
97 | 110 | tmpl_pre = pkg_resources.resource_string( |
|
98 | 111 | 'vcsserver', '/'.join( |
|
99 | 112 | ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl'))) |
|
100 | 113 | |
|
101 | 114 | path = '' # not used for now |
|
102 | 115 | timestamp = datetime.datetime.utcnow().isoformat() |
|
103 | 116 | |
|
104 | 117 | for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]: |
|
105 | 118 | log.debug('Installing svn hook in repo %s', repo_path) |
|
106 | 119 | _hook_file = os.path.join(hooks_path, '%s-commit' % h_type) |
|
107 | 120 | _rhodecode_hook = check_rhodecode_hook(_hook_file) |
|
108 | 121 | |
|
109 | 122 | if _rhodecode_hook or force_create: |
|
110 | 123 | log.debug('writing svn %s hook file at %s !', h_type, _hook_file) |
|
111 | 124 | |
|
112 | 125 | try: |
|
113 | 126 | with open(_hook_file, 'wb') as f: |
|
114 | 127 | template = template.replace( |
|
115 | 128 | '_TMPL_', vcsserver.__version__) |
|
116 | 129 | template = template.replace('_DATE_', timestamp) |
|
117 | 130 | template = template.replace('_ENV_', executable) |
|
118 | 131 | template = template.replace('_PATH_', path) |
|
119 | 132 | |
|
120 | 133 | f.write(template) |
|
121 | 134 | os.chmod(_hook_file, 0o755) |
|
122 | 135 | except IOError: |
|
123 | 136 | log.exception('error writing hook file %s', _hook_file) |
|
124 | 137 | else: |
|
125 | 138 | log.debug('skipping writing hook file') |
|
126 | 139 | |
|
127 | 140 | return True |
|
128 | 141 | |
|
129 | 142 | |
|
143 | def get_version_from_hook(hook_path): | |
|
144 | version = '' | |
|
145 | hook_content = read_hook_content(hook_path) | |
|
146 | matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content) | |
|
147 | if matches: | |
|
148 | try: | |
|
149 | version = matches.groups()[0] | |
|
150 | log.debug('got version %s from hooks.', version) | |
|
151 | except Exception: | |
|
152 | log.exception("Exception while reading the hook version.") | |
|
153 | return version.replace("'", "") | |
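
A worked example of the `RC_HOOK_VER` extraction above, with an illustrative hook body:

```python
import re

hook_content = "#!/usr/bin/env python\nRC_HOOK_VER = '4.17.0'\n"
matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
version = matches.groups()[0].replace("'", "") if matches else ''
# version == '4.17.0'; a hook without the marker yields '' and is
# treated as not created by RhodeCode
```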
|
154 | ||
|
155 | ||
|
130 | 156 | def check_rhodecode_hook(hook_path): |
|
131 | 157 | """ |
|
132 | 158 | Check if the hook was created by RhodeCode |
|
133 | 159 | """ |
|
134 | 160 | if not os.path.exists(hook_path): |
|
135 | 161 | return True |
|
136 | 162 | |
|
137 | log.debug('hook exists, checking if it is from rhodecode') |
|
138 | hook_content = read_hook_content(hook_path) | |
|
139 | matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content) | |
|
140 | if matches: |

141 | try: |
142 | version = matches.groups()[0] | |
|
143 | log.debug('got version %s from hooks.', version) | |
|
144 | return True | |
|
145 | except Exception: | |
|
146 | log.exception("Exception while reading the hook version.") | |
|
163 | log.debug('hook exists, checking if it is from RhodeCode') | |
|
164 | ||
|
165 | version = get_version_from_hook(hook_path) | |
|
166 | if version: | |
|
167 | return True | |
|
147 | 168 | |
|
148 | 169 | return False |
|
149 | 170 | |
|
150 | 171 | |
|
151 | 172 | def read_hook_content(hook_path): |
|
152 | 173 | with open(hook_path, 'rb') as f: |
|
153 | 174 | content = f.read() |
|
154 | 175 | return content |
|
176 | ||
|
177 | ||
|
178 | def get_git_pre_hook_version(repo_path, bare): | |
|
179 | hooks_path = get_git_hooks_path(repo_path, bare) | |
|
180 | _hook_file = os.path.join(hooks_path, 'pre-receive') | |
|
181 | version = get_version_from_hook(_hook_file) | |
|
182 | return version | |
|
183 | ||
|
184 | ||
|
185 | def get_git_post_hook_version(repo_path, bare): | |
|
186 | hooks_path = get_git_hooks_path(repo_path, bare) | |
|
187 | _hook_file = os.path.join(hooks_path, 'post-receive') | |
|
188 | version = get_version_from_hook(_hook_file) | |
|
189 | return version | |
|
190 | ||
|
191 | ||
|
192 | def get_svn_pre_hook_version(repo_path): | |
|
193 | hooks_path = get_svn_hooks_path(repo_path) | |
|
194 | _hook_file = os.path.join(hooks_path, 'pre-commit') | |
|
195 | version = get_version_from_hook(_hook_file) | |
|
196 | return version | |
|
197 | ||
|
198 | ||
|
199 | def get_svn_post_hook_version(repo_path): | |
|
200 | hooks_path = get_svn_hooks_path(repo_path) | |
|
201 | _hook_file = os.path.join(hooks_path, 'post-commit') | |
|
202 | version = get_version_from_hook(_hook_file) | |
|
203 | return version |
@@ -1,722 +1,732 b'' | |||
|
1 | 1 | # RhodeCode VCSServer provides access to different vcs backends via network. |
|
2 | 2 | # Copyright (C) 2014-2019 RhodeCode GmbH |
|
3 | 3 | # |
|
4 | 4 | # This program is free software; you can redistribute it and/or modify |
|
5 | 5 | # it under the terms of the GNU General Public License as published by |
|
6 | 6 | # the Free Software Foundation; either version 3 of the License, or |
|
7 | 7 | # (at your option) any later version. |
|
8 | 8 | # |
|
9 | 9 | # This program is distributed in the hope that it will be useful, |
|
10 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of |
|
11 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
|
12 | 12 | # GNU General Public License for more details. |
|
13 | 13 | # |
|
14 | 14 | # You should have received a copy of the GNU General Public License |
|
15 | 15 | # along with this program; if not, write to the Free Software Foundation, |
|
16 | 16 | # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
|
17 | 17 | |
|
18 | 18 | from __future__ import absolute_import |
|
19 | 19 | |
|
20 | 20 | import os |
|
21 | 21 | import subprocess |
|
22 | 22 | from urllib2 import URLError |
|
23 | 23 | import urlparse |
|
24 | 24 | import logging |
|
25 | 25 | import posixpath as vcspath |
|
26 | 26 | import StringIO |
|
27 | 27 | import urllib |
|
28 | 28 | import traceback |
|
29 | 29 | |
|
30 | 30 | import svn.client |
|
31 | 31 | import svn.core |
|
32 | 32 | import svn.delta |
|
33 | 33 | import svn.diff |
|
34 | 34 | import svn.fs |
|
35 | 35 | import svn.repos |
|
36 | 36 | |
|
37 | 37 | from vcsserver import svn_diff, exceptions, subprocessio, settings |
|
38 | 38 | from vcsserver.base import RepoFactory, raise_from_original |
|
39 | 39 | |
|
40 | 40 | log = logging.getLogger(__name__) |
|
41 | 41 | |
|
42 | 42 | |
|
43 | 43 | # Set of svn compatible version flags. |
|
44 | 44 | # Compare with subversion/svnadmin/svnadmin.c |
|
45 | 45 | svn_compatible_versions = { |
|
46 | 46 | 'pre-1.4-compatible', |
|
47 | 47 | 'pre-1.5-compatible', |
|
48 | 48 | 'pre-1.6-compatible', |
|
49 | 49 | 'pre-1.8-compatible', |
|
50 | 50 | 'pre-1.9-compatible' |
|
51 | 51 | } |
|
52 | 52 | |
|
53 | 53 | svn_compatible_versions_map = { |
|
54 | 54 | 'pre-1.4-compatible': '1.3', |
|
55 | 55 | 'pre-1.5-compatible': '1.4', |
|
56 | 56 | 'pre-1.6-compatible': '1.5', |
|
57 | 57 | 'pre-1.8-compatible': '1.7', |
|
58 | 58 | 'pre-1.9-compatible': '1.8', |
|
59 | 59 | } |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | def reraise_safe_exceptions(func): |
|
63 | 63 | """Decorator for converting svn exceptions to something neutral.""" |
|
64 | 64 | def wrapper(*args, **kwargs): |
|
65 | 65 | try: |
|
66 | 66 | return func(*args, **kwargs) |
|
67 | 67 | except Exception as e: |
|
68 | 68 | if not hasattr(e, '_vcs_kind'): |
|
69 | 69 | log.exception("Unhandled exception in svn remote call") |
|
70 | 70 | raise_from_original(exceptions.UnhandledException(e)) |
|
71 | 71 | raise |
|
72 | 72 | return wrapper |
|
73 | 73 | |
|
74 | 74 | |
|
75 | 75 | class SubversionFactory(RepoFactory): |
|
76 | 76 | repo_type = 'svn' |
|
77 | 77 | |
|
78 | 78 | def _create_repo(self, wire, create, compatible_version): |
|
79 | 79 | path = svn.core.svn_path_canonicalize(wire['path']) |
|
80 | 80 | if create: |
|
81 | 81 | fs_config = {'compatible-version': '1.9'} |
|
82 | 82 | if compatible_version: |
|
83 | 83 | if compatible_version not in svn_compatible_versions: |
|
84 | 84 | raise Exception('Unknown SVN compatible version "{}"' |
|
85 | 85 | .format(compatible_version)) |
|
86 | 86 | fs_config['compatible-version'] = \ |
|
87 | 87 | svn_compatible_versions_map[compatible_version] |
|
88 | 88 | |
|
89 | 89 | log.debug('Create SVN repo with config "%s"', fs_config) |
|
90 | 90 | repo = svn.repos.create(path, "", "", None, fs_config) |
|
91 | 91 | else: |
|
92 | 92 | repo = svn.repos.open(path) |
|
93 | 93 | |
|
94 | 94 | log.debug('Got SVN object: %s', repo) |
|
95 | 95 | return repo |
|
96 | 96 | |
|
97 | 97 | def repo(self, wire, create=False, compatible_version=None): |
|
98 | 98 | """ |
|
99 | 99 | Get a repository instance for the given path. |
|
100 | 100 | |
|
101 | 101 | Internally uses the low level beaker API since the decorators introduce |
|
102 | 102 | significant overhead. |
|
103 | 103 | """ |
|
104 | 104 | region = self._cache_region |
|
105 | 105 | context = wire.get('context', None) |
|
106 | 106 | repo_path = wire.get('path', '') |
|
107 | 107 | context_uid = '{}'.format(context) |
|
108 | 108 | cache = wire.get('cache', True) |
|
109 | 109 | cache_on = context and cache |
|
110 | 110 | |
|
111 | 111 | @region.conditional_cache_on_arguments(condition=cache_on) |
|
112 | 112 | def create_new_repo(_repo_type, _repo_path, _context_uid, compatible_version_id): |
|
113 | 113 | return self._create_repo(wire, create, compatible_version) |
|
114 | 114 | |
|
115 | 115 | return create_new_repo(self.repo_type, repo_path, context_uid, |
|
116 | 116 | compatible_version) |
|
117 | 117 | |
|
118 | 118 | |
|
119 | 119 | NODE_TYPE_MAPPING = { |
|
120 | 120 | svn.core.svn_node_file: 'file', |
|
121 | 121 | svn.core.svn_node_dir: 'dir', |
|
122 | 122 | } |
|
123 | 123 | |
|
124 | 124 | |
|
125 | 125 | class SvnRemote(object): |
|
126 | 126 | |
|
127 | 127 | def __init__(self, factory, hg_factory=None): |
|
128 | 128 | self._factory = factory |
|
129 | 129 | # TODO: Remove once we do not use internal Mercurial objects anymore |
|
130 | 130 | # for subversion |
|
131 | 131 | self._hg_factory = hg_factory |
|
132 | 132 | |
|
133 | 133 | @reraise_safe_exceptions |
|
134 | 134 | def discover_svn_version(self): |
|
135 | 135 | try: |
|
136 | 136 | import svn.core |
|
137 | 137 | svn_ver = svn.core.SVN_VERSION |
|
138 | 138 | except ImportError: |
|
139 | 139 | svn_ver = None |
|
140 | 140 | return svn_ver |
|
141 | 141 | |
|
142 | 142 | def check_url(self, url, config_items): |
|
143 | 143 | # this can throw exception if not installed, but we detect this |
|
144 | 144 | from hgsubversion import svnrepo |
|
145 | 145 | |
|
146 | 146 | baseui = self._hg_factory._create_config(config_items) |
|
147 | 147 | # the uuid function gets a valid UUID only from a proper repo, else |

148 | 148 | # it throws an exception |
|
149 | 149 | try: |
|
150 | 150 | svnrepo.svnremoterepo(baseui, url).svn.uuid |
|
151 | 151 | except Exception: |
|
152 | 152 | tb = traceback.format_exc() |
|
153 | 153 | log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb) |
|
154 | 154 | raise URLError( |
|
155 | 155 | '"%s" is not a valid Subversion source url.' % (url, )) |
|
156 | 156 | return True |
|
157 | 157 | |
|
158 | 158 | def is_path_valid_repository(self, wire, path): |
|
159 | 159 | |
|
160 | 160 | # NOTE(marcink): short circuit the check for SVN repo |
|
161 | 161 | # repos.open might be expensive to call, but we have one cheap |

162 | 162 | # precondition that we can use: checking for the 'format' file |
|
163 | 163 | |
|
164 | 164 | if not os.path.isfile(os.path.join(path, 'format')): |
|
165 | 165 | return False |
|
166 | 166 | |
|
167 | 167 | try: |
|
168 | 168 | svn.repos.open(path) |
|
169 | 169 | except svn.core.SubversionException: |
|
170 | 170 | tb = traceback.format_exc() |
|
171 | 171 | log.debug("Invalid Subversion path `%s`, tb: %s", path, tb) |
|
172 | 172 | return False |
|
173 | 173 | return True |
|
174 | 174 | |
|
175 | 175 | @reraise_safe_exceptions |
|
176 | 176 | def verify(self, wire,): |
|
177 | 177 | repo_path = wire['path'] |
|
178 | 178 | if not self.is_path_valid_repository(wire, repo_path): |
|
179 | 179 | raise Exception( |
|
180 | 180 | "Path %s is not a valid Subversion repository." % repo_path) |
|
181 | 181 | |
|
182 | 182 | cmd = ['svnadmin', 'info', repo_path] |
|
183 | 183 | stdout, stderr = subprocessio.run_command(cmd) |
|
184 | 184 | return stdout |
|
185 | 185 | |
|
186 | 186 | def lookup(self, wire, revision): |
|
187 | 187 | if revision not in [-1, None, 'HEAD']: |
|
188 | 188 | raise NotImplementedError |
|
189 | 189 | repo = self._factory.repo(wire) |
|
190 | 190 | fs_ptr = svn.repos.fs(repo) |
|
191 | 191 | head = svn.fs.youngest_rev(fs_ptr) |
|
192 | 192 | return head |
|
193 | 193 | |
|
194 | 194 | def lookup_interval(self, wire, start_ts, end_ts): |
|
195 | 195 | repo = self._factory.repo(wire) |
|
196 | 196 | fsobj = svn.repos.fs(repo) |
|
197 | 197 | start_rev = None |
|
198 | 198 | end_rev = None |
|
199 | 199 | if start_ts: |
|
200 | 200 | start_ts_svn = apr_time_t(start_ts) |
|
201 | 201 | start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1 |
|
202 | 202 | else: |
|
203 | 203 | start_rev = 1 |
|
204 | 204 | if end_ts: |
|
205 | 205 | end_ts_svn = apr_time_t(end_ts) |
|
206 | 206 | end_rev = svn.repos.dated_revision(repo, end_ts_svn) |
|
207 | 207 | else: |
|
208 | 208 | end_rev = svn.fs.youngest_rev(fsobj) |
|
209 | 209 | return start_rev, end_rev |
|
210 | 210 | |
|
211 | 211 | def revision_properties(self, wire, revision): |
|
212 | 212 | repo = self._factory.repo(wire) |
|
213 | 213 | fs_ptr = svn.repos.fs(repo) |
|
214 | 214 | return svn.fs.revision_proplist(fs_ptr, revision) |
|
215 | 215 | |
|
216 | 216 | def revision_changes(self, wire, revision): |
|
217 | 217 | |
|
218 | 218 | repo = self._factory.repo(wire) |
|
219 | 219 | fsobj = svn.repos.fs(repo) |
|
220 | 220 | rev_root = svn.fs.revision_root(fsobj, revision) |
|
221 | 221 | |
|
222 | 222 | editor = svn.repos.ChangeCollector(fsobj, rev_root) |
|
223 | 223 | editor_ptr, editor_baton = svn.delta.make_editor(editor) |
|
224 | 224 | base_dir = "" |
|
225 | 225 | send_deltas = False |
|
226 | 226 | svn.repos.replay2( |
|
227 | 227 | rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas, |
|
228 | 228 | editor_ptr, editor_baton, None) |
|
229 | 229 | |
|
230 | 230 | added = [] |
|
231 | 231 | changed = [] |
|
232 | 232 | removed = [] |
|
233 | 233 | |
|
234 | 234 | # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs |
|
235 | 235 | for path, change in editor.changes.iteritems(): |
|
236 | 236 | # TODO: Decide what to do with directory nodes. Subversion can add |
|
237 | 237 | # empty directories. |
|
238 | 238 | |
|
239 | 239 | if change.item_kind == svn.core.svn_node_dir: |
|
240 | 240 | continue |
|
241 | 241 | if change.action in [svn.repos.CHANGE_ACTION_ADD]: |
|
242 | 242 | added.append(path) |
|
243 | 243 | elif change.action in [svn.repos.CHANGE_ACTION_MODIFY, |
|
244 | 244 | svn.repos.CHANGE_ACTION_REPLACE]: |
|
245 | 245 | changed.append(path) |
|
246 | 246 | elif change.action in [svn.repos.CHANGE_ACTION_DELETE]: |
|
247 | 247 | removed.append(path) |
|
248 | 248 | else: |
|
249 | 249 | raise NotImplementedError( |
|
250 | 250 | "Action %s not supported on path %s" % ( |
|
251 | 251 | change.action, path)) |
|
252 | 252 | |
|
253 | 253 | changes = { |
|
254 | 254 | 'added': added, |
|
255 | 255 | 'changed': changed, |
|
256 | 256 | 'removed': removed, |
|
257 | 257 | } |
|
258 | 258 | return changes |
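
The mapping returned by `revision_changes()` has this shape (paths are illustrative):

```python
{
    'added':   ['trunk/new_file.txt'],
    'changed': ['trunk/modified.txt'],
    'removed': ['trunk/old_file.txt'],
}
```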
|
259 | 259 | |
|
260 | 260 | def node_history(self, wire, path, revision, limit): |
|
261 | 261 | cross_copies = False |
|
262 | 262 | repo = self._factory.repo(wire) |
|
263 | 263 | fsobj = svn.repos.fs(repo) |
|
264 | 264 | rev_root = svn.fs.revision_root(fsobj, revision) |
|
265 | 265 | |
|
266 | 266 | history_revisions = [] |
|
267 | 267 | history = svn.fs.node_history(rev_root, path) |
|
268 | 268 | history = svn.fs.history_prev(history, cross_copies) |
|
269 | 269 | while history: |
|
270 | 270 | __, node_revision = svn.fs.history_location(history) |
|
271 | 271 | history_revisions.append(node_revision) |
|
272 | 272 | if limit and len(history_revisions) >= limit: |
|
273 | 273 | break |
|
274 | 274 | history = svn.fs.history_prev(history, cross_copies) |
|
275 | 275 | return history_revisions |
|
276 | 276 | |
|
277 | 277 | def node_properties(self, wire, path, revision): |
|
278 | 278 | repo = self._factory.repo(wire) |
|
279 | 279 | fsobj = svn.repos.fs(repo) |
|
280 | 280 | rev_root = svn.fs.revision_root(fsobj, revision) |
|
281 | 281 | return svn.fs.node_proplist(rev_root, path) |
|
282 | 282 | |
|
283 | 283 | def file_annotate(self, wire, path, revision): |
|
284 | 284 | abs_path = 'file://' + urllib.pathname2url( |
|
285 | 285 | vcspath.join(wire['path'], path)) |
|
286 | 286 | file_uri = svn.core.svn_path_canonicalize(abs_path) |
|
287 | 287 | |
|
288 | 288 | start_rev = svn_opt_revision_value_t(0) |
|
289 | 289 | peg_rev = svn_opt_revision_value_t(revision) |
|
290 | 290 | end_rev = peg_rev |
|
291 | 291 | |
|
292 | 292 | annotations = [] |
|
293 | 293 | |
|
294 | 294 | def receiver(line_no, revision, author, date, line, pool): |
|
295 | 295 | annotations.append((line_no, revision, line)) |
|
296 | 296 | |
|
297 | 297 | # TODO: Cannot use blame5, missing typemap function in the swig code |
|
298 | 298 | try: |
|
299 | 299 | svn.client.blame2( |
|
300 | 300 | file_uri, peg_rev, start_rev, end_rev, |
|
301 | 301 | receiver, svn.client.create_context()) |
|
302 | 302 | except svn.core.SubversionException as exc: |
|
303 | 303 | log.exception("Error during blame operation.") |
|
304 | 304 | raise Exception( |
|
305 | 305 | "Blame not supported or file does not exist at path %s. " |
|
306 | 306 | "Error %s." % (path, exc)) |
|
307 | 307 | |
|
308 | 308 | return annotations |
|
309 | 309 | |
|
310 | 310 | def get_node_type(self, wire, path, rev=None): |
|
311 | 311 | repo = self._factory.repo(wire) |
|
312 | 312 | fs_ptr = svn.repos.fs(repo) |
|
313 | 313 | if rev is None: |
|
314 | 314 | rev = svn.fs.youngest_rev(fs_ptr) |
|
315 | 315 | root = svn.fs.revision_root(fs_ptr, rev) |
|
316 | 316 | node = svn.fs.check_path(root, path) |
|
317 | 317 | return NODE_TYPE_MAPPING.get(node, None) |
|
318 | 318 | |
|
319 | 319 | def get_nodes(self, wire, path, revision=None): |
|
320 | 320 | repo = self._factory.repo(wire) |
|
321 | 321 | fsobj = svn.repos.fs(repo) |
|
322 | 322 | if revision is None: |
|
323 | 323 | revision = svn.fs.youngest_rev(fsobj) |
|
324 | 324 | root = svn.fs.revision_root(fsobj, revision) |
|
325 | 325 | entries = svn.fs.dir_entries(root, path) |
|
326 | 326 | result = [] |
|
327 | 327 | for entry_path, entry_info in entries.iteritems(): |
|
328 | 328 | result.append( |
|
329 | 329 | (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None))) |
|
330 | 330 | return result |
|
331 | 331 | |
|
332 | 332 | def get_file_content(self, wire, path, rev=None): |
|
333 | 333 | repo = self._factory.repo(wire) |
|
334 | 334 | fsobj = svn.repos.fs(repo) |
|
335 | 335 | if rev is None: |
|
336 | 336 | rev = svn.fs.youngest_revision(fsobj) |
|
337 | 337 | root = svn.fs.revision_root(fsobj, rev) |
|
338 | 338 | content = svn.core.Stream(svn.fs.file_contents(root, path)) |
|
339 | 339 | return content.read() |
|
340 | 340 | |
|
341 | 341 | def get_file_size(self, wire, path, revision=None): |
|
342 | 342 | repo = self._factory.repo(wire) |
|
343 | 343 | fsobj = svn.repos.fs(repo) |
|
344 | 344 | if revision is None: |
|
345 | 345 | revision = svn.fs.youngest_revision(fsobj) |
|
346 | 346 | root = svn.fs.revision_root(fsobj, revision) |
|
347 | 347 | size = svn.fs.file_length(root, path) |
|
348 | 348 | return size |
|
349 | 349 | |
|
350 | 350 | def create_repository(self, wire, compatible_version=None): |
|
351 | 351 | log.info('Creating Subversion repository in path "%s"', wire['path']) |
|
352 | 352 | self._factory.repo(wire, create=True, |
|
353 | 353 | compatible_version=compatible_version) |
|
354 | 354 | |
|
355 | 355 | def get_url_and_credentials(self, src_url): |
|
356 | 356 | obj = urlparse.urlparse(src_url) |
|
357 | 357 | username = obj.username or None |
|
358 | 358 | password = obj.password or None |
|
359 | 359 | return username, password, src_url |
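
A quick sketch of the credential extraction performed by `get_url_and_credentials()`, using a made-up URL:

```python
import urlparse  # Python 2, as in the imports above

obj = urlparse.urlparse('https://alice:secret@svn.example.com/repo')
assert obj.username == 'alice'
assert obj.password == 'secret'
```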
|
360 | 360 | |
|
361 | 361 | def import_remote_repository(self, wire, src_url): |
|
362 | 362 | repo_path = wire['path'] |
|
363 | 363 | if not self.is_path_valid_repository(wire, repo_path): |
|
364 | 364 | raise Exception( |
|
365 | 365 | "Path %s is not a valid Subversion repository." % repo_path) |
|
366 | 366 | |
|
367 | 367 | username, password, src_url = self.get_url_and_credentials(src_url) |
|
368 | 368 | rdump_cmd = ['svnrdump', 'dump', '--non-interactive', |
|
369 | 369 | '--trust-server-cert-failures=unknown-ca'] |
|
370 | 370 | if username and password: |
|
371 | 371 | rdump_cmd += ['--username', username, '--password', password] |
|
372 | 372 | rdump_cmd += [src_url] |
|
373 | 373 | |
|
374 | 374 | rdump = subprocess.Popen( |
|
375 | 375 | rdump_cmd, |
|
376 | 376 | stdout=subprocess.PIPE, stderr=subprocess.PIPE) |
|
377 | 377 | load = subprocess.Popen( |
|
378 | 378 | ['svnadmin', 'load', repo_path], stdin=rdump.stdout) |
|
379 | 379 | |
|
380 | 380 | # TODO: johbo: This can be a very long operation, might be better |
|
381 | 381 | # to track some kind of status and provide an api to check if the |
|
382 | 382 | # import is done. |
|
383 | 383 | rdump.wait() |
|
384 | 384 | load.wait() |
|
385 | 385 | |
|
386 | 386 | log.debug('Return process ended with code: %s', rdump.returncode) |
|
387 | 387 | if rdump.returncode != 0: |
|
388 | 388 | errors = rdump.stderr.read() |
|
389 | 389 | log.error('svnrdump dump failed: statuscode %s: message: %s', |
|
390 | 390 | rdump.returncode, errors) |
|
391 | 391 | reason = 'UNKNOWN' |
|
392 | 392 | if 'svnrdump: E230001:' in errors: |
|
393 | 393 | reason = 'INVALID_CERTIFICATE' |
|
394 | 394 | |
|
395 | 395 | if reason == 'UNKNOWN': |
|
396 | 396 | reason = 'UNKNOWN:{}'.format(errors) |
|
397 | 397 | raise Exception( |
|
398 | 398 | 'Failed to dump the remote repository from %s. Reason: %s' % ( |
|
399 | 399 | src_url, reason)) |
|
400 | 400 | if load.returncode != 0: |
|
401 | 401 | raise Exception( |
|
402 | 402 | 'Failed to load the dump of remote repository from %s.' % |
|
403 | 403 | (src_url, )) |
|
404 | 404 | |
|
405 | 405 | def commit(self, wire, message, author, timestamp, updated, removed): |
|
406 | 406 | assert isinstance(message, str) |
|
407 | 407 | assert isinstance(author, str) |
|
408 | 408 | |
|
409 | 409 | repo = self._factory.repo(wire) |
|
410 | 410 | fsobj = svn.repos.fs(repo) |
|
411 | 411 | |
|
412 | 412 | rev = svn.fs.youngest_rev(fsobj) |
|
413 | 413 | txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message) |
|
414 | 414 | txn_root = svn.fs.txn_root(txn) |
|
415 | 415 | |
|
416 | 416 | for node in updated: |
|
417 | 417 | TxnNodeProcessor(node, txn_root).update() |
|
418 | 418 | for node in removed: |
|
419 | 419 | TxnNodeProcessor(node, txn_root).remove() |
|
420 | 420 | |
|
421 | 421 | commit_id = svn.repos.fs_commit_txn(repo, txn) |
|
422 | 422 | |
|
423 | 423 | if timestamp: |
|
424 | 424 | apr_time = apr_time_t(timestamp) |
|
425 | 425 | ts_formatted = svn.core.svn_time_to_cstring(apr_time) |
|
426 | 426 | svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted) |
|
427 | 427 | |
|
428 | 428 | log.debug('Committed revision "%s" to "%s".', commit_id, wire['path']) |
|
429 | 429 | return commit_id |
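
A hedged sketch of one entry of the `updated` list consumed by `commit()` and `TxnNodeProcessor`; the keys follow the `node['path']`, `node['content']` and `node.get('properties')` accesses in this module, and the values are illustrative (both `path` and `content` must be `str`, as the asserts require):

```python
node = {
    'path': 'trunk/docs/README.txt',
    'content': 'hello world\n',
    'properties': {'svn:eol-style': 'native'},
}
```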
|
430 | 430 | |
|
431 | 431 | def diff(self, wire, rev1, rev2, path1=None, path2=None, |
|
432 | 432 | ignore_whitespace=False, context=3): |
|
433 | 433 | |
|
434 | 434 | wire.update(cache=False) |
|
435 | 435 | repo = self._factory.repo(wire) |
|
436 | 436 | diff_creator = SvnDiffer( |
|
437 | 437 | repo, rev1, path1, rev2, path2, ignore_whitespace, context) |
|
438 | 438 | try: |
|
439 | 439 | return diff_creator.generate_diff() |
|
440 | 440 | except svn.core.SubversionException as e: |
|
441 | 441 | log.exception( |
|
442 | 442 | "Error during diff operation. " |
|
443 | 443 | "Path might not exist %s, %s" % (path1, path2)) |
|
444 | 444 | return "" |
|
445 | 445 | |
|
446 | 446 | @reraise_safe_exceptions |
|
447 | 447 | def is_large_file(self, wire, path): |
|
448 | 448 | return False |
|
449 | 449 | |
|
450 | 450 | @reraise_safe_exceptions |
|
451 | 451 | def install_hooks(self, wire, force=False): |
|
452 | 452 | from vcsserver.hook_utils import install_svn_hooks |
|
453 | 453 | repo_path = wire['path'] |
|
454 | 454 | binary_dir = settings.BINARY_DIR |
|
455 | 455 | executable = None |
|
456 | 456 | if binary_dir: |
|
457 | 457 | executable = os.path.join(binary_dir, 'python') |
|
458 | 458 | return install_svn_hooks( |
|
459 | 459 | repo_path, executable=executable, force_create=force) |
|
460 | 460 | |
|
461 | @reraise_safe_exceptions | |
|
462 | def get_hooks_info(self, wire): | |
|
463 | from vcsserver.hook_utils import ( | |
|
464 | get_svn_pre_hook_version, get_svn_post_hook_version) | |
|
465 | repo_path = wire['path'] | |
|
466 | return { | |
|
467 | 'pre_version': get_svn_pre_hook_version(repo_path), | |
|
468 | 'post_version': get_svn_post_hook_version(repo_path), | |
|
469 | } | |
|
470 | ||
|
461 | 471 | |
|
462 | 472 | class SvnDiffer(object): |
|
463 | 473 | """ |
|
464 | 474 | Utility to create diffs based on difflib and the Subversion api |
|
465 | 475 | """ |
|
466 | 476 | |
|
467 | 477 | binary_content = False |
|
468 | 478 | |
|
469 | 479 | def __init__( |
|
470 | 480 | self, repo, src_rev, src_path, tgt_rev, tgt_path, |
|
471 | 481 | ignore_whitespace, context): |
|
472 | 482 | self.repo = repo |
|
473 | 483 | self.ignore_whitespace = ignore_whitespace |
|
474 | 484 | self.context = context |
|
475 | 485 | |
|
476 | 486 | fsobj = svn.repos.fs(repo) |
|
477 | 487 | |
|
478 | 488 | self.tgt_rev = tgt_rev |
|
479 | 489 | self.tgt_path = tgt_path or '' |
|
480 | 490 | self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev) |
|
481 | 491 | self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path) |
|
482 | 492 | |
|
483 | 493 | self.src_rev = src_rev |
|
484 | 494 | self.src_path = src_path or self.tgt_path |
|
485 | 495 | self.src_root = svn.fs.revision_root(fsobj, src_rev) |
|
486 | 496 | self.src_kind = svn.fs.check_path(self.src_root, self.src_path) |
|
487 | 497 | |
|
488 | 498 | self._validate() |
|
489 | 499 | |
|
490 | 500 | def _validate(self): |
|
491 | 501 | if (self.tgt_kind != svn.core.svn_node_none and |
|
492 | 502 | self.src_kind != svn.core.svn_node_none and |
|
493 | 503 | self.src_kind != self.tgt_kind): |
|
494 | 504 | # TODO: johbo: proper error handling |
|
495 | 505 | raise Exception( |
|
496 | 506 | "Source and target are not compatible for diff generation. " |
|
497 | 507 | "Source type: %s, target type: %s" % |
|
498 | 508 | (self.src_kind, self.tgt_kind)) |
|
499 | 509 | |
|
500 | 510 | def generate_diff(self): |
|
501 | 511 | buf = StringIO.StringIO() |
|
502 | 512 | if self.tgt_kind == svn.core.svn_node_dir: |
|
503 | 513 | self._generate_dir_diff(buf) |
|
504 | 514 | else: |
|
505 | 515 | self._generate_file_diff(buf) |
|
506 | 516 | return buf.getvalue() |
|
507 | 517 | |
|
508 | 518 | def _generate_dir_diff(self, buf): |
|
509 | 519 | editor = DiffChangeEditor() |
|
510 | 520 | editor_ptr, editor_baton = svn.delta.make_editor(editor) |
|
511 | 521 | svn.repos.dir_delta2( |
|
512 | 522 | self.src_root, |
|
513 | 523 | self.src_path, |
|
514 | 524 | '', # src_entry |
|
515 | 525 | self.tgt_root, |
|
516 | 526 | self.tgt_path, |
|
517 | 527 | editor_ptr, editor_baton, |
|
518 | 528 | authorization_callback_allow_all, |
|
519 | 529 | False, # text_deltas |
|
520 | 530 | svn.core.svn_depth_infinity, # depth |
|
521 | 531 | False, # entry_props |
|
522 | 532 | False, # ignore_ancestry |
|
523 | 533 | ) |
|
524 | 534 | |
|
525 | 535 | for path, __, change in sorted(editor.changes): |
|
526 | 536 | self._generate_node_diff( |
|
527 | 537 | buf, change, path, self.tgt_path, path, self.src_path) |
|
528 | 538 | |
|
529 | 539 | def _generate_file_diff(self, buf): |
|
530 | 540 | change = None |
|
531 | 541 | if self.src_kind == svn.core.svn_node_none: |
|
532 | 542 | change = "add" |
|
533 | 543 | elif self.tgt_kind == svn.core.svn_node_none: |
|
534 | 544 | change = "delete" |
|
535 | 545 | tgt_base, tgt_path = vcspath.split(self.tgt_path) |
|
536 | 546 | src_base, src_path = vcspath.split(self.src_path) |
|
537 | 547 | self._generate_node_diff( |
|
538 | 548 | buf, change, tgt_path, tgt_base, src_path, src_base) |
|
539 | 549 | |
|
540 | 550 | def _generate_node_diff( |
|
541 | 551 | self, buf, change, tgt_path, tgt_base, src_path, src_base): |
|
542 | 552 | |
|
543 | 553 | if self.src_rev == self.tgt_rev and tgt_base == src_base: |
|
544 | 554 | # keep behaviour consistent with git/hg: return an empty diff if |

545 | 555 | # we compare the same revisions |
|
546 | 556 | return |
|
547 | 557 | |
|
548 | 558 | tgt_full_path = vcspath.join(tgt_base, tgt_path) |
|
549 | 559 | src_full_path = vcspath.join(src_base, src_path) |
|
550 | 560 | |
|
551 | 561 | self.binary_content = False |
|
552 | 562 | mime_type = self._get_mime_type(tgt_full_path) |
|
553 | 563 | |
|
554 | 564 | if mime_type and not mime_type.startswith('text'): |
|
555 | 565 | self.binary_content = True |
|
556 | 566 | buf.write("=" * 67 + '\n') |
|
557 | 567 | buf.write("Cannot display: file marked as a binary type.\n") |
|
558 | 568 | buf.write("svn:mime-type = %s\n" % mime_type) |
|
559 | 569 | buf.write("Index: %s\n" % (tgt_path, )) |
|
560 | 570 | buf.write("=" * 67 + '\n') |
|
561 | 571 | buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % { |
|
562 | 572 | 'tgt_path': tgt_path}) |
|
563 | 573 | |
|
564 | 574 | if change == 'add': |
|
565 | 575 | # TODO: johbo: SVN is missing a zero here compared to git |
|
566 | 576 | buf.write("new file mode 10644\n") |
|
567 | 577 | |
|
568 | 578 | #TODO(marcink): intro to binary detection of svn patches |
|
569 | 579 | # if self.binary_content: |
|
570 | 580 | # buf.write('GIT binary patch\n') |
|
571 | 581 | |
|
572 | 582 | buf.write("--- /dev/null\t(revision 0)\n") |
|
573 | 583 | src_lines = [] |
|
574 | 584 | else: |
|
575 | 585 | if change == 'delete': |
|
576 | 586 | buf.write("deleted file mode 10644\n") |
|
577 | 587 | |
|
578 | 588 | #TODO(marcink): intro to binary detection of svn patches |
|
579 | 589 | # if self.binary_content: |
|
580 | 590 | # buf.write('GIT binary patch\n') |
|
581 | 591 | |
|
582 | 592 | buf.write("--- a/%s\t(revision %s)\n" % ( |
|
583 | 593 | src_path, self.src_rev)) |
|
584 | 594 | src_lines = self._svn_readlines(self.src_root, src_full_path) |
|
585 | 595 | |
|
586 | 596 | if change == 'delete': |
|
587 | 597 | buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, )) |
|
588 | 598 | tgt_lines = [] |
|
589 | 599 | else: |
|
590 | 600 | buf.write("+++ b/%s\t(revision %s)\n" % ( |
|
591 | 601 | tgt_path, self.tgt_rev)) |
|
592 | 602 | tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path) |
|
593 | 603 | |
|
594 | 604 | if not self.binary_content: |
|
595 | 605 | udiff = svn_diff.unified_diff( |
|
596 | 606 | src_lines, tgt_lines, context=self.context, |
|
597 | 607 | ignore_blank_lines=self.ignore_whitespace, |
|
598 | 608 | ignore_case=False, |
|
599 | 609 | ignore_space_changes=self.ignore_whitespace) |
|
600 | 610 | buf.writelines(udiff) |
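
Putting the `buf.write()` calls above together, the header emitted for a modified text file looks like this (paths and revisions are illustrative; the `---`/`+++` lines use a tab before the revision annotation):

```diff
diff --git a/docs/readme.txt b/docs/readme.txt
--- a/docs/readme.txt	(revision 4)
+++ b/docs/readme.txt	(revision 5)
```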
|
601 | 611 | |
|
602 | 612 | def _get_mime_type(self, path): |
|
603 | 613 | try: |
|
604 | 614 | mime_type = svn.fs.node_prop( |
|
605 | 615 | self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE) |
|
606 | 616 | except svn.core.SubversionException: |
|
607 | 617 | mime_type = svn.fs.node_prop( |
|
608 | 618 | self.src_root, path, svn.core.SVN_PROP_MIME_TYPE) |
|
609 | 619 | return mime_type |
|
610 | 620 | |
|
611 | 621 | def _svn_readlines(self, fs_root, node_path): |
|
612 | 622 | if self.binary_content: |
|
613 | 623 | return [] |
|
614 | 624 | node_kind = svn.fs.check_path(fs_root, node_path) |
|
615 | 625 | if node_kind not in ( |
|
616 | 626 | svn.core.svn_node_file, svn.core.svn_node_symlink): |
|
617 | 627 | return [] |
|
618 | 628 | content = svn.core.Stream( |
|
619 | 629 | svn.fs.file_contents(fs_root, node_path)).read() |
|
620 | 630 | return content.splitlines(True) |
|
621 | 631 | |
|
622 | 632 | |
|
623 | 633 | |
|
624 | 634 | class DiffChangeEditor(svn.delta.Editor): |
|
625 | 635 | """ |
|
626 | 636 | Records changes between two given revisions |
|
627 | 637 | """ |
|
628 | 638 | |
|
629 | 639 | def __init__(self): |
|
630 | 640 | self.changes = [] |
|
631 | 641 | |
|
632 | 642 | def delete_entry(self, path, revision, parent_baton, pool=None): |
|
633 | 643 | self.changes.append((path, None, 'delete')) |
|
634 | 644 | |
|
635 | 645 | def add_file( |
|
636 | 646 | self, path, parent_baton, copyfrom_path, copyfrom_revision, |
|
637 | 647 | file_pool=None): |
|
638 | 648 | self.changes.append((path, 'file', 'add')) |
|
639 | 649 | |
|
640 | 650 | def open_file(self, path, parent_baton, base_revision, file_pool=None): |
|
641 | 651 | self.changes.append((path, 'file', 'change')) |
|
642 | 652 | |
|
643 | 653 | |
|
644 | 654 | def authorization_callback_allow_all(root, path, pool): |
|
645 | 655 | return True |
|
646 | 656 | |
|
647 | 657 | |
|
648 | 658 | class TxnNodeProcessor(object): |
|
649 | 659 | """ |
|
650 | 660 | Utility to process the change of one node within a transaction root. |
|
651 | 661 | |
|
652 | 662 | It encapsulates the knowledge of how to add, update or remove |
|
653 | 663 | a node for a given transaction root. The purpose is to support the method |
|
654 | 664 | `SvnRemote.commit`. |
|
655 | 665 | """ |
|
656 | 666 | |
|
657 | 667 | def __init__(self, node, txn_root): |
|
658 | 668 | assert isinstance(node['path'], str) |
|
659 | 669 | |
|
660 | 670 | self.node = node |
|
661 | 671 | self.txn_root = txn_root |
|
662 | 672 | |
|
663 | 673 | def update(self): |
|
664 | 674 | self._ensure_parent_dirs() |
|
665 | 675 | self._add_file_if_node_does_not_exist() |
|
666 | 676 | self._update_file_content() |
|
667 | 677 | self._update_file_properties() |
|
668 | 678 | |
|
669 | 679 | def remove(self): |
|
670 | 680 | svn.fs.delete(self.txn_root, self.node['path']) |
|
671 | 681 | # TODO: Clean up directory if empty |
|
672 | 682 | |
|
673 | 683 | def _ensure_parent_dirs(self): |
|
674 | 684 | curdir = vcspath.dirname(self.node['path']) |
|
675 | 685 | dirs_to_create = [] |
|
676 | 686 | while not self._svn_path_exists(curdir): |
|
677 | 687 | dirs_to_create.append(curdir) |
|
678 | 688 | curdir = vcspath.dirname(curdir) |
|
679 | 689 | |
|
680 | 690 | for curdir in reversed(dirs_to_create): |
|
681 | 691 | log.debug('Creating missing directory "%s"', curdir) |
|
682 | 692 | svn.fs.make_dir(self.txn_root, curdir) |
|
683 | 693 | |
|
684 | 694 | def _svn_path_exists(self, path): |
|
685 | 695 | path_status = svn.fs.check_path(self.txn_root, path) |
|
686 | 696 | return path_status != svn.core.svn_node_none |
|
687 | 697 | |
|
688 | 698 | def _add_file_if_node_does_not_exist(self): |
|
689 | 699 | kind = svn.fs.check_path(self.txn_root, self.node['path']) |
|
690 | 700 | if kind == svn.core.svn_node_none: |
|
691 | 701 | svn.fs.make_file(self.txn_root, self.node['path']) |
|
692 | 702 | |
|
693 | 703 | def _update_file_content(self): |
|
694 | 704 | assert isinstance(self.node['content'], str) |
|
695 | 705 | handler, baton = svn.fs.apply_textdelta( |
|
696 | 706 | self.txn_root, self.node['path'], None, None) |
|
697 | 707 | svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton) |
|
698 | 708 | |
|
699 | 709 | def _update_file_properties(self): |
|
700 | 710 | properties = self.node.get('properties', {}) |
|
701 | 711 | for key, value in properties.iteritems(): |
|
702 | 712 | svn.fs.change_node_prop( |
|
703 | 713 | self.txn_root, self.node['path'], key, value) |
|
704 | 714 | |
|
705 | 715 | |
|
706 | 716 | def apr_time_t(timestamp): |
|
707 | 717 | """ |
|
708 | 718 | Convert a Python timestamp into APR timestamp type apr_time_t |
|
709 | 719 | """ |
|
710 | 720 | return timestamp * 1E6 |
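
APR timestamps count microseconds since the epoch, hence the `1E6` factor; for example:

```python
import time

apr_now = apr_time_t(time.time())  # seconds -> microseconds
# e.g. 1546300800.0 -> 1.5463008e+15
```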
|
711 | 721 | |
|
712 | 722 | |
|
713 | 723 | def svn_opt_revision_value_t(num): |
|
714 | 724 | """ |
|
715 | 725 | Put `num` into a `svn_opt_revision_value_t` structure. |
|
716 | 726 | """ |
|
717 | 727 | value = svn.core.svn_opt_revision_value_t() |
|
718 | 728 | value.number = num |
|
719 | 729 | revision = svn.core.svn_opt_revision_t() |
|
720 | 730 | revision.kind = svn.core.svn_opt_revision_number |
|
721 | 731 | revision.value = value |
|
722 | 732 | return revision |
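
Hypothetical usage, mirroring `file_annotate()` above: wrap plain integers into the revision structures that `svn.client.blame2` expects:

```python
start_rev = svn_opt_revision_value_t(0)
peg_rev = svn_opt_revision_value_t(42)   # 42 is an example revision
end_rev = peg_rev
```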