py3: raw stringify various JSON and HTTP headers in the LFS blobstore module...
Matt Harbison
r41472:40efcf78 default
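A note on the change: Mercurial handles oids, URLs, and header values as
bytes internally, but on Python 3 json.dumps() and urllib's request/header
APIs expect native str. This changeset converts at that boundary with
pycompat.strurl() / pycompat.bytesurl() and switches the literals involved
to native (r'') strings. A minimal standalone sketch of the boundary
conversion, using simplified stand-ins for the pycompat helpers (which are
identity functions on Python 2):

    import json

    def strurl(s):
        # bytes -> native str; oids, URLs, and header values are ASCII.
        # (Simplified stand-in for mercurial.pycompat.strurl.)
        return s.decode('ascii') if isinstance(s, bytes) else s

    def bytesurl(s):
        # native str -> bytes; stand-in for mercurial.pycompat.bytesurl.
        return s.encode('ascii') if isinstance(s, str) else s

    # json.dumps() on Python 3 rejects bytes keys and values, so convert
    # the bytes Mercurial carries, then encode the resulting str back to
    # bytes for use as an HTTP request body.
    oid = b'0' * 64  # hypothetical SHA-256 oid
    requestdata = bytesurl(json.dumps({
        'objects': [{'oid': strurl(oid), 'size': 42}],
        'operation': strurl(b'download'),
    }))
    assert isinstance(requestdata, bytes)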
diff --git a/hgext/lfs/blobstore.py b/hgext/lfs/blobstore.py
@@ -1,651 +1,654 @@
1 1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
2 2 #
3 3 # Copyright 2017 Facebook, Inc.
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from __future__ import absolute_import
9 9
10 10 import contextlib
11 11 import errno
12 12 import hashlib
13 13 import json
14 14 import os
15 15 import re
16 16 import socket
17 17
18 18 from mercurial.i18n import _
19 19
20 20 from mercurial import (
21 21 encoding,
22 22 error,
23 23 node,
24 24 pathutil,
25 25 pycompat,
26 26 url as urlmod,
27 27 util,
28 28 vfs as vfsmod,
29 29 worker,
30 30 )
31 31
32 32 from mercurial.utils import (
33 33 stringutil,
34 34 )
35 35
36 36 from ..largefiles import lfutil
37 37
38 38 # 64 bytes for SHA256
39 39 _lfsre = re.compile(br'\A[a-f0-9]{64}\Z')
40 40
41 41 class lfsvfs(vfsmod.vfs):
42 42 def join(self, path):
43 43 """split the path at first two characters, like: XX/XXXXX..."""
44 44 if not _lfsre.match(path):
45 45 raise error.ProgrammingError(b'unexpected lfs path: %s' % path)
46 46 return super(lfsvfs, self).join(path[0:2], path[2:])
47 47
48 48 def walk(self, path=None, onerror=None):
49 49 """Yield (dirpath, [], oids) tuple for blobs under path
50 50
51 51 Oids only exist in the root of this vfs, so dirpath is always ''.
52 52 """
53 53 root = os.path.normpath(self.base)
54 54 # when dirpath == root, dirpath[prefixlen:] becomes empty
55 55 # because len(dirpath) < prefixlen.
56 56 prefixlen = len(pathutil.normasprefix(root))
57 57 oids = []
58 58
59 59 for dirpath, dirs, files in os.walk(self.reljoin(self.base, path
60 60 or b''),
61 61 onerror=onerror):
62 62 dirpath = dirpath[prefixlen:]
63 63
64 64 # Silently skip unexpected files and directories
65 65 if len(dirpath) == 2:
66 66 oids.extend([dirpath + f for f in files
67 67 if _lfsre.match(dirpath + f)])
68 68
69 69 yield ('', [], oids)
70 70
71 71 class nullvfs(lfsvfs):
72 72 def __init__(self):
73 73 pass
74 74
75 75 def exists(self, oid):
76 76 return False
77 77
78 78 def read(self, oid):
79 79 # store.read() calls into here if the blob doesn't exist in its
80 80 # self.vfs. Raise the same error as a normal vfs when asked to read a
81 81 # file that doesn't exist. The only difference is the full file path
82 82 # isn't available in the error.
83 83 raise IOError(errno.ENOENT,
84 84 pycompat.sysstr(b'%s: No such file or directory' % oid))
85 85
86 86 def walk(self, path=None, onerror=None):
87 87 return (b'', [], [])
88 88
89 89 def write(self, oid, data):
90 90 pass
91 91
92 92 class filewithprogress(object):
93 93 """a file-like object that supports __len__ and read.
94 94
95 95 Useful to provide progress information for how many bytes are read.
96 96 """
97 97
98 98 def __init__(self, fp, callback):
99 99 self._fp = fp
100 100 self._callback = callback # func(readsize)
101 101 fp.seek(0, os.SEEK_END)
102 102 self._len = fp.tell()
103 103 fp.seek(0)
104 104
105 105 def __len__(self):
106 106 return self._len
107 107
108 108 def read(self, size):
109 109 if self._fp is None:
110 110 return b''
111 111 data = self._fp.read(size)
112 112 if data:
113 113 if self._callback:
114 114 self._callback(len(data))
115 115 else:
116 116 self._fp.close()
117 117 self._fp = None
118 118 return data
119 119
120 120 class local(object):
121 121 """Local blobstore for large file contents.
122 122
123 123 This blobstore is used both as a cache and as a staging area for large blobs
124 124 to be uploaded to the remote blobstore.
125 125 """
126 126
127 127 def __init__(self, repo):
128 128 fullpath = repo.svfs.join(b'lfs/objects')
129 129 self.vfs = lfsvfs(fullpath)
130 130
131 131 if repo.ui.configbool(b'experimental', b'lfs.disableusercache'):
132 132 self.cachevfs = nullvfs()
133 133 else:
134 134 usercache = lfutil._usercachedir(repo.ui, b'lfs')
135 135 self.cachevfs = lfsvfs(usercache)
136 136 self.ui = repo.ui
137 137
138 138 def open(self, oid):
139 139 """Open a read-only file descriptor to the named blob, in either the
140 140 usercache or the local store."""
141 141 # The usercache is the most likely place to hold the file. Commit will
142 142 # write to both it and the local store, as will anything that downloads
143 143 # the blobs. However, things like clone without an update won't
144 144 # populate the local store. For an init + push of a local clone,
145 145 # the usercache is the only place it _could_ be. If not present, the
146 146 # missing file msg here will indicate the local repo, not the usercache.
147 147 if self.cachevfs.exists(oid):
148 148 return self.cachevfs(oid, b'rb')
149 149
150 150 return self.vfs(oid, b'rb')
151 151
152 152 def download(self, oid, src):
153 153 """Read the blob from the remote source in chunks, verify the content,
154 154 and write to this local blobstore."""
155 155 sha256 = hashlib.sha256()
156 156
157 157 with self.vfs(oid, b'wb', atomictemp=True) as fp:
158 158 for chunk in util.filechunkiter(src, size=1048576):
159 159 fp.write(chunk)
160 160 sha256.update(chunk)
161 161
162 162 realoid = node.hex(sha256.digest())
163 163 if realoid != oid:
164 164 raise LfsCorruptionError(_(b'corrupt remote lfs object: %s')
165 165 % oid)
166 166
167 167 self._linktousercache(oid)
168 168
169 169 def write(self, oid, data):
170 170 """Write blob to local blobstore.
171 171
172 172 This should only be called from the filelog during a commit or similar.
173 173 As such, there is no need to verify the data. Imports from a remote
174 174 store must use ``download()`` instead."""
175 175 with self.vfs(oid, b'wb', atomictemp=True) as fp:
176 176 fp.write(data)
177 177
178 178 self._linktousercache(oid)
179 179
180 180 def linkfromusercache(self, oid):
181 181 """Link blobs found in the user cache into this store.
182 182
183 183 The server module needs to do this when it lets the client know not to
184 184 upload the blob, to ensure it is always available in this store.
185 185 Normally this is done implicitly when the client reads or writes the
186 186 blob, but that doesn't happen when the server tells the client that it
187 187 already has the blob.
188 188 """
189 189 if (not isinstance(self.cachevfs, nullvfs)
190 190 and not self.vfs.exists(oid)):
191 191 self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
192 192 lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
193 193
194 194 def _linktousercache(self, oid):
195 195 # XXX: should we verify the content of the cache, and hardlink back to
196 196 # the local store on success, but truncate, write and link on failure?
197 197 if (not self.cachevfs.exists(oid)
198 198 and not isinstance(self.cachevfs, nullvfs)):
199 199 self.ui.note(_(b'lfs: adding %s to the usercache\n') % oid)
200 200 lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))
201 201
202 202 def read(self, oid, verify=True):
203 203 """Read blob from local blobstore."""
204 204 if not self.vfs.exists(oid):
205 205 blob = self._read(self.cachevfs, oid, verify)
206 206
207 207 # Even if revlog will verify the content, it needs to be verified
208 208 # now before making the hardlink to avoid propagating corrupt blobs.
209 209 # Don't abort if corruption is detected, because `hg verify` will
210 210 # give more useful info about the corruption; simply don't add the
211 211 # hardlink.
212 212 if verify or node.hex(hashlib.sha256(blob).digest()) == oid:
213 213 self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
214 214 lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
215 215 else:
216 216 self.ui.note(_(b'lfs: found %s in the local lfs store\n') % oid)
217 217 blob = self._read(self.vfs, oid, verify)
218 218 return blob
219 219
220 220 def _read(self, vfs, oid, verify):
221 221 """Read blob (after verifying) from the given store"""
222 222 blob = vfs.read(oid)
223 223 if verify:
224 224 _verify(oid, blob)
225 225 return blob
226 226
227 227 def verify(self, oid):
228 228 """Indicate whether or not the hash of the underlying file matches its
229 229 name."""
230 230 sha256 = hashlib.sha256()
231 231
232 232 with self.open(oid) as fp:
233 233 for chunk in util.filechunkiter(fp, size=1048576):
234 234 sha256.update(chunk)
235 235
236 236 return oid == node.hex(sha256.digest())
237 237
238 238 def has(self, oid):
239 239 """Returns True if the local blobstore contains the requested blob,
240 240 False otherwise."""
241 241 return self.cachevfs.exists(oid) or self.vfs.exists(oid)
242 242
243 243 def _urlerrorreason(urlerror):
244 244 '''Create a friendly message for the given URLError to be used in an
245 245 LfsRemoteError message.
246 246 '''
247 247 inst = urlerror
248 248
249 249 if isinstance(urlerror.reason, Exception):
250 250 inst = urlerror.reason
251 251
252 252 if util.safehasattr(inst, 'reason'):
253 253 try: # usually it is in the form (errno, strerror)
254 254 reason = inst.reason.args[1]
255 255 except (AttributeError, IndexError):
256 256 # it might be anything, for example a string
257 257 reason = inst.reason
258 258 if isinstance(reason, pycompat.unicode):
259 259 # SSLError of Python 2.7.9 contains a unicode
260 260 reason = encoding.unitolocal(reason)
261 261 return reason
262 262 elif getattr(inst, "strerror", None):
263 263 return encoding.strtolocal(inst.strerror)
264 264 else:
265 265 return stringutil.forcebytestr(urlerror)
266 266
267 267 class _gitlfsremote(object):
268 268
269 269 def __init__(self, repo, url):
270 270 ui = repo.ui
271 271 self.ui = ui
272 272 baseurl, authinfo = url.authinfo()
273 273 self.baseurl = baseurl.rstrip(b'/')
274 274 useragent = repo.ui.config(b'experimental', b'lfs.user-agent')
275 275 if not useragent:
276 276 useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version()
277 277 self.urlopener = urlmod.opener(ui, authinfo, useragent)
278 278 self.retry = ui.configint(b'lfs', b'retry')
279 279
280 280 def writebatch(self, pointers, fromstore):
281 281 """Batch upload from local to remote blobstore."""
282 282 self._batch(_deduplicate(pointers), fromstore, b'upload')
283 283
284 284 def readbatch(self, pointers, tostore):
285 285 """Batch download from remote to local blostore."""
286 286 self._batch(_deduplicate(pointers), tostore, b'download')
287 287
288 288 def _batchrequest(self, pointers, action):
289 289 """Get metadata about objects pointed by pointers for given action
290 290
291 291 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
292 292 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
293 293 """
294 objects = [{'oid': p.oid(), 'size': p.size()} for p in pointers]
295 requestdata = json.dumps({
296 'objects': objects,
297 'operation': action,
298 })
294 objects = [{r'oid': pycompat.strurl(p.oid()),
295 r'size': p.size()} for p in pointers]
296 requestdata = pycompat.bytesurl(json.dumps({
297 r'objects': objects,
298 r'operation': pycompat.strurl(action),
299 }))
299 300 url = b'%s/objects/batch' % self.baseurl
300 301 batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
301 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
302 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
302 batchreq.add_header(r'Accept', r'application/vnd.git-lfs+json')
303 batchreq.add_header(r'Content-Type', r'application/vnd.git-lfs+json')
303 304 try:
304 305 with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
305 306 rawjson = rsp.read()
306 307 except util.urlerr.httperror as ex:
307 308 hints = {
308 309 400: _(b'check that lfs serving is enabled on %s and "%s" is '
309 310 b'supported') % (self.baseurl, action),
310 311 404: _(b'the "lfs.url" config may be used to override %s')
311 312 % self.baseurl,
312 313 }
313 314 hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
314 315 raise LfsRemoteError(
315 316 _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
316 317 hint=hint)
317 318 except util.urlerr.urlerror as ex:
318 319 hint = (_(b'the "lfs.url" config may be used to override %s')
319 320 % self.baseurl)
320 321 raise LfsRemoteError(_(b'LFS error: %s') % _urlerrorreason(ex),
321 322 hint=hint)
322 323 try:
323 324 response = json.loads(rawjson)
324 325 except ValueError:
325 326 raise LfsRemoteError(_(b'LFS server returns invalid JSON: %s')
326 327 % rawjson.encode("utf-8"))
327 328
328 329 if self.ui.debugflag:
329 330 self.ui.debug(b'Status: %d\n' % rsp.status)
330 331 # lfs-test-server and hg serve return headers in different order
331 332 headers = pycompat.bytestr(rsp.info())
332 333 self.ui.debug(b'%s\n'
333 334 % b'\n'.join(sorted(headers.splitlines())))
334 335
335 if 'objects' in response:
336 response['objects'] = sorted(response['objects'],
337 key=lambda p: p['oid'])
338 self.ui.debug('%s\n'
339 % json.dumps(response, indent=2,
340 separators=('', ': '), sort_keys=True))
336 if r'objects' in response:
337 response[r'objects'] = sorted(response[r'objects'],
338 key=lambda p: p[r'oid'])
339 self.ui.debug(b'%s\n'
340 % pycompat.bytesurl(
341 json.dumps(response, indent=2,
342 separators=(r'', r': '),
343 sort_keys=True)))
341 344
342 345 return response
343 346
344 347 def _checkforservererror(self, pointers, responses, action):
345 348 """Scans errors from objects
346 349
347 350 Raises LfsRemoteError if any objects have an error"""
348 351 for response in responses:
349 352 # The server should return 404 when objects cannot be found. Some
350 353 # server implementation (ex. lfs-test-server) does not set "error"
351 354 # but just removes "download" from "actions". Treat that case
352 355 # as the same as 404 error.
353 356 if b'error' not in response:
354 357 if (action == b'download'
355 358 and action not in response.get(b'actions', [])):
356 359 code = 404
357 360 else:
358 361 continue
359 362 else:
360 363 # An error dict without a code doesn't make much sense, so
361 364 # treat as a server error.
362 365 code = response.get(b'error').get(b'code', 500)
363 366
364 367 ptrmap = {p.oid(): p for p in pointers}
365 368 p = ptrmap.get(response[b'oid'], None)
366 369 if p:
367 370 filename = getattr(p, 'filename', b'unknown')
368 371 errors = {
369 372 404: b'The object does not exist',
370 373 410: b'The object was removed by the owner',
371 374 422: b'Validation error',
372 375 500: b'Internal server error',
373 376 }
374 377 msg = errors.get(code, b'status code %d' % code)
375 378 raise LfsRemoteError(_(b'LFS server error for "%s": %s')
376 379 % (filename, msg))
377 380 else:
378 381 raise LfsRemoteError(
379 382 _(b'LFS server error. Unsolicited response for oid %s')
380 383 % response[b'oid'])
381 384
382 385 def _extractobjects(self, response, pointers, action):
383 386 """extract objects from response of the batch API
384 387
385 388 response: parsed JSON object returned by batch API
386 389 return response['objects'] filtered by action
387 390 raise if any object has an error
388 391 """
389 392 # Scan errors from objects - fail early
390 393 objects = response.get(b'objects', [])
391 394 self._checkforservererror(pointers, objects, action)
392 395
393 396 # Filter objects with given action. Practically, this skips uploading
394 397 # objects which exist in the server.
395 398 filteredobjects = [o for o in objects
396 399 if action in o.get(b'actions', [])]
397 400
398 401 return filteredobjects
399 402
400 403 def _basictransfer(self, obj, action, localstore):
401 404 """Download or upload a single object using basic transfer protocol
402 405
403 406 obj: dict, an object description returned by batch API
404 407 action: string, one of ['upload', 'download']
405 408 localstore: blobstore.local
406 409
407 410 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
408 411 basic-transfers.md
409 412 """
410 413 oid = pycompat.bytestr(obj['oid'])
411 414
412 415 href = pycompat.bytestr(obj['actions'][action].get('href'))
413 416 headers = obj['actions'][action].get('header', {}).items()
414 417
415 418 request = util.urlreq.request(href)
416 419 if action == b'upload':
417 420 # If uploading blobs, read data from local blobstore.
418 421 if not localstore.verify(oid):
419 422 raise error.Abort(_(b'detected corrupt lfs object: %s') % oid,
420 423 hint=_(b'run hg verify'))
421 424 request.data = filewithprogress(localstore.open(oid), None)
422 request.get_method = lambda: 'PUT'
423 request.add_header('Content-Type', 'application/octet-stream')
425 request.get_method = lambda: r'PUT'
426 request.add_header(r'Content-Type', r'application/octet-stream')
424 427
425 428 for k, v in headers:
426 429 request.add_header(k, v)
427 430
428 431 response = b''
429 432 try:
430 433 with contextlib.closing(self.urlopener.open(request)) as req:
431 434 ui = self.ui # Shorten debug lines
432 435 if self.ui.debugflag:
433 436 ui.debug(b'Status: %d\n' % req.status)
434 437 # lfs-test-server and hg serve return headers in different
435 438 # order
436 439 headers = pycompat.bytestr(req.info())
437 440 ui.debug(b'%s\n'
438 441 % b'\n'.join(sorted(headers.splitlines())))
439 442
440 443 if action == b'download':
441 444 # If downloading blobs, store downloaded data to local
442 445 # blobstore
443 446 localstore.download(oid, req)
444 447 else:
445 448 while True:
446 449 data = req.read(1048576)
447 450 if not data:
448 451 break
449 452 response += data
450 453 if response:
451 454 ui.debug(b'lfs %s response: %s' % (action, response))
452 455 except util.urlerr.httperror as ex:
453 456 if self.ui.debugflag:
454 457 self.ui.debug(b'%s: %s\n' % (oid, ex.read())) # XXX: also bytes?
455 458 raise LfsRemoteError(_(b'LFS HTTP error: %s (oid=%s, action=%s)')
456 459 % (stringutil.forcebytestr(ex), oid, action))
457 460 except util.urlerr.urlerror as ex:
458 461 hint = (_(b'attempted connection to %s')
459 462 % pycompat.bytesurl(util.urllibcompat.getfullurl(request)))
460 463 raise LfsRemoteError(_(b'LFS error: %s') % _urlerrorreason(ex),
461 464 hint=hint)
462 465
463 466 def _batch(self, pointers, localstore, action):
464 467 if action not in [b'upload', b'download']:
465 468 raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action)
466 469
467 470 response = self._batchrequest(pointers, action)
468 471 objects = self._extractobjects(response, pointers, action)
469 472 total = sum(x.get(b'size', 0) for x in objects)
470 473 sizes = {}
471 474 for obj in objects:
472 475 sizes[obj.get(b'oid')] = obj.get(b'size', 0)
473 476 topic = {b'upload': _(b'lfs uploading'),
474 477 b'download': _(b'lfs downloading')}[action]
475 478 if len(objects) > 1:
476 479 self.ui.note(_(b'lfs: need to transfer %d objects (%s)\n')
477 480 % (len(objects), util.bytecount(total)))
478 481
479 482 def transfer(chunk):
480 483 for obj in chunk:
481 484 objsize = obj.get(b'size', 0)
482 485 if self.ui.verbose:
483 486 if action == b'download':
484 487 msg = _(b'lfs: downloading %s (%s)\n')
485 488 elif action == b'upload':
486 489 msg = _(b'lfs: uploading %s (%s)\n')
487 490 self.ui.note(msg % (obj.get(b'oid'),
488 491 util.bytecount(objsize)))
489 492 retry = self.retry
490 493 while True:
491 494 try:
492 495 self._basictransfer(obj, action, localstore)
493 496 yield 1, obj.get(b'oid')
494 497 break
495 498 except socket.error as ex:
496 499 if retry > 0:
497 500 self.ui.note(
498 501 _(b'lfs: failed: %r (remaining retry %d)\n')
499 502 % (stringutil.forcebytestr(ex), retry))
500 503 retry -= 1
501 504 continue
502 505 raise
503 506
504 507 # Until https multiplexing gets sorted out
505 508 if self.ui.configbool(b'experimental', b'lfs.worker-enable'):
506 509 oids = worker.worker(self.ui, 0.1, transfer, (),
507 510 sorted(objects, key=lambda o: o.get(b'oid')))
508 511 else:
509 512 oids = transfer(sorted(objects, key=lambda o: o.get(b'oid')))
510 513
511 514 with self.ui.makeprogress(topic, total=total) as progress:
512 515 progress.update(0)
513 516 processed = 0
514 517 blobs = 0
515 518 for _one, oid in oids:
516 519 processed += sizes[oid]
517 520 blobs += 1
518 521 progress.update(processed)
519 522 self.ui.note(_(b'lfs: processed: %s\n') % oid)
520 523
521 524 if blobs > 0:
522 525 if action == b'upload':
523 526 self.ui.status(_(b'lfs: uploaded %d files (%s)\n')
524 527 % (blobs, util.bytecount(processed)))
525 528 elif action == b'download':
526 529 self.ui.status(_(b'lfs: downloaded %d files (%s)\n')
527 530 % (blobs, util.bytecount(processed)))
528 531
529 532 def __del__(self):
530 533 # copied from mercurial/httppeer.py
531 534 urlopener = getattr(self, 'urlopener', None)
532 535 if urlopener:
533 536 for h in urlopener.handlers:
534 537 h.close()
535 538 getattr(h, "close_all", lambda : None)()
536 539
537 540 class _dummyremote(object):
538 541 """Dummy store storing blobs to temp directory."""
539 542
540 543 def __init__(self, repo, url):
541 544 fullpath = repo.vfs.join(b'lfs', url.path)
542 545 self.vfs = lfsvfs(fullpath)
543 546
544 547 def writebatch(self, pointers, fromstore):
545 548 for p in _deduplicate(pointers):
546 549 content = fromstore.read(p.oid(), verify=True)
547 550 with self.vfs(p.oid(), b'wb', atomictemp=True) as fp:
548 551 fp.write(content)
549 552
550 553 def readbatch(self, pointers, tostore):
551 554 for p in _deduplicate(pointers):
552 555 with self.vfs(p.oid(), b'rb') as fp:
553 556 tostore.download(p.oid(), fp)
554 557
555 558 class _nullremote(object):
556 559 """Null store storing blobs to /dev/null."""
557 560
558 561 def __init__(self, repo, url):
559 562 pass
560 563
561 564 def writebatch(self, pointers, fromstore):
562 565 pass
563 566
564 567 def readbatch(self, pointers, tostore):
565 568 pass
566 569
567 570 class _promptremote(object):
568 571 """Prompt user to set lfs.url when accessed."""
569 572
570 573 def __init__(self, repo, url):
571 574 pass
572 575
573 576 def writebatch(self, pointers, fromstore, ui=None):
574 577 self._prompt()
575 578
576 579 def readbatch(self, pointers, tostore, ui=None):
577 580 self._prompt()
578 581
579 582 def _prompt(self):
580 583 raise error.Abort(_(b'lfs.url needs to be configured'))
581 584
582 585 _storemap = {
583 586 b'https': _gitlfsremote,
584 587 b'http': _gitlfsremote,
585 588 b'file': _dummyremote,
586 589 b'null': _nullremote,
587 590 None: _promptremote,
588 591 }
589 592
590 593 def _deduplicate(pointers):
591 594 """Remove any duplicate oids that exist in the list"""
592 595 reduced = util.sortdict()
593 596 for p in pointers:
594 597 reduced[p.oid()] = p
595 598 return reduced.values()
596 599
597 600 def _verify(oid, content):
598 601 realoid = node.hex(hashlib.sha256(content).digest())
599 602 if realoid != oid:
600 603 raise LfsCorruptionError(_(b'detected corrupt lfs object: %s') % oid,
601 604 hint=_(b'run hg verify'))
602 605
603 606 def remote(repo, remote=None):
604 607 """remotestore factory. return a store in _storemap depending on config
605 608
606 609 If ``lfs.url`` is specified, use that remote endpoint. Otherwise, try to
607 610 infer the endpoint, based on the remote repository using the same path
608 611 adjustments as git. As an extension, 'http' is supported as well so that
609 612 ``hg serve`` works out of the box.
610 613
611 614 https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md
612 615 """
613 616 lfsurl = repo.ui.config(b'lfs', b'url')
614 617 url = util.url(lfsurl or '')
615 618 if lfsurl is None:
616 619 if remote:
617 620 path = remote
618 621 elif util.safehasattr(repo, '_subtoppath'):
619 622 # The pull command sets this during the optional update phase, which
620 623 # tells exactly where the pull originated, whether 'paths.default'
621 624 # or explicit.
622 625 path = repo._subtoppath
623 626 else:
624 627 # TODO: investigate 'paths.remote:lfsurl' style path customization,
625 628 # and fall back to inferring from 'paths.remote' if unspecified.
626 629 path = repo.ui.config(b'paths', b'default') or b''
627 630
628 631 defaulturl = util.url(path)
629 632
630 633 # TODO: support local paths as well.
631 634 # TODO: consider the ssh -> https transformation that git applies
632 635 if defaulturl.scheme in (b'http', b'https'):
633 636 if defaulturl.path and defaulturl.path[:-1] != b'/':
634 637 defaulturl.path += b'/'
635 638 defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs'
636 639
637 640 url = util.url(bytes(defaulturl))
638 641 repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url)
639 642
640 643 scheme = url.scheme
641 644 if scheme not in _storemap:
642 645 raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme)
643 646 return _storemap[scheme](repo, url)
644 647
645 648 class LfsRemoteError(error.StorageError):
646 649 pass
647 650
648 651 class LfsCorruptionError(error.Abort):
649 652 """Raised when a corrupt blob is detected, aborting an operation
650 653
651 654 It exists to allow specialized handling on the server side."""
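
Two closing notes on the conventions above. First, the r'' prefixes on
plain ASCII literals (r'oid', r'Accept', r'PUT', ...) are not about
escaping: at the time of this change, Mercurial's Python 3 module loader
rewrote unprefixed string literals into bytes literals at import time, and
a raw-string prefix was the accepted way to opt out and keep a native str
for APIs like json and urllib. Second, the debug path sorts both the
response headers and the decoded JSON so that lfs-test-server and hg serve
produce identical test output. A small sketch of that deterministic dump
(the sample response data is illustrative):

    import json

    response = {'objects': [{'oid': 'b' * 64, 'size': 2},
                            {'oid': 'a' * 64, 'size': 1}]}

    # Sort the object list and the keys so two servers answering with the
    # same content in a different order still yield byte-identical debug
    # output, mirroring what the diff does before ui.debug().
    if 'objects' in response:
        response['objects'] = sorted(response['objects'],
                                     key=lambda p: p['oid'])
    print(json.dumps(response, indent=2, separators=('', ': '),
                     sort_keys=True))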