##// END OF EJS Templates
lfs: add "bytes" as the unit to the upload/download progress bar...
Matt Harbison -
r44534:05881d00 default
parent child Browse files
Show More
@@ -1,746 +1,748 b''
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import contextlib
10 import contextlib
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import json
13 import json
14 import os
14 import os
15 import re
15 import re
16 import socket
16 import socket
17
17
18 from mercurial.i18n import _
18 from mercurial.i18n import _
19 from mercurial.pycompat import getattr
19 from mercurial.pycompat import getattr
20
20
21 from mercurial import (
21 from mercurial import (
22 encoding,
22 encoding,
23 error,
23 error,
24 node,
24 node,
25 pathutil,
25 pathutil,
26 pycompat,
26 pycompat,
27 url as urlmod,
27 url as urlmod,
28 util,
28 util,
29 vfs as vfsmod,
29 vfs as vfsmod,
30 worker,
30 worker,
31 )
31 )
32
32
33 from mercurial.utils import stringutil
33 from mercurial.utils import stringutil
34
34
35 from ..largefiles import lfutil
35 from ..largefiles import lfutil
36
36
# An LFS object id (oid) is a SHA-256 hash rendered as 64 lowercase hex
# digits; this anchored pattern validates a complete oid and nothing else.
_lfsre = re.compile(br'\A[a-f0-9]{64}\Z')
39
39
40
40
class lfsvfs(vfsmod.vfs):
    """A vfs storing blobs in two-level directories keyed by their oid."""

    def join(self, path):
        """Map an oid to its on-disk location: the first two hex characters
        become a subdirectory, the remaining 62 the file name (XX/XXXXX...)."""
        if not _lfsre.match(path):
            raise error.ProgrammingError(b'unexpected lfs path: %s' % path)
        return super(lfsvfs, self).join(path[0:2], path[2:])

    def walk(self, path=None, onerror=None):
        """Yield a single (dirpath, [], oids) tuple for blobs under path

        Oids only exist in the root of this vfs, so dirpath is always ''.
        """
        base = os.path.normpath(self.base)
        # When dirpath == base, dirpath[skip:] is empty because
        # len(dirpath) < skip in that case.
        skip = len(pathutil.normasprefix(base))

        found = []
        start = self.reljoin(self.base, path or b'')
        for dirpath, dirs, files in os.walk(start, onerror=onerror):
            rel = dirpath[skip:]

            # Silently skip anything that is not a two-character prefix
            # directory, or a file that does not complete a valid oid.
            if len(rel) == 2:
                found.extend(
                    rel + f for f in files if _lfsre.match(rel + f)
                )

        yield (b'', [], found)
71
71
72
72
class nullvfs(lfsvfs):
    """A blob store that holds nothing: reads fail, writes are discarded."""

    def __init__(self):
        # Deliberately skip lfsvfs.__init__: there is no backing directory.
        pass

    def exists(self, oid):
        # Nothing is ever present in this store.
        return False

    def read(self, oid):
        # store.read() calls into here if the blob doesn't exist in its
        # self.vfs.  Raise the same error as a normal vfs when asked to read
        # a file that doesn't exist.  The only difference is the full file
        # path isn't available in the error.
        raise IOError(
            errno.ENOENT,
            pycompat.sysstr(b'%s: No such file or directory' % oid),
        )

    def walk(self, path=None, onerror=None):
        # Matches lfsvfs.walk()'s single-tuple shape, with no oids.
        return (b'', [], [])

    def write(self, oid, data):
        # Writes are silently dropped.
        pass
95
95
96
96
class filewithprogress(object):
    """A file-like object supporting __len__ and read().

    Each non-empty read() reports the number of bytes delivered to an
    optional callback, which makes it easy to drive a progress bar.
    """

    def __init__(self, fp, callback):
        self._fp = fp
        # Invoked as callback(readsize) after every non-empty read.
        self._callback = callback
        # Measure the total size up front by seeking to the end, then
        # rewind so reads start at the beginning.
        fp.seek(0, os.SEEK_END)
        self._len = fp.tell()
        fp.seek(0)

    def __len__(self):
        return self._len

    def read(self, size):
        fp = self._fp
        if fp is None:
            # The underlying file was already exhausted and closed.
            return b''
        data = fp.read(size)
        if not data:
            # EOF: close eagerly and drop the reference so subsequent
            # reads return b'' instead of touching a closed file.
            fp.close()
            self._fp = None
        elif self._callback:
            self._callback(len(data))
        return data
124
124
125
125
class local(object):
    """Local blobstore for large file contents.

    This blobstore is used both as a cache and as a staging area for large blobs
    to be uploaded to the remote blobstore.
    """

    def __init__(self, repo):
        # Blobs for this repo live under .hg/store/lfs/objects.
        fullpath = repo.svfs.join(b'lfs/objects')
        self.vfs = lfsvfs(fullpath)

        if repo.ui.configbool(b'experimental', b'lfs.disableusercache'):
            # nullvfs silently discards cache writes and reports nothing
            # as present, so blobs live only in the repo-local store.
            self.cachevfs = nullvfs()
        else:
            usercache = lfutil._usercachedir(repo.ui, b'lfs')
            self.cachevfs = lfsvfs(usercache)
        self.ui = repo.ui

    def open(self, oid):
        """Open a read-only file descriptor to the named blob, in either the
        usercache or the local store."""
        # The usercache is the most likely place to hold the file. Commit will
        # write to both it and the local store, as will anything that downloads
        # the blobs. However, things like clone without an update won't
        # populate the local store. For an init + push of a local clone,
        # the usercache is the only place it _could_ be. If not present, the
        # missing file msg here will indicate the local repo, not the usercache.
        if self.cachevfs.exists(oid):
            return self.cachevfs(oid, b'rb')

        return self.vfs(oid, b'rb')

    def download(self, oid, src):
        """Read the blob from the remote source in chunks, verify the content,
        and write to this local blobstore.

        Raises LfsCorruptionError if the received content does not hash to
        ``oid``.
        """
        sha256 = hashlib.sha256()

        with self.vfs(oid, b'wb', atomictemp=True) as fp:
            for chunk in util.filechunkiter(src, size=1048576):
                fp.write(chunk)
                sha256.update(chunk)

            # Verify inside the atomictemp context: raising here discards
            # the temp file, so a corrupt blob is never committed to the
            # store.
            realoid = node.hex(sha256.digest())
            if realoid != oid:
                raise LfsCorruptionError(
                    _(b'corrupt remote lfs object: %s') % oid
                )

        self._linktousercache(oid)

    def write(self, oid, data):
        """Write blob to local blobstore.

        This should only be called from the filelog during a commit or similar.
        As such, there is no need to verify the data. Imports from a remote
        store must use ``download()`` instead."""
        with self.vfs(oid, b'wb', atomictemp=True) as fp:
            fp.write(data)

        self._linktousercache(oid)

    def linkfromusercache(self, oid):
        """Link blobs found in the user cache into this store.

        The server module needs to do this when it lets the client know not to
        upload the blob, to ensure it is always available in this store.
        Normally this is done implicitly when the client reads or writes the
        blob, but that doesn't happen when the server tells the client that it
        already has the blob.
        """
        if not isinstance(self.cachevfs, nullvfs) and not self.vfs.exists(oid):
            self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
            lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))

    def _linktousercache(self, oid):
        # XXX: should we verify the content of the cache, and hardlink back to
        # the local store on success, but truncate, write and link on failure?
        if not self.cachevfs.exists(oid) and not isinstance(
            self.cachevfs, nullvfs
        ):
            self.ui.note(_(b'lfs: adding %s to the usercache\n') % oid)
            lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))

    def read(self, oid, verify=True):
        """Read blob from local blobstore.

        Falls back to the usercache when the blob is absent from the local
        store, hardlinking it into the store on a successful (verified) read.
        """
        if not self.vfs.exists(oid):
            blob = self._read(self.cachevfs, oid, verify)

            # Even if revlog will verify the content, it needs to be verified
            # now before making the hardlink to avoid propagating corrupt blobs.
            # Don't abort if corruption is detected, because `hg verify` will
            # give more useful info about the corruption- simply don't add the
            # hardlink.
            # (If verify was True, _read() above already raised on mismatch,
            # so reaching this point means the content checked out.)
            if verify or node.hex(hashlib.sha256(blob).digest()) == oid:
                self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
                lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
        else:
            self.ui.note(_(b'lfs: found %s in the local lfs store\n') % oid)
            blob = self._read(self.vfs, oid, verify)
        return blob

    def _read(self, vfs, oid, verify):
        """Read blob (after verifying) from the given store"""
        blob = vfs.read(oid)
        if verify:
            # _verify() raises on a hash mismatch (defined elsewhere in
            # this module).
            _verify(oid, blob)
        return blob

    def verify(self, oid):
        """Indicate whether or not the hash of the underlying file matches its
        name."""
        sha256 = hashlib.sha256()

        # Hash in fixed-size chunks so huge blobs don't need to fit in memory.
        with self.open(oid) as fp:
            for chunk in util.filechunkiter(fp, size=1048576):
                sha256.update(chunk)

        return oid == node.hex(sha256.digest())

    def has(self, oid):
        """Returns True if the local blobstore contains the requested blob,
        False otherwise."""
        return self.cachevfs.exists(oid) or self.vfs.exists(oid)
249
249
250
250
def _urlerrorreason(urlerror):
    '''Create a friendly message for the given URLError to be used in an
    LfsRemoteError message.
    '''
    # Unwrap one level: URLError frequently wraps the real cause in .reason.
    err = urlerror
    if isinstance(urlerror.reason, Exception):
        err = urlerror.reason

    if util.safehasattr(err, b'reason'):
        try:
            # usually it is in the form (errno, strerror)
            msg = err.reason.args[1]
        except (AttributeError, IndexError):
            # it might be anything, for example a string
            msg = err.reason
        if isinstance(msg, pycompat.unicode):
            # SSLError of Python 2.7.9 contains a unicode
            msg = encoding.unitolocal(msg)
        return msg

    strerror = getattr(err, "strerror", None)
    if strerror:
        return encoding.strtolocal(strerror)

    return stringutil.forcebytestr(urlerror)
274
274
275
275
class lfsauthhandler(util.urlreq.basehandler):
    # Run before HTTPDigestAuthHandler (handler_order == 490) so a
    # non-Basic challenge is rejected instead of negotiated.
    handler_order = 480

    def http_error_401(self, req, fp, code, msg, headers):
        """Enforces that any authentication performed is HTTP Basic
        Authentication. No authentication is also acceptable.
        """
        authreq = headers.get('www-authenticate', None)
        if not authreq:
            # No challenge at all: let other handlers deal with the 401.
            return None

        scheme = authreq.split()[0]
        if scheme.lower() != 'basic':
            msg = _(b'the server must support Basic Authentication')
            raise util.urlerr.httperror(
                req.get_full_url(),
                code,
                encoding.strfromlocal(msg),
                headers,
                fp,
            )
        return None
297
297
298
298
299 class _gitlfsremote(object):
299 class _gitlfsremote(object):
300 def __init__(self, repo, url):
300 def __init__(self, repo, url):
301 ui = repo.ui
301 ui = repo.ui
302 self.ui = ui
302 self.ui = ui
303 baseurl, authinfo = url.authinfo()
303 baseurl, authinfo = url.authinfo()
304 self.baseurl = baseurl.rstrip(b'/')
304 self.baseurl = baseurl.rstrip(b'/')
305 useragent = repo.ui.config(b'experimental', b'lfs.user-agent')
305 useragent = repo.ui.config(b'experimental', b'lfs.user-agent')
306 if not useragent:
306 if not useragent:
307 useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version()
307 useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version()
308 self.urlopener = urlmod.opener(ui, authinfo, useragent)
308 self.urlopener = urlmod.opener(ui, authinfo, useragent)
309 self.urlopener.add_handler(lfsauthhandler())
309 self.urlopener.add_handler(lfsauthhandler())
310 self.retry = ui.configint(b'lfs', b'retry')
310 self.retry = ui.configint(b'lfs', b'retry')
311
311
    def writebatch(self, pointers, fromstore):
        """Batch upload from local to remote blobstore.

        ``pointers`` is deduplicated first, so each blob is sent at most once.
        """
        self._batch(_deduplicate(pointers), fromstore, b'upload')
315
315
    def readbatch(self, pointers, tostore):
        """Batch download from remote to local blostore.

        ``pointers`` is deduplicated first, so each blob is fetched at most
        once.
        """
        self._batch(_deduplicate(pointers), tostore, b'download')
319
319
    def _batchrequest(self, pointers, action):
        """Get metadata about objects pointed by pointers for given action

        Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
        See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
        """
        # The batch API speaks native (unicode) strings, so convert oids
        # and the action to str for JSON serialization.
        objects = [
            {'oid': pycompat.strurl(p.oid()), 'size': p.size()}
            for p in pointers
        ]
        requestdata = pycompat.bytesurl(
            json.dumps(
                {'objects': objects, 'operation': pycompat.strurl(action),}
            )
        )
        url = b'%s/objects/batch' % self.baseurl
        batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
        batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
        batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
        try:
            with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
                rawjson = rsp.read()
        except util.urlerr.httperror as ex:
            # Give targeted hints for the two most common misconfigurations.
            hints = {
                400: _(
                    b'check that lfs serving is enabled on %s and "%s" is '
                    b'supported'
                )
                % (self.baseurl, action),
                404: _(b'the "lfs.url" config may be used to override %s')
                % self.baseurl,
            }
            hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
            raise LfsRemoteError(
                _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
                hint=hint,
            )
        except util.urlerr.urlerror as ex:
            hint = (
                _(b'the "lfs.url" config may be used to override %s')
                % self.baseurl
            )
            raise LfsRemoteError(
                _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
            )
        try:
            response = pycompat.json_loads(rawjson)
        except ValueError:
            # NOTE(review): rawjson comes from rsp.read() and is bytes on
            # Python 3, which has no .encode() -- this error path would
            # itself raise AttributeError there.  Confirm and consider
            # using rawjson directly / forcebytestr(rawjson).
            raise LfsRemoteError(
                _(b'LFS server returns invalid JSON: %s')
                % rawjson.encode("utf-8")
            )

        if self.ui.debugflag:
            # NOTE(review): rsp is used here after the closing() context has
            # exited; status/info() appear to remain readable on the closed
            # response -- confirm.
            self.ui.debug(b'Status: %d\n' % rsp.status)
            # lfs-test-server and hg serve return headers in different order
            headers = pycompat.bytestr(rsp.info()).strip()
            self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))

            # Sort the objects so debug output is deterministic across
            # server implementations.
            if 'objects' in response:
                response['objects'] = sorted(
                    response['objects'], key=lambda p: p['oid']
                )
            self.ui.debug(
                b'%s\n'
                % pycompat.bytesurl(
                    json.dumps(
                        response,
                        indent=2,
                        separators=('', ': '),
                        sort_keys=True,
                    )
                )
            )

        def encodestr(x):
            # Recursively convert the decoded JSON's unicode strings back
            # to the bytes the rest of the extension works with.
            if isinstance(x, pycompat.unicode):
                return x.encode('utf-8')
            return x

        return pycompat.rapply(encodestr, response)
401
401
402 def _checkforservererror(self, pointers, responses, action):
402 def _checkforservererror(self, pointers, responses, action):
403 """Scans errors from objects
403 """Scans errors from objects
404
404
405 Raises LfsRemoteError if any objects have an error"""
405 Raises LfsRemoteError if any objects have an error"""
406 for response in responses:
406 for response in responses:
407 # The server should return 404 when objects cannot be found. Some
407 # The server should return 404 when objects cannot be found. Some
408 # server implementation (ex. lfs-test-server) does not set "error"
408 # server implementation (ex. lfs-test-server) does not set "error"
409 # but just removes "download" from "actions". Treat that case
409 # but just removes "download" from "actions". Treat that case
410 # as the same as 404 error.
410 # as the same as 404 error.
411 if b'error' not in response:
411 if b'error' not in response:
412 if action == b'download' and action not in response.get(
412 if action == b'download' and action not in response.get(
413 b'actions', []
413 b'actions', []
414 ):
414 ):
415 code = 404
415 code = 404
416 else:
416 else:
417 continue
417 continue
418 else:
418 else:
419 # An error dict without a code doesn't make much sense, so
419 # An error dict without a code doesn't make much sense, so
420 # treat as a server error.
420 # treat as a server error.
421 code = response.get(b'error').get(b'code', 500)
421 code = response.get(b'error').get(b'code', 500)
422
422
423 ptrmap = {p.oid(): p for p in pointers}
423 ptrmap = {p.oid(): p for p in pointers}
424 p = ptrmap.get(response[b'oid'], None)
424 p = ptrmap.get(response[b'oid'], None)
425 if p:
425 if p:
426 filename = getattr(p, 'filename', b'unknown')
426 filename = getattr(p, 'filename', b'unknown')
427 errors = {
427 errors = {
428 404: b'The object does not exist',
428 404: b'The object does not exist',
429 410: b'The object was removed by the owner',
429 410: b'The object was removed by the owner',
430 422: b'Validation error',
430 422: b'Validation error',
431 500: b'Internal server error',
431 500: b'Internal server error',
432 }
432 }
433 msg = errors.get(code, b'status code %d' % code)
433 msg = errors.get(code, b'status code %d' % code)
434 raise LfsRemoteError(
434 raise LfsRemoteError(
435 _(b'LFS server error for "%s": %s') % (filename, msg)
435 _(b'LFS server error for "%s": %s') % (filename, msg)
436 )
436 )
437 else:
437 else:
438 raise LfsRemoteError(
438 raise LfsRemoteError(
439 _(b'LFS server error. Unsolicited response for oid %s')
439 _(b'LFS server error. Unsolicited response for oid %s')
440 % response[b'oid']
440 % response[b'oid']
441 )
441 )
442
442
443 def _extractobjects(self, response, pointers, action):
443 def _extractobjects(self, response, pointers, action):
444 """extract objects from response of the batch API
444 """extract objects from response of the batch API
445
445
446 response: parsed JSON object returned by batch API
446 response: parsed JSON object returned by batch API
447 return response['objects'] filtered by action
447 return response['objects'] filtered by action
448 raise if any object has an error
448 raise if any object has an error
449 """
449 """
450 # Scan errors from objects - fail early
450 # Scan errors from objects - fail early
451 objects = response.get(b'objects', [])
451 objects = response.get(b'objects', [])
452 self._checkforservererror(pointers, objects, action)
452 self._checkforservererror(pointers, objects, action)
453
453
454 # Filter objects with given action. Practically, this skips uploading
454 # Filter objects with given action. Practically, this skips uploading
455 # objects which exist in the server.
455 # objects which exist in the server.
456 filteredobjects = [
456 filteredobjects = [
457 o for o in objects if action in o.get(b'actions', [])
457 o for o in objects if action in o.get(b'actions', [])
458 ]
458 ]
459
459
460 return filteredobjects
460 return filteredobjects
461
461
    def _basictransfer(self, obj, action, localstore):
        """Download or upload a single object using basic transfer protocol

        obj: dict, an object description returned by batch API
        action: string, one of ['upload', 'download']
        localstore: blobstore.local

        See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
basic-transfers.md
        """
        oid = obj[b'oid']
        href = obj[b'actions'][action].get(b'href')
        headers = obj[b'actions'][action].get(b'header', {}).items()

        request = util.urlreq.request(pycompat.strurl(href))
        if action == b'upload':
            # If uploading blobs, read data from local blobstore.
            if not localstore.verify(oid):
                raise error.Abort(
                    _(b'detected corrupt lfs object: %s') % oid,
                    hint=_(b'run hg verify'),
                )
            # No progress callback here; progress is handled by the caller.
            request.data = filewithprogress(localstore.open(oid), None)
            request.get_method = lambda: r'PUT'
            request.add_header('Content-Type', 'application/octet-stream')
            request.add_header('Content-Length', len(request.data))

        # Server-supplied per-action headers (e.g. authorization tokens).
        for k, v in headers:
            request.add_header(pycompat.strurl(k), pycompat.strurl(v))

        response = b''
        try:
            with contextlib.closing(self.urlopener.open(request)) as req:
                ui = self.ui  # Shorten debug lines
                if self.ui.debugflag:
                    ui.debug(b'Status: %d\n' % req.status)
                    # lfs-test-server and hg serve return headers in different
                    # order
                    headers = pycompat.bytestr(req.info()).strip()
                    ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))

                if action == b'download':
                    # If downloading blobs, store downloaded data to local
                    # blobstore
                    localstore.download(oid, req)
                else:
                    # Drain the upload response body in 1MB chunks.
                    # NOTE(review): bytes += is potentially quadratic; the
                    # response is presumably small so this looks harmless,
                    # but a join-based accumulator would be safer.
                    while True:
                        data = req.read(1048576)
                        if not data:
                            break
                        response += data
                    if response:
                        ui.debug(b'lfs %s response: %s' % (action, response))
        except util.urlerr.httperror as ex:
            if self.ui.debugflag:
                self.ui.debug(
                    b'%s: %s\n' % (oid, ex.read())
                )  # XXX: also bytes?
            raise LfsRemoteError(
                _(b'LFS HTTP error: %s (oid=%s, action=%s)')
                % (stringutil.forcebytestr(ex), oid, action)
            )
        except util.urlerr.urlerror as ex:
            hint = _(b'attempted connection to %s') % pycompat.bytesurl(
                util.urllibcompat.getfullurl(request)
            )
            raise LfsRemoteError(
                _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
            )
531
531
    def _batch(self, pointers, localstore, action):
        """Transfer a batch of blobs between ``localstore`` and the remote.

        ``action`` is either b'upload' or b'download'.  The batch endpoint
        is queried first to resolve each pointer into a per-object transfer
        description, then each object is moved with _basictransfer(),
        optionally fanned out across worker processes.  Progress is
        reported in bytes; a summary line is printed at the end.

        Raises error.ProgrammingError for any other ``action`` value.
        """
        if action not in [b'upload', b'download']:
            raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action)

        response = self._batchrequest(pointers, action)
        objects = self._extractobjects(response, pointers, action)
        # Total byte count drives the progress bar; per-oid sizes let the
        # consumer loop below advance it as each blob completes.
        total = sum(x.get(b'size', 0) for x in objects)
        sizes = {}
        for obj in objects:
            sizes[obj.get(b'oid')] = obj.get(b'size', 0)
        topic = {
            b'upload': _(b'lfs uploading'),
            b'download': _(b'lfs downloading'),
        }[action]
        if len(objects) > 1:
            self.ui.note(
                _(b'lfs: need to transfer %d objects (%s)\n')
                % (len(objects), util.bytecount(total))
            )

        def transfer(chunk):
            # Generator: transfer each object in ``chunk``, yielding
            # (1, oid) after each success so the caller can tally progress.
            for obj in chunk:
                objsize = obj.get(b'size', 0)
                if self.ui.verbose:
                    if action == b'download':
                        msg = _(b'lfs: downloading %s (%s)\n')
                    elif action == b'upload':
                        msg = _(b'lfs: uploading %s (%s)\n')
                    self.ui.note(
                        msg % (obj.get(b'oid'), util.bytecount(objsize))
                    )
                retry = self.retry
                while True:
                    try:
                        self._basictransfer(obj, action, localstore)
                        yield 1, obj.get(b'oid')
                        break
                    except socket.error as ex:
                        # Only socket-level failures are retried; HTTP and
                        # protocol errors propagate immediately.
                        if retry > 0:
                            self.ui.note(
                                _(b'lfs: failed: %r (remaining retry %d)\n')
                                % (stringutil.forcebytestr(ex), retry)
                            )
                            retry -= 1
                            continue
                        raise

        # Until https multiplexing gets sorted out
        if self.ui.configbool(b'experimental', b'lfs.worker-enable'):
            oids = worker.worker(
                self.ui,
                0.1,
                transfer,
                (),
                sorted(objects, key=lambda o: o.get(b'oid')),
            )
        else:
            oids = transfer(sorted(objects, key=lambda o: o.get(b'oid')))

        with self.ui.makeprogress(
            topic, unit=_(b"bytes"), total=total
        ) as progress:
            progress.update(0)
            processed = 0
            blobs = 0
            # Draining ``oids`` here is what actually runs the transfers
            # when the worker pool is disabled (``transfer`` is lazy).
            for _one, oid in oids:
                processed += sizes[oid]
                blobs += 1
                progress.update(processed)
                self.ui.note(_(b'lfs: processed: %s\n') % oid)

        if blobs > 0:
            if action == b'upload':
                self.ui.status(
                    _(b'lfs: uploaded %d files (%s)\n')
                    % (blobs, util.bytecount(processed))
                )
            elif action == b'download':
                self.ui.status(
                    _(b'lfs: downloaded %d files (%s)\n')
                    % (blobs, util.bytecount(processed))
                )
612
614
613 def __del__(self):
615 def __del__(self):
614 # copied from mercurial/httppeer.py
616 # copied from mercurial/httppeer.py
615 urlopener = getattr(self, 'urlopener', None)
617 urlopener = getattr(self, 'urlopener', None)
616 if urlopener:
618 if urlopener:
617 for h in urlopener.handlers:
619 for h in urlopener.handlers:
618 h.close()
620 h.close()
619 getattr(h, "close_all", lambda: None)()
621 getattr(h, "close_all", lambda: None)()
620
622
621
623
class _dummyremote(object):
    """Store keeping blobs in a directory under the repo's vfs.

    Backs ``file://`` style lfs.url endpoints.
    """

    def __init__(self, repo, url):
        storepath = repo.vfs.join(b'lfs', url.path)
        self.vfs = lfsvfs(storepath)

    def writebatch(self, pointers, fromstore):
        """Copy each unique blob in ``pointers`` out of ``fromstore``."""
        for pointer in _deduplicate(pointers):
            data = fromstore.read(pointer.oid(), verify=True)
            with self.vfs(pointer.oid(), b'wb', atomictemp=True) as out:
                out.write(data)

    def readbatch(self, pointers, tostore):
        """Feed each unique blob in ``pointers`` into ``tostore``."""
        for pointer in _deduplicate(pointers):
            with self.vfs(pointer.oid(), b'rb') as blob:
                tostore.download(pointer.oid(), blob)
639
641
640
642
641 class _nullremote(object):
643 class _nullremote(object):
642 """Null store storing blobs to /dev/null."""
644 """Null store storing blobs to /dev/null."""
643
645
644 def __init__(self, repo, url):
646 def __init__(self, repo, url):
645 pass
647 pass
646
648
647 def writebatch(self, pointers, fromstore):
649 def writebatch(self, pointers, fromstore):
648 pass
650 pass
649
651
650 def readbatch(self, pointers, tostore):
652 def readbatch(self, pointers, tostore):
651 pass
653 pass
652
654
653
655
654 class _promptremote(object):
656 class _promptremote(object):
655 """Prompt user to set lfs.url when accessed."""
657 """Prompt user to set lfs.url when accessed."""
656
658
657 def __init__(self, repo, url):
659 def __init__(self, repo, url):
658 pass
660 pass
659
661
660 def writebatch(self, pointers, fromstore, ui=None):
662 def writebatch(self, pointers, fromstore, ui=None):
661 self._prompt()
663 self._prompt()
662
664
663 def readbatch(self, pointers, tostore, ui=None):
665 def readbatch(self, pointers, tostore, ui=None):
664 self._prompt()
666 self._prompt()
665
667
666 def _prompt(self):
668 def _prompt(self):
667 raise error.Abort(_(b'lfs.url needs to be configured'))
669 raise error.Abort(_(b'lfs.url needs to be configured'))
668
670
669
671
# Map a URL scheme (from lfs.url or the inferred default path) to the store
# class used to reach that kind of endpoint.  ``None`` means no scheme could
# be determined, which prompts the user to configure lfs.url.
_storemap = {
    b'https': _gitlfsremote,
    b'http': _gitlfsremote,
    b'file': _dummyremote,
    b'null': _nullremote,
    None: _promptremote,
}
677
679
678
680
def _deduplicate(pointers):
    """Return ``pointers`` with duplicate oids removed.

    First-seen order is preserved via util.sortdict; when the same oid
    occurs more than once, the last pointer for it wins.
    """
    unique = util.sortdict()
    for pointer in pointers:
        unique[pointer.oid()] = pointer
    return unique.values()
685
687
686
688
def _verify(oid, content):
    """Raise LfsCorruptionError unless sha256(``content``) matches ``oid``."""
    actual = node.hex(hashlib.sha256(content).digest())
    if actual != oid:
        raise LfsCorruptionError(
            _(b'detected corrupt lfs object: %s') % oid,
            hint=_(b'run hg verify'),
        )
694
696
695
697
def remote(repo, remote=None):
    """remotestore factory. return a store in _storemap depending on config

    If ``lfs.url`` is specified, use that remote endpoint.  Otherwise, try to
    infer the endpoint, based on the remote repository using the same path
    adjustments as git.  As an extension, 'http' is supported as well so that
    ``hg serve`` works out of the box.

    https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md

    Raises error.Abort when the resulting URL scheme is not in _storemap.
    """
    lfsurl = repo.ui.config(b'lfs', b'url')
    url = util.url(lfsurl or b'')
    if lfsurl is None:
        if remote:
            path = remote
        elif util.safehasattr(repo, b'_subtoppath'):
            # The pull command sets this during the optional update phase, which
            # tells exactly where the pull originated, whether 'paths.default'
            # or explicit.
            path = repo._subtoppath
        else:
            # TODO: investigate 'paths.remote:lfsurl' style path customization,
            # and fall back to inferring from 'paths.remote' if unspecified.
            path = repo.ui.config(b'paths', b'default') or b''

        defaulturl = util.url(path)

        # TODO: support local paths as well.
        # TODO: consider the ssh -> https transformation that git applies
        if defaulturl.scheme in (b'http', b'https'):
            # Append a separator only when the path doesn't already end in
            # '/'.  The previous test used ``path[:-1]`` (everything BUT the
            # last byte) instead of ``path[-1:]`` (just the last byte), so it
            # appended '/' even to paths already ending in '/', yielding a
            # double slash in the inferred endpoint.
            if defaulturl.path and defaulturl.path[-1:] != b'/':
                defaulturl.path += b'/'
            defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs'

            url = util.url(bytes(defaulturl))
            repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url)

    scheme = url.scheme
    if scheme not in _storemap:
        raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme)
    return _storemap[scheme](repo, url)
737
739
738
740
class LfsRemoteError(error.StorageError):
    """Raised when communicating with the remote LFS endpoint fails,
    e.g. on HTTP errors or connection failures during a transfer."""

    pass
741
743
742
744
class LfsCorruptionError(error.Abort):
    """Raised when a corrupt blob is detected, aborting an operation

    It exists to allow specialized handling on the server side."""

    # Raised by _verify() when a blob's sha256 does not match its oid.
General Comments 0
You need to be logged in to leave comments. Login now