##// END OF EJS Templates
lfs: improve an exception message for blob corruption detected on transfer...
Matt Harbison -
r50427:250d9c8a default
parent child Browse files
Show More
@@ -1,769 +1,773 b''
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
1 # blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import contextlib
9 import contextlib
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12 import json
12 import json
13 import os
13 import os
14 import re
14 import re
15 import socket
15 import socket
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial.pycompat import getattr
18 from mercurial.pycompat import getattr
19 from mercurial.node import hex
19 from mercurial.node import hex
20
20
21 from mercurial import (
21 from mercurial import (
22 encoding,
22 encoding,
23 error,
23 error,
24 httpconnection as httpconnectionmod,
24 httpconnection as httpconnectionmod,
25 pathutil,
25 pathutil,
26 pycompat,
26 pycompat,
27 url as urlmod,
27 url as urlmod,
28 util,
28 util,
29 vfs as vfsmod,
29 vfs as vfsmod,
30 worker,
30 worker,
31 )
31 )
32
32
33 from mercurial.utils import (
33 from mercurial.utils import (
34 stringutil,
34 stringutil,
35 urlutil,
35 urlutil,
36 )
36 )
37
37
38 from ..largefiles import lfutil
38 from ..largefiles import lfutil
39
39
# A valid blob name is the hex SHA-256 digest of its content:
# exactly 64 lowercase hex characters (64 bytes for SHA256).
_lfsre = re.compile(br'\A[a-f0-9]{64}\Z')


class lfsvfs(vfsmod.vfs):
    """A vfs that shards blobs into two-level directories keyed by oid."""

    def join(self, path):
        """split the path at first two characters, like: XX/XXXXX..."""
        if not _lfsre.match(path):
            raise error.ProgrammingError(b'unexpected lfs path: %s' % path)
        return super(lfsvfs, self).join(path[0:2], path[2:])

    def walk(self, path=None, onerror=None):
        """Yield (dirpath, [], oids) tuple for blobs under path

        Oids only exist in the root of this vfs, so dirpath is always ''.
        """
        root = os.path.normpath(self.base)
        # when dirpath == root, dirpath[prefixlen:] becomes empty
        # because len(dirpath) < prefixlen.
        prefixlen = len(pathutil.normasprefix(root))
        oids = []

        start = self.reljoin(self.base, path or b'')
        for dirpath, dirs, files in os.walk(start, onerror=onerror):
            shard = dirpath[prefixlen:]

            # Only two-character shard directories can hold blobs; anything
            # else is silently skipped, as are files that aren't valid oids.
            if len(shard) == 2:
                oids.extend(shard + f for f in files if _lfsre.match(shard + f))

        yield (b'', [], oids)
74
74
75
75
class nullvfs(lfsvfs):
    """A do-nothing blob store, used when the usercache is disabled."""

    def __init__(self):
        # Deliberately skip lfsvfs/vfs initialization: there is no backing
        # directory for this store.
        pass

    def exists(self, oid):
        # Nothing is ever stored here.
        return False

    def read(self, oid):
        # store.read() calls into here if the blob doesn't exist in its
        # self.vfs. Raise the same error as a normal vfs when asked to read a
        # file that doesn't exist. The only difference is the full file path
        # isn't available in the error.
        raise IOError(
            errno.ENOENT,
            pycompat.sysstr(b'%s: No such file or directory' % oid),
        )

    def walk(self, path=None, onerror=None):
        # Same shape as lfsvfs.walk(), but always empty.
        return (b'', [], [])

    def write(self, oid, data):
        # Writes are silently discarded.
        pass
98
98
99
99
class lfsuploadfile(httpconnectionmod.httpsendfile):
    """a file-like object that supports keepalive."""

    def __init__(self, ui, filename):
        super(lfsuploadfile, self).__init__(ui, filename, b'rb')
        # Expose the underlying file's read() directly for urllib.
        self.read = self._data.read

    def _makeprogress(self):
        # progress is handled by the worker client, so suppress the
        # per-file progress bar the base class would otherwise create
        return None
109
109
110
110
class local:
    """Local blobstore for large file contents.

    This blobstore is used both as a cache and as a staging area for large blobs
    to be uploaded to the remote blobstore.
    """

    def __init__(self, repo):
        # Repository-local blobs live under .hg/store/lfs/objects.
        fullpath = repo.svfs.join(b'lfs/objects')
        self.vfs = lfsvfs(fullpath)

        if repo.ui.configbool(b'experimental', b'lfs.disableusercache'):
            # The usercache can be disabled for testing/experimental use;
            # nullvfs makes every cache operation a no-op.
            self.cachevfs = nullvfs()
        else:
            usercache = lfutil._usercachedir(repo.ui, b'lfs')
            self.cachevfs = lfsvfs(usercache)
        self.ui = repo.ui

    def open(self, oid):
        """Open a read-only file descriptor to the named blob, in either the
        usercache or the local store."""
        return open(self.path(oid), 'rb')

    def path(self, oid):
        """Build the path for the given blob ``oid``.

        If the blob exists locally, the path may point to either the usercache
        or the local store. If it doesn't, it will point to the local store.
        This is meant for situations where existing code that isn't LFS aware
        needs to open a blob. Generally, prefer the ``open`` method on this
        class.
        """
        # The usercache is the most likely place to hold the file. Commit will
        # write to both it and the local store, as will anything that downloads
        # the blobs. However, things like clone without an update won't
        # populate the local store. For an init + push of a local clone,
        # the usercache is the only place it _could_ be. If not present, the
        # missing file msg here will indicate the local repo, not the usercache.
        if self.cachevfs.exists(oid):
            return self.cachevfs.join(oid)
        return self.vfs.join(oid)

    def download(self, oid, src, content_length):
        """Read the blob from the remote source in chunks, verify the content,
        and write to this local blobstore."""
        hasher = hashlib.sha256()
        received = 0

        with self.vfs(oid, b'wb', atomictemp=True) as fp:
            for chunk in util.filechunkiter(src, size=1048576):
                fp.write(chunk)
                hasher.update(chunk)
                received += len(chunk)

            # If the server advertised a length longer than what we actually
            # received, then we should expect that the server crashed while
            # producing the response (but the server has no way of telling us
            # that), and we really don't need to try to write the response to
            # the localstore, because it's not going to match the expected.
            # The server also uses this method to store data uploaded by the
            # client, so if this happens on the server side, it's possible
            # that the client crashed or an antivirus interfered with the
            # upload.
            if content_length is not None and int(content_length) != received:
                msg = (
                    b"Response length (%d) does not match Content-Length "
                    b"header (%d) for %s"
                )
                raise LfsRemoteError(
                    _(msg) % (received, int(content_length), oid)
                )

            realoid = hex(hasher.digest())
            if realoid != oid:
                raise LfsCorruptionError(
                    _(b'corrupt remote lfs object: %s') % oid
                )

        self._linktousercache(oid)

    def write(self, oid, data):
        """Write blob to local blobstore.

        This should only be called from the filelog during a commit or similar.
        As such, there is no need to verify the data. Imports from a remote
        store must use ``download()`` instead."""
        with self.vfs(oid, b'wb', atomictemp=True) as fp:
            fp.write(data)

        self._linktousercache(oid)

    def linkfromusercache(self, oid):
        """Link blobs found in the user cache into this store.

        The server module needs to do this when it lets the client know not to
        upload the blob, to ensure it is always available in this store.
        Normally this is done implicitly when the client reads or writes the
        blob, but that doesn't happen when the server tells the client that it
        already has the blob.
        """
        if not isinstance(self.cachevfs, nullvfs) and not self.vfs.exists(oid):
            self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
            lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))

    def _linktousercache(self, oid):
        # XXX: should we verify the content of the cache, and hardlink back to
        # the local store on success, but truncate, write and link on failure?
        if not isinstance(
            self.cachevfs, nullvfs
        ) and not self.cachevfs.exists(oid):
            self.ui.note(_(b'lfs: adding %s to the usercache\n') % oid)
            lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))

    def read(self, oid, verify=True):
        """Read blob from local blobstore."""
        if self.vfs.exists(oid):
            self.ui.note(_(b'lfs: found %s in the local lfs store\n') % oid)
            return self._read(self.vfs, oid, verify)

        blob = self._read(self.cachevfs, oid, verify)

        # Even if revlog will verify the content, it needs to be verified
        # now before making the hardlink to avoid propagating corrupt blobs.
        # Don't abort if corruption is detected, because `hg verify` will
        # give more useful info about the corruption- simply don't add the
        # hardlink.
        if verify or hex(hashlib.sha256(blob).digest()) == oid:
            self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
            lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
        return blob

    def _read(self, vfs, oid, verify):
        """Read blob (after verifying) from the given store"""
        blob = vfs.read(oid)
        if verify:
            _verify(oid, blob)
        return blob

    def verify(self, oid):
        """Indicate whether or not the hash of the underlying file matches its
        name."""
        hasher = hashlib.sha256()

        with self.open(oid) as fp:
            for chunk in util.filechunkiter(fp, size=1048576):
                hasher.update(chunk)

        return oid == hex(hasher.digest())

    def has(self, oid):
        """Returns True if the local blobstore contains the requested blob,
        False otherwise."""
        return self.cachevfs.exists(oid) or self.vfs.exists(oid)
259
263
260
264
def _urlerrorreason(urlerror):
    """Create a friendly message for the given URLError to be used in an
    LfsRemoteError message.
    """
    # Prefer the wrapped exception when the URLError carries one.
    inst = urlerror
    if isinstance(urlerror.reason, Exception):
        inst = urlerror.reason

    if not util.safehasattr(inst, b'reason'):
        if getattr(inst, "strerror", None):
            # OSError-style errors carry a human readable strerror.
            return encoding.strtolocal(inst.strerror)
        # Last resort: stringify the original URLError.
        return stringutil.forcebytestr(urlerror)

    try:  # usually it is in the form (errno, strerror)
        reason = inst.reason.args[1]
    except (AttributeError, IndexError):
        # it might be anything, for example a string
        reason = inst.reason
    if isinstance(reason, str):
        # SSLError of Python 2.7.9 contains a unicode
        reason = encoding.unitolocal(reason)
    return reason
284
288
285
289
class lfsauthhandler(util.urlreq.basehandler):
    handler_order = 480  # Before HTTPDigestAuthHandler (== 490)

    def http_error_401(self, req, fp, code, msg, headers):
        """Enforces that any authentication performed is HTTP Basic
        Authentication. No authentication is also acceptable.
        """
        authreq = headers.get('www-authenticate', None)
        if not authreq:
            # No challenge at all is fine; let other handlers proceed.
            return None

        scheme = authreq.split()[0]
        if scheme.lower() != 'basic':
            # Any non-Basic scheme is rejected up front.
            msg = _(b'the server must support Basic Authentication')
            raise util.urlerr.httperror(
                req.get_full_url(),
                code,
                encoding.strfromlocal(msg),
                headers,
                fp,
            )
        return None
307
311
308
312
309 class _gitlfsremote:
313 class _gitlfsremote:
310 def __init__(self, repo, url):
314 def __init__(self, repo, url):
311 ui = repo.ui
315 ui = repo.ui
312 self.ui = ui
316 self.ui = ui
313 baseurl, authinfo = url.authinfo()
317 baseurl, authinfo = url.authinfo()
314 self.baseurl = baseurl.rstrip(b'/')
318 self.baseurl = baseurl.rstrip(b'/')
315 useragent = repo.ui.config(b'experimental', b'lfs.user-agent')
319 useragent = repo.ui.config(b'experimental', b'lfs.user-agent')
316 if not useragent:
320 if not useragent:
317 useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version()
321 useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version()
318 self.urlopener = urlmod.opener(ui, authinfo, useragent)
322 self.urlopener = urlmod.opener(ui, authinfo, useragent)
319 self.urlopener.add_handler(lfsauthhandler())
323 self.urlopener.add_handler(lfsauthhandler())
320 self.retry = ui.configint(b'lfs', b'retry')
324 self.retry = ui.configint(b'lfs', b'retry')
321
325
322 def writebatch(self, pointers, fromstore):
326 def writebatch(self, pointers, fromstore):
323 """Batch upload from local to remote blobstore."""
327 """Batch upload from local to remote blobstore."""
324 self._batch(_deduplicate(pointers), fromstore, b'upload')
328 self._batch(_deduplicate(pointers), fromstore, b'upload')
325
329
326 def readbatch(self, pointers, tostore):
330 def readbatch(self, pointers, tostore):
327 """Batch download from remote to local blostore."""
331 """Batch download from remote to local blostore."""
328 self._batch(_deduplicate(pointers), tostore, b'download')
332 self._batch(_deduplicate(pointers), tostore, b'download')
329
333
330 def _batchrequest(self, pointers, action):
334 def _batchrequest(self, pointers, action):
331 """Get metadata about objects pointed by pointers for given action
335 """Get metadata about objects pointed by pointers for given action
332
336
333 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
337 Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
334 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
338 See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
335 """
339 """
336 objects = [
340 objects = [
337 {'oid': pycompat.strurl(p.oid()), 'size': p.size()}
341 {'oid': pycompat.strurl(p.oid()), 'size': p.size()}
338 for p in pointers
342 for p in pointers
339 ]
343 ]
340 requestdata = pycompat.bytesurl(
344 requestdata = pycompat.bytesurl(
341 json.dumps(
345 json.dumps(
342 {
346 {
343 'objects': objects,
347 'objects': objects,
344 'operation': pycompat.strurl(action),
348 'operation': pycompat.strurl(action),
345 }
349 }
346 )
350 )
347 )
351 )
348 url = b'%s/objects/batch' % self.baseurl
352 url = b'%s/objects/batch' % self.baseurl
349 batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
353 batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
350 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
354 batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
351 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
355 batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
352 try:
356 try:
353 with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
357 with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
354 rawjson = rsp.read()
358 rawjson = rsp.read()
355 except util.urlerr.httperror as ex:
359 except util.urlerr.httperror as ex:
356 hints = {
360 hints = {
357 400: _(
361 400: _(
358 b'check that lfs serving is enabled on %s and "%s" is '
362 b'check that lfs serving is enabled on %s and "%s" is '
359 b'supported'
363 b'supported'
360 )
364 )
361 % (self.baseurl, action),
365 % (self.baseurl, action),
362 404: _(b'the "lfs.url" config may be used to override %s')
366 404: _(b'the "lfs.url" config may be used to override %s')
363 % self.baseurl,
367 % self.baseurl,
364 }
368 }
365 hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
369 hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
366 raise LfsRemoteError(
370 raise LfsRemoteError(
367 _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
371 _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
368 hint=hint,
372 hint=hint,
369 )
373 )
370 except util.urlerr.urlerror as ex:
374 except util.urlerr.urlerror as ex:
371 hint = (
375 hint = (
372 _(b'the "lfs.url" config may be used to override %s')
376 _(b'the "lfs.url" config may be used to override %s')
373 % self.baseurl
377 % self.baseurl
374 )
378 )
375 raise LfsRemoteError(
379 raise LfsRemoteError(
376 _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
380 _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
377 )
381 )
378 try:
382 try:
379 response = pycompat.json_loads(rawjson)
383 response = pycompat.json_loads(rawjson)
380 except ValueError:
384 except ValueError:
381 raise LfsRemoteError(
385 raise LfsRemoteError(
382 _(b'LFS server returns invalid JSON: %s')
386 _(b'LFS server returns invalid JSON: %s')
383 % rawjson.encode("utf-8")
387 % rawjson.encode("utf-8")
384 )
388 )
385
389
386 if self.ui.debugflag:
390 if self.ui.debugflag:
387 self.ui.debug(b'Status: %d\n' % rsp.status)
391 self.ui.debug(b'Status: %d\n' % rsp.status)
388 # lfs-test-server and hg serve return headers in different order
392 # lfs-test-server and hg serve return headers in different order
389 headers = pycompat.bytestr(rsp.info()).strip()
393 headers = pycompat.bytestr(rsp.info()).strip()
390 self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
394 self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))
391
395
392 if 'objects' in response:
396 if 'objects' in response:
393 response['objects'] = sorted(
397 response['objects'] = sorted(
394 response['objects'], key=lambda p: p['oid']
398 response['objects'], key=lambda p: p['oid']
395 )
399 )
396 self.ui.debug(
400 self.ui.debug(
397 b'%s\n'
401 b'%s\n'
398 % pycompat.bytesurl(
402 % pycompat.bytesurl(
399 json.dumps(
403 json.dumps(
400 response,
404 response,
401 indent=2,
405 indent=2,
402 separators=('', ': '),
406 separators=('', ': '),
403 sort_keys=True,
407 sort_keys=True,
404 )
408 )
405 )
409 )
406 )
410 )
407
411
408 def encodestr(x):
412 def encodestr(x):
409 if isinstance(x, str):
413 if isinstance(x, str):
410 return x.encode('utf-8')
414 return x.encode('utf-8')
411 return x
415 return x
412
416
413 return pycompat.rapply(encodestr, response)
417 return pycompat.rapply(encodestr, response)
414
418
415 def _checkforservererror(self, pointers, responses, action):
419 def _checkforservererror(self, pointers, responses, action):
416 """Scans errors from objects
420 """Scans errors from objects
417
421
418 Raises LfsRemoteError if any objects have an error"""
422 Raises LfsRemoteError if any objects have an error"""
419 for response in responses:
423 for response in responses:
420 # The server should return 404 when objects cannot be found. Some
424 # The server should return 404 when objects cannot be found. Some
421 # server implementation (ex. lfs-test-server) does not set "error"
425 # server implementation (ex. lfs-test-server) does not set "error"
422 # but just removes "download" from "actions". Treat that case
426 # but just removes "download" from "actions". Treat that case
423 # as the same as 404 error.
427 # as the same as 404 error.
424 if b'error' not in response:
428 if b'error' not in response:
425 if action == b'download' and action not in response.get(
429 if action == b'download' and action not in response.get(
426 b'actions', []
430 b'actions', []
427 ):
431 ):
428 code = 404
432 code = 404
429 else:
433 else:
430 continue
434 continue
431 else:
435 else:
432 # An error dict without a code doesn't make much sense, so
436 # An error dict without a code doesn't make much sense, so
433 # treat as a server error.
437 # treat as a server error.
434 code = response.get(b'error').get(b'code', 500)
438 code = response.get(b'error').get(b'code', 500)
435
439
436 ptrmap = {p.oid(): p for p in pointers}
440 ptrmap = {p.oid(): p for p in pointers}
437 p = ptrmap.get(response[b'oid'], None)
441 p = ptrmap.get(response[b'oid'], None)
438 if p:
442 if p:
439 filename = getattr(p, 'filename', b'unknown')
443 filename = getattr(p, 'filename', b'unknown')
440 errors = {
444 errors = {
441 404: b'The object does not exist',
445 404: b'The object does not exist',
442 410: b'The object was removed by the owner',
446 410: b'The object was removed by the owner',
443 422: b'Validation error',
447 422: b'Validation error',
444 500: b'Internal server error',
448 500: b'Internal server error',
445 }
449 }
446 msg = errors.get(code, b'status code %d' % code)
450 msg = errors.get(code, b'status code %d' % code)
447 raise LfsRemoteError(
451 raise LfsRemoteError(
448 _(b'LFS server error for "%s": %s') % (filename, msg)
452 _(b'LFS server error for "%s": %s') % (filename, msg)
449 )
453 )
450 else:
454 else:
451 raise LfsRemoteError(
455 raise LfsRemoteError(
452 _(b'LFS server error. Unsolicited response for oid %s')
456 _(b'LFS server error. Unsolicited response for oid %s')
453 % response[b'oid']
457 % response[b'oid']
454 )
458 )
455
459
456 def _extractobjects(self, response, pointers, action):
460 def _extractobjects(self, response, pointers, action):
457 """extract objects from response of the batch API
461 """extract objects from response of the batch API
458
462
459 response: parsed JSON object returned by batch API
463 response: parsed JSON object returned by batch API
460 return response['objects'] filtered by action
464 return response['objects'] filtered by action
461 raise if any object has an error
465 raise if any object has an error
462 """
466 """
463 # Scan errors from objects - fail early
467 # Scan errors from objects - fail early
464 objects = response.get(b'objects', [])
468 objects = response.get(b'objects', [])
465 self._checkforservererror(pointers, objects, action)
469 self._checkforservererror(pointers, objects, action)
466
470
467 # Filter objects with given action. Practically, this skips uploading
471 # Filter objects with given action. Practically, this skips uploading
468 # objects which exist in the server.
472 # objects which exist in the server.
469 filteredobjects = [
473 filteredobjects = [
470 o for o in objects if action in o.get(b'actions', [])
474 o for o in objects if action in o.get(b'actions', [])
471 ]
475 ]
472
476
473 return filteredobjects
477 return filteredobjects
474
478
    def _basictransfer(self, obj, action, localstore):
        """Download or upload a single object using basic transfer protocol

        obj: dict, an object description returned by batch API
        action: string, one of ['upload', 'download']
        localstore: blobstore.local

        Raises LfsRemoteError on HTTP/URL errors, and error.Abort if a local
        blob fails verification before upload.

        See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
basic-transfers.md
        """
        oid = obj[b'oid']
        # href/header come from the per-action section of the batch response.
        href = obj[b'actions'][action].get(b'href')
        headers = obj[b'actions'][action].get(b'header', {}).items()

        request = util.urlreq.request(pycompat.strurl(href))
        if action == b'upload':
            # If uploading blobs, read data from local blobstore.
            # Verify BEFORE opening the upload so a corrupt blob never
            # reaches the server.
            if not localstore.verify(oid):
                raise error.Abort(
                    _(b'detected corrupt lfs object: %s') % oid,
                    hint=_(b'run hg verify'),
                )

        for k, v in headers:
            request.add_header(pycompat.strurl(k), pycompat.strurl(v))

        try:
            if action == b'upload':
                # request.data is a file-like object; it is closed in the
                # finally block below, even if opening the connection fails.
                request.data = lfsuploadfile(self.ui, localstore.path(oid))
                request.get_method = lambda: 'PUT'
                request.add_header('Content-Type', 'application/octet-stream')
                request.add_header('Content-Length', request.data.length)

            with contextlib.closing(self.urlopener.open(request)) as res:
                contentlength = res.info().get(b"content-length")
                ui = self.ui  # Shorten debug lines
                if self.ui.debugflag:
                    ui.debug(b'Status: %d\n' % res.status)
                    # lfs-test-server and hg serve return headers in different
                    # order
                    headers = pycompat.bytestr(res.info()).strip()
                    ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))

                if action == b'download':
                    # If downloading blobs, store downloaded data to local
                    # blobstore
                    localstore.download(oid, res, contentlength)
                else:
                    # Upload: drain the server response (read in 1MB chunks)
                    # so the connection can be reused; log any body for debug.
                    blocks = []
                    while True:
                        data = res.read(1048576)
                        if not data:
                            break
                        blocks.append(data)

                    response = b"".join(blocks)
                    if response:
                        ui.debug(b'lfs %s response: %s' % (action, response))
        except util.urlerr.httperror as ex:
            if self.ui.debugflag:
                self.ui.debug(
                    b'%s: %s\n' % (oid, ex.read())
                )  # XXX: also bytes?
            raise LfsRemoteError(
                _(b'LFS HTTP error: %s (oid=%s, action=%s)')
                % (stringutil.forcebytestr(ex), oid, action)
            )
        except util.urlerr.urlerror as ex:
            # Hint with the full URL that was attempted, to aid debugging
            # misconfigured lfs.url / inferred endpoints.
            hint = _(b'attempted connection to %s') % pycompat.bytesurl(
                util.urllibcompat.getfullurl(request)
            )
            raise LfsRemoteError(
                _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
            )
        finally:
            # Close the upload file handle (if any) on every exit path.
            if request.data:
                request.data.close()
552
556
    def _batch(self, pointers, localstore, action):
        """Transfer a batch of objects and report progress.

        pointers: lfs pointer objects describing the blobs to move
        localstore: blobstore.local, source (upload) or sink (download)
        action: b'upload' or b'download'; anything else is a programming error

        Issues one batch request, then transfers each returned object,
        optionally in parallel via the worker pool.
        """
        if action not in [b'upload', b'download']:
            raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action)

        response = self._batchrequest(pointers, action)
        # Objects already present on the server are filtered out here.
        objects = self._extractobjects(response, pointers, action)
        total = sum(x.get(b'size', 0) for x in objects)
        # oid -> size map, used below to advance the progress bar per blob.
        sizes = {}
        for obj in objects:
            sizes[obj.get(b'oid')] = obj.get(b'size', 0)
        topic = {
            b'upload': _(b'lfs uploading'),
            b'download': _(b'lfs downloading'),
        }[action]
        if len(objects) > 1:
            self.ui.note(
                _(b'lfs: need to transfer %d objects (%s)\n')
                % (len(objects), util.bytecount(total))
            )

        def transfer(chunk):
            # Generator usable both directly and as a worker callable; yields
            # (1, oid) per completed blob so the consumer can track progress.
            for obj in chunk:
                objsize = obj.get(b'size', 0)
                if self.ui.verbose:
                    if action == b'download':
                        msg = _(b'lfs: downloading %s (%s)\n')
                    elif action == b'upload':
                        msg = _(b'lfs: uploading %s (%s)\n')
                    self.ui.note(
                        msg % (obj.get(b'oid'), util.bytecount(objsize))
                    )
                # Retry transient socket failures up to self.retry times per
                # object before giving up.
                retry = self.retry
                while True:
                    try:
                        self._basictransfer(obj, action, localstore)
                        yield 1, obj.get(b'oid')
                        break
                    except socket.error as ex:
                        if retry > 0:
                            self.ui.note(
                                _(b'lfs: failed: %r (remaining retry %d)\n')
                                % (stringutil.forcebytestr(ex), retry)
                            )
                            retry -= 1
                            continue
                        raise

        # Until https multiplexing gets sorted out
        if self.ui.configbool(b'experimental', b'lfs.worker-enable'):
            oids = worker.worker(
                self.ui,
                0.1,
                transfer,
                (),
                sorted(objects, key=lambda o: o.get(b'oid')),
            )
        else:
            oids = transfer(sorted(objects, key=lambda o: o.get(b'oid')))

        # Consuming ``oids`` here is what actually drives the transfers when
        # the generator path (no worker) is taken.
        with self.ui.makeprogress(
            topic, unit=_(b"bytes"), total=total
        ) as progress:
            progress.update(0)
            processed = 0
            blobs = 0
            for _one, oid in oids:
                processed += sizes[oid]
                blobs += 1
                progress.update(processed)
                self.ui.note(_(b'lfs: processed: %s\n') % oid)

        if blobs > 0:
            if action == b'upload':
                self.ui.status(
                    _(b'lfs: uploaded %d files (%s)\n')
                    % (blobs, util.bytecount(processed))
                )
            elif action == b'download':
                self.ui.status(
                    _(b'lfs: downloaded %d files (%s)\n')
                    % (blobs, util.bytecount(processed))
                )
635
639
636 def __del__(self):
640 def __del__(self):
637 # copied from mercurial/httppeer.py
641 # copied from mercurial/httppeer.py
638 urlopener = getattr(self, 'urlopener', None)
642 urlopener = getattr(self, 'urlopener', None)
639 if urlopener:
643 if urlopener:
640 for h in urlopener.handlers:
644 for h in urlopener.handlers:
641 h.close()
645 h.close()
642 getattr(h, "close_all", lambda: None)()
646 getattr(h, "close_all", lambda: None)()
643
647
644
648
class _dummyremote:
    """Dummy store storing blobs to temp directory."""

    def __init__(self, repo, url):
        # Blobs live under .hg/lfs/<url.path> inside the repository.
        storepath = repo.vfs.join(b'lfs', url.path)
        self.vfs = lfsvfs(storepath)

    def writebatch(self, pointers, fromstore):
        """Copy each unique blob from ``fromstore`` into this store."""
        for pointer in _deduplicate(pointers):
            # Verified read: refuses to copy a corrupt local blob.
            blob = fromstore.read(pointer.oid(), verify=True)
            with self.vfs(pointer.oid(), b'wb', atomictemp=True) as out:
                out.write(blob)

    def readbatch(self, pointers, tostore):
        """Stream each unique blob from this store into ``tostore``."""
        for pointer in _deduplicate(pointers):
            with self.vfs(pointer.oid(), b'rb') as src:
                tostore.download(pointer.oid(), src, None)
662
666
663
667
class _nullremote:
    """Null store storing blobs to /dev/null."""

    def __init__(self, repo, url):
        # Intentionally stateless; nothing to configure.
        pass

    def writebatch(self, pointers, fromstore):
        """Discard the blobs; nothing is transferred anywhere."""

    def readbatch(self, pointers, tostore):
        """Fetch nothing; this store never holds any blobs."""
675
679
676
680
class _promptremote:
    """Prompt user to set lfs.url when accessed."""

    def __init__(self, repo, url):
        # No endpoint is known; any transfer attempt aborts below.
        pass

    def writebatch(self, pointers, fromstore, ui=None):
        self._abort_unconfigured()

    def readbatch(self, pointers, tostore, ui=None):
        self._abort_unconfigured()

    def _abort_unconfigured(self):
        # Shared failure path for both transfer directions.
        raise error.Abort(_(b'lfs.url needs to be configured'))
691
695
692
696
# Map a URL scheme (from lfs.url or the inferred endpoint) to the remote
# store class handling it.  The ``None`` key covers the case where no URL
# could be determined at all; that store aborts with a prompt to configure
# ``lfs.url``.
_storemap = {
    b'https': _gitlfsremote,
    b'http': _gitlfsremote,
    b'file': _dummyremote,
    b'null': _nullremote,
    None: _promptremote,
}
700
704
701
705
def _deduplicate(pointers):
    """Remove any duplicate oids that exist in the list

    Keeps exactly one pointer per oid, in a deterministic order, by keying
    a ``util.sortdict`` on the oid.
    """
    unique = util.sortdict()
    for pointer in pointers:
        unique[pointer.oid()] = pointer
    return unique.values()
708
712
709
713
def _verify(oid, content):
    """Raise LfsCorruptionError unless ``content`` hashes to ``oid``.

    ``oid`` is the expected SHA-256 of the blob, as lowercase hex bytes.
    """
    actual = hex(hashlib.sha256(content).digest())
    if actual == oid:
        return
    raise LfsCorruptionError(
        _(b'detected corrupt lfs object: %s') % oid,
        hint=_(b'run hg verify'),
    )
717
721
718
722
def remote(repo, remote=None):
    """remotestore factory. return a store in _storemap depending on config

    If ``lfs.url`` is specified, use that remote endpoint. Otherwise, try to
    infer the endpoint, based on the remote repository using the same path
    adjustments as git. As an extension, 'http' is supported as well so that
    ``hg serve`` works out of the box.

    https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md
    """
    lfsurl = repo.ui.config(b'lfs', b'url')
    url = urlutil.url(lfsurl or b'')
    if lfsurl is None:
        if remote:
            path = remote
        elif util.safehasattr(repo, b'_subtoppath'):
            # The pull command sets this during the optional update phase, which
            # tells exactly where the pull originated, whether 'paths.default'
            # or explicit.
            path = repo._subtoppath
        else:
            # TODO: investigate 'paths.remote:lfsurl' style path customization,
            # and fall back to inferring from 'paths.remote' if unspecified.
            path = repo.ui.config(b'paths', b'default') or b''

        defaulturl = urlutil.url(path)

        # TODO: support local paths as well.
        # TODO: consider the ssh -> https transformation that git applies
        if defaulturl.scheme in (b'http', b'https'):
            # Ensure exactly one '/' before the git-style suffix.  The old
            # check compared ``path[:-1]`` (everything but the last byte) to
            # b'/', so paths already ending in '/' gained a second slash;
            # ``path[-1:]`` is the intended ends-with test.
            if defaulturl.path and defaulturl.path[-1:] != b'/':
                defaulturl.path += b'/'
            defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs'

            url = urlutil.url(bytes(defaulturl))
            repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url)

    scheme = url.scheme
    if scheme not in _storemap:
        raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme)
    return _storemap[scheme](repo, url)
760
764
761
765
class LfsRemoteError(error.StorageError):
    """Raised when a transfer to/from the remote LFS server fails.

    Carries the HTTP or URL error details (see ``_basictransfer``)."""

    pass
764
768
765
769
class LfsCorruptionError(error.Abort):
    """Raised when a corrupt blob is detected, aborting an operation

    It exists to allow specialized handling on the server side.  Locally it
    is raised by ``_verify()`` when a blob's SHA-256 does not match its oid.
    """
General Comments 0
You need to be logged in to leave comments. Login now