lfs: fix interpolation of int and %s in an exception case...
Matt Harbison
r50421:192949b6 stable
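
The change itself is a one-character fix in local.download(): the error message built when a response is shorter than its Content-Length header used %s for size, which is an int. Under Python 3, bytes %-formatting rejects ints for %s, so hitting this error path raised a TypeError instead of the intended LfsRemoteError. A minimal standalone sketch of the failure mode (plain Python, not part of the diff):

    size = 12
    content_length = 34

    old_msg = (
        b"Response length (%s) does not match Content-Length "
        b"header (%d): likely server-side crash"
    )
    new_msg = (
        b"Response length (%d) does not match Content-Length "
        b"header (%d): likely server-side crash"
    )

    try:
        old_msg % (size, content_length)  # %s applied to an int
    except TypeError as ex:
        print('before the fix:', ex)  # "%b requires a bytes-like object ..."

    print(new_msg % (size, content_length))  # after the fix: formats cleanly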
@@ -1,769 +1,769 @@ hgext/lfs/blobstore.py
# blobstore.py - local and remote (speaking Git-LFS protocol) blob storages
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.


import contextlib
import errno
import hashlib
import json
import os
import re
import socket

from mercurial.i18n import _
from mercurial.pycompat import getattr
from mercurial.node import hex

from mercurial import (
    encoding,
    error,
    httpconnection as httpconnectionmod,
    pathutil,
    pycompat,
    url as urlmod,
    util,
    vfs as vfsmod,
    worker,
)

from mercurial.utils import (
    stringutil,
    urlutil,
)

from ..largefiles import lfutil

# 64 bytes for SHA256
_lfsre = re.compile(br'\A[a-f0-9]{64}\Z')


class lfsvfs(vfsmod.vfs):
    def join(self, path):
        """split the path at first two characters, like: XX/XXXXX..."""
        if not _lfsre.match(path):
            raise error.ProgrammingError(b'unexpected lfs path: %s' % path)
        return super(lfsvfs, self).join(path[0:2], path[2:])

    def walk(self, path=None, onerror=None):
        """Yield (dirpath, [], oids) tuple for blobs under path

        Oids only exist in the root of this vfs, so dirpath is always ''.
        """
        root = os.path.normpath(self.base)
        # when dirpath == root, dirpath[prefixlen:] becomes empty
        # because len(dirpath) < prefixlen.
        prefixlen = len(pathutil.normasprefix(root))
        oids = []

        for dirpath, dirs, files in os.walk(
            self.reljoin(self.base, path or b''), onerror=onerror
        ):
            dirpath = dirpath[prefixlen:]

            # Silently skip unexpected files and directories
            if len(dirpath) == 2:
                oids.extend(
                    [dirpath + f for f in files if _lfsre.match(dirpath + f)]
                )

        yield (b'', [], oids)


class nullvfs(lfsvfs):
    def __init__(self):
        pass

    def exists(self, oid):
        return False

    def read(self, oid):
        # store.read() calls into here if the blob doesn't exist in its
        # self.vfs. Raise the same error as a normal vfs when asked to read a
        # file that doesn't exist. The only difference is the full file path
        # isn't available in the error.
        raise IOError(
            errno.ENOENT,
            pycompat.sysstr(b'%s: No such file or directory' % oid),
        )

    def walk(self, path=None, onerror=None):
        return (b'', [], [])

    def write(self, oid, data):
        pass


class lfsuploadfile(httpconnectionmod.httpsendfile):
    """a file-like object that supports keepalive."""

    def __init__(self, ui, filename):
        super(lfsuploadfile, self).__init__(ui, filename, b'rb')
        self.read = self._data.read

    def _makeprogress(self):
        return None  # progress is handled by the worker client


class local:
    """Local blobstore for large file contents.

    This blobstore is used both as a cache and as a staging area for large blobs
    to be uploaded to the remote blobstore.
    """

    def __init__(self, repo):
        fullpath = repo.svfs.join(b'lfs/objects')
        self.vfs = lfsvfs(fullpath)

        if repo.ui.configbool(b'experimental', b'lfs.disableusercache'):
            self.cachevfs = nullvfs()
        else:
            usercache = lfutil._usercachedir(repo.ui, b'lfs')
            self.cachevfs = lfsvfs(usercache)
        self.ui = repo.ui

    def open(self, oid):
        """Open a read-only file descriptor to the named blob, in either the
        usercache or the local store."""
        return open(self.path(oid), 'rb')

    def path(self, oid):
        """Build the path for the given blob ``oid``.

        If the blob exists locally, the path may point to either the usercache
        or the local store. If it doesn't, it will point to the local store.
        This is meant for situations where existing code that isn't LFS aware
        needs to open a blob. Generally, prefer the ``open`` method on this
        class.
        """
        # The usercache is the most likely place to hold the file. Commit will
        # write to both it and the local store, as will anything that downloads
        # the blobs. However, things like clone without an update won't
        # populate the local store. For an init + push of a local clone,
        # the usercache is the only place it _could_ be. If not present, the
        # missing file msg here will indicate the local repo, not the usercache.
        if self.cachevfs.exists(oid):
            return self.cachevfs.join(oid)

        return self.vfs.join(oid)

    def download(self, oid, src, content_length):
        """Read the blob from the remote source in chunks, verify the content,
        and write to this local blobstore."""
        sha256 = hashlib.sha256()
        size = 0

        with self.vfs(oid, b'wb', atomictemp=True) as fp:
            for chunk in util.filechunkiter(src, size=1048576):
                fp.write(chunk)
                sha256.update(chunk)
                size += len(chunk)

            # If the server advertised a length longer than what we actually
            # received, then we should expect that the server crashed while
            # producing the response (but the server has no way of telling us
            # that), and we really don't need to try to write the response to
            # the localstore, because it's not going to match the expected.
            if content_length is not None and int(content_length) != size:
                msg = (
-                    b"Response length (%s) does not match Content-Length "
+                    b"Response length (%d) does not match Content-Length "
                    b"header (%d): likely server-side crash"
                )
                raise LfsRemoteError(_(msg) % (size, int(content_length)))

            realoid = hex(sha256.digest())
            if realoid != oid:
                raise LfsCorruptionError(
                    _(b'corrupt remote lfs object: %s') % oid
                )

        self._linktousercache(oid)

    def write(self, oid, data):
        """Write blob to local blobstore.

        This should only be called from the filelog during a commit or similar.
        As such, there is no need to verify the data. Imports from a remote
        store must use ``download()`` instead."""
        with self.vfs(oid, b'wb', atomictemp=True) as fp:
            fp.write(data)

        self._linktousercache(oid)

    def linkfromusercache(self, oid):
        """Link blobs found in the user cache into this store.

        The server module needs to do this when it lets the client know not to
        upload the blob, to ensure it is always available in this store.
        Normally this is done implicitly when the client reads or writes the
        blob, but that doesn't happen when the server tells the client that it
        already has the blob.
        """
        if not isinstance(self.cachevfs, nullvfs) and not self.vfs.exists(oid):
            self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
            lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))

    def _linktousercache(self, oid):
        # XXX: should we verify the content of the cache, and hardlink back to
        # the local store on success, but truncate, write and link on failure?
        if not self.cachevfs.exists(oid) and not isinstance(
            self.cachevfs, nullvfs
        ):
            self.ui.note(_(b'lfs: adding %s to the usercache\n') % oid)
            lfutil.link(self.vfs.join(oid), self.cachevfs.join(oid))

    def read(self, oid, verify=True):
        """Read blob from local blobstore."""
        if not self.vfs.exists(oid):
            blob = self._read(self.cachevfs, oid, verify)

            # Even if revlog will verify the content, it needs to be verified
            # now before making the hardlink to avoid propagating corrupt blobs.
            # Don't abort if corruption is detected, because `hg verify` will
            # give more useful info about the corruption- simply don't add the
            # hardlink.
            if verify or hex(hashlib.sha256(blob).digest()) == oid:
                self.ui.note(_(b'lfs: found %s in the usercache\n') % oid)
                lfutil.link(self.cachevfs.join(oid), self.vfs.join(oid))
        else:
            self.ui.note(_(b'lfs: found %s in the local lfs store\n') % oid)
            blob = self._read(self.vfs, oid, verify)
        return blob

    def _read(self, vfs, oid, verify):
        """Read blob (after verifying) from the given store"""
        blob = vfs.read(oid)
        if verify:
            _verify(oid, blob)
        return blob

    def verify(self, oid):
        """Indicate whether or not the hash of the underlying file matches its
        name."""
        sha256 = hashlib.sha256()

        with self.open(oid) as fp:
            for chunk in util.filechunkiter(fp, size=1048576):
                sha256.update(chunk)

        return oid == hex(sha256.digest())

    def has(self, oid):
        """Returns True if the local blobstore contains the requested blob,
        False otherwise."""
        return self.cachevfs.exists(oid) or self.vfs.exists(oid)


def _urlerrorreason(urlerror):
    """Create a friendly message for the given URLError to be used in an
    LfsRemoteError message.
    """
    inst = urlerror

    if isinstance(urlerror.reason, Exception):
        inst = urlerror.reason

    if util.safehasattr(inst, b'reason'):
        try:  # usually it is in the form (errno, strerror)
            reason = inst.reason.args[1]
        except (AttributeError, IndexError):
            # it might be anything, for example a string
            reason = inst.reason
        if isinstance(reason, str):
            # SSLError of Python 2.7.9 contains a unicode
            reason = encoding.unitolocal(reason)
        return reason
    elif getattr(inst, "strerror", None):
        return encoding.strtolocal(inst.strerror)
    else:
        return stringutil.forcebytestr(urlerror)


class lfsauthhandler(util.urlreq.basehandler):
    handler_order = 480  # Before HTTPDigestAuthHandler (== 490)

    def http_error_401(self, req, fp, code, msg, headers):
        """Enforces that any authentication performed is HTTP Basic
        Authentication. No authentication is also acceptable.
        """
        authreq = headers.get('www-authenticate', None)
        if authreq:
            scheme = authreq.split()[0]

            if scheme.lower() != 'basic':
                msg = _(b'the server must support Basic Authentication')
                raise util.urlerr.httperror(
                    req.get_full_url(),
                    code,
                    encoding.strfromlocal(msg),
                    headers,
                    fp,
                )
        return None


class _gitlfsremote:
    def __init__(self, repo, url):
        ui = repo.ui
        self.ui = ui
        baseurl, authinfo = url.authinfo()
        self.baseurl = baseurl.rstrip(b'/')
        useragent = repo.ui.config(b'experimental', b'lfs.user-agent')
        if not useragent:
            useragent = b'git-lfs/2.3.4 (Mercurial %s)' % util.version()
        self.urlopener = urlmod.opener(ui, authinfo, useragent)
        self.urlopener.add_handler(lfsauthhandler())
        self.retry = ui.configint(b'lfs', b'retry')

    def writebatch(self, pointers, fromstore):
        """Batch upload from local to remote blobstore."""
        self._batch(_deduplicate(pointers), fromstore, b'upload')

    def readbatch(self, pointers, tostore):
        """Batch download from remote to local blobstore."""
        self._batch(_deduplicate(pointers), tostore, b'download')

    def _batchrequest(self, pointers, action):
        """Get metadata about objects pointed by pointers for given action

        Return decoded JSON object like {'objects': [{'oid': '', 'size': 1}]}
        See https://github.com/git-lfs/git-lfs/blob/master/docs/api/batch.md
        """
        objects = [
            {'oid': pycompat.strurl(p.oid()), 'size': p.size()}
            for p in pointers
        ]
        requestdata = pycompat.bytesurl(
            json.dumps(
                {
                    'objects': objects,
                    'operation': pycompat.strurl(action),
                }
            )
        )
        url = b'%s/objects/batch' % self.baseurl
        batchreq = util.urlreq.request(pycompat.strurl(url), data=requestdata)
        batchreq.add_header('Accept', 'application/vnd.git-lfs+json')
        batchreq.add_header('Content-Type', 'application/vnd.git-lfs+json')
        try:
            with contextlib.closing(self.urlopener.open(batchreq)) as rsp:
                rawjson = rsp.read()
        except util.urlerr.httperror as ex:
            hints = {
                400: _(
                    b'check that lfs serving is enabled on %s and "%s" is '
                    b'supported'
                )
                % (self.baseurl, action),
                404: _(b'the "lfs.url" config may be used to override %s')
                % self.baseurl,
            }
            hint = hints.get(ex.code, _(b'api=%s, action=%s') % (url, action))
            raise LfsRemoteError(
                _(b'LFS HTTP error: %s') % stringutil.forcebytestr(ex),
                hint=hint,
            )
        except util.urlerr.urlerror as ex:
            hint = (
                _(b'the "lfs.url" config may be used to override %s')
                % self.baseurl
            )
            raise LfsRemoteError(
                _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
            )
        try:
            response = pycompat.json_loads(rawjson)
        except ValueError:
            raise LfsRemoteError(
                _(b'LFS server returns invalid JSON: %s')
                % rawjson.encode("utf-8")
            )

        if self.ui.debugflag:
            self.ui.debug(b'Status: %d\n' % rsp.status)
            # lfs-test-server and hg serve return headers in different order
            headers = pycompat.bytestr(rsp.info()).strip()
            self.ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))

            if 'objects' in response:
                response['objects'] = sorted(
                    response['objects'], key=lambda p: p['oid']
                )
            self.ui.debug(
                b'%s\n'
                % pycompat.bytesurl(
                    json.dumps(
                        response,
                        indent=2,
                        separators=('', ': '),
                        sort_keys=True,
                    )
                )
            )

        def encodestr(x):
            if isinstance(x, str):
                return x.encode('utf-8')
            return x

        return pycompat.rapply(encodestr, response)

    def _checkforservererror(self, pointers, responses, action):
        """Scans errors from objects

        Raises LfsRemoteError if any objects have an error"""
        for response in responses:
            # The server should return 404 when objects cannot be found. Some
            # server implementation (ex. lfs-test-server) does not set "error"
            # but just removes "download" from "actions". Treat that case
            # as the same as 404 error.
            if b'error' not in response:
                if action == b'download' and action not in response.get(
                    b'actions', []
                ):
                    code = 404
                else:
                    continue
            else:
                # An error dict without a code doesn't make much sense, so
                # treat as a server error.
                code = response.get(b'error').get(b'code', 500)

            ptrmap = {p.oid(): p for p in pointers}
            p = ptrmap.get(response[b'oid'], None)
            if p:
                filename = getattr(p, 'filename', b'unknown')
                errors = {
                    404: b'The object does not exist',
                    410: b'The object was removed by the owner',
                    422: b'Validation error',
                    500: b'Internal server error',
                }
                msg = errors.get(code, b'status code %d' % code)
                raise LfsRemoteError(
                    _(b'LFS server error for "%s": %s') % (filename, msg)
                )
            else:
                raise LfsRemoteError(
                    _(b'LFS server error. Unsolicited response for oid %s')
                    % response[b'oid']
                )

    def _extractobjects(self, response, pointers, action):
        """extract objects from response of the batch API

        response: parsed JSON object returned by batch API
        return response['objects'] filtered by action
        raise if any object has an error
        """
        # Scan errors from objects - fail early
        objects = response.get(b'objects', [])
        self._checkforservererror(pointers, objects, action)

        # Filter objects with given action. Practically, this skips uploading
        # objects which exist in the server.
        filteredobjects = [
            o for o in objects if action in o.get(b'actions', [])
        ]

        return filteredobjects

    def _basictransfer(self, obj, action, localstore):
        """Download or upload a single object using basic transfer protocol

        obj: dict, an object description returned by batch API
        action: string, one of ['upload', 'download']
        localstore: blobstore.local

        See https://github.com/git-lfs/git-lfs/blob/master/docs/api/\
basic-transfers.md
        """
        oid = obj[b'oid']
        href = obj[b'actions'][action].get(b'href')
        headers = obj[b'actions'][action].get(b'header', {}).items()

        request = util.urlreq.request(pycompat.strurl(href))
        if action == b'upload':
            # If uploading blobs, read data from local blobstore.
            if not localstore.verify(oid):
                raise error.Abort(
                    _(b'detected corrupt lfs object: %s') % oid,
                    hint=_(b'run hg verify'),
                )

        for k, v in headers:
            request.add_header(pycompat.strurl(k), pycompat.strurl(v))

        try:
            if action == b'upload':
                request.data = lfsuploadfile(self.ui, localstore.path(oid))
                request.get_method = lambda: 'PUT'
                request.add_header('Content-Type', 'application/octet-stream')
                request.add_header('Content-Length', request.data.length)

            with contextlib.closing(self.urlopener.open(request)) as res:
                contentlength = res.info().get(b"content-length")
                ui = self.ui  # Shorten debug lines
                if self.ui.debugflag:
                    ui.debug(b'Status: %d\n' % res.status)
                    # lfs-test-server and hg serve return headers in different
                    # order
                    headers = pycompat.bytestr(res.info()).strip()
                    ui.debug(b'%s\n' % b'\n'.join(sorted(headers.splitlines())))

                if action == b'download':
                    # If downloading blobs, store downloaded data to local
                    # blobstore
                    localstore.download(oid, res, contentlength)
                else:
                    blocks = []
                    while True:
                        data = res.read(1048576)
                        if not data:
                            break
                        blocks.append(data)

                    response = b"".join(blocks)
                    if response:
                        ui.debug(b'lfs %s response: %s' % (action, response))
        except util.urlerr.httperror as ex:
            if self.ui.debugflag:
                self.ui.debug(
                    b'%s: %s\n' % (oid, ex.read())
                )  # XXX: also bytes?
            raise LfsRemoteError(
                _(b'LFS HTTP error: %s (oid=%s, action=%s)')
                % (stringutil.forcebytestr(ex), oid, action)
            )
        except util.urlerr.urlerror as ex:
            hint = _(b'attempted connection to %s') % pycompat.bytesurl(
                util.urllibcompat.getfullurl(request)
            )
            raise LfsRemoteError(
                _(b'LFS error: %s') % _urlerrorreason(ex), hint=hint
            )
        finally:
            if request.data:
                request.data.close()

    def _batch(self, pointers, localstore, action):
        if action not in [b'upload', b'download']:
            raise error.ProgrammingError(b'invalid Git-LFS action: %s' % action)

        response = self._batchrequest(pointers, action)
        objects = self._extractobjects(response, pointers, action)
        total = sum(x.get(b'size', 0) for x in objects)
        sizes = {}
        for obj in objects:
            sizes[obj.get(b'oid')] = obj.get(b'size', 0)
        topic = {
            b'upload': _(b'lfs uploading'),
            b'download': _(b'lfs downloading'),
        }[action]
        if len(objects) > 1:
            self.ui.note(
                _(b'lfs: need to transfer %d objects (%s)\n')
                % (len(objects), util.bytecount(total))
            )

        def transfer(chunk):
            for obj in chunk:
                objsize = obj.get(b'size', 0)
                if self.ui.verbose:
                    if action == b'download':
                        msg = _(b'lfs: downloading %s (%s)\n')
                    elif action == b'upload':
                        msg = _(b'lfs: uploading %s (%s)\n')
                    self.ui.note(
                        msg % (obj.get(b'oid'), util.bytecount(objsize))
                    )
                retry = self.retry
                while True:
                    try:
                        self._basictransfer(obj, action, localstore)
                        yield 1, obj.get(b'oid')
                        break
                    except socket.error as ex:
                        if retry > 0:
                            self.ui.note(
                                _(b'lfs: failed: %r (remaining retry %d)\n')
                                % (stringutil.forcebytestr(ex), retry)
                            )
                            retry -= 1
                            continue
                        raise

        # Until https multiplexing gets sorted out
        if self.ui.configbool(b'experimental', b'lfs.worker-enable'):
            oids = worker.worker(
                self.ui,
                0.1,
                transfer,
                (),
                sorted(objects, key=lambda o: o.get(b'oid')),
            )
        else:
            oids = transfer(sorted(objects, key=lambda o: o.get(b'oid')))

        with self.ui.makeprogress(
            topic, unit=_(b"bytes"), total=total
        ) as progress:
            progress.update(0)
            processed = 0
            blobs = 0
            for _one, oid in oids:
                processed += sizes[oid]
                blobs += 1
                progress.update(processed)
                self.ui.note(_(b'lfs: processed: %s\n') % oid)

        if blobs > 0:
            if action == b'upload':
                self.ui.status(
                    _(b'lfs: uploaded %d files (%s)\n')
                    % (blobs, util.bytecount(processed))
                )
            elif action == b'download':
                self.ui.status(
                    _(b'lfs: downloaded %d files (%s)\n')
                    % (blobs, util.bytecount(processed))
                )

    def __del__(self):
        # copied from mercurial/httppeer.py
        urlopener = getattr(self, 'urlopener', None)
        if urlopener:
            for h in urlopener.handlers:
                h.close()
                getattr(h, "close_all", lambda: None)()


class _dummyremote:
    """Dummy store storing blobs to temp directory."""

    def __init__(self, repo, url):
        fullpath = repo.vfs.join(b'lfs', url.path)
        self.vfs = lfsvfs(fullpath)

    def writebatch(self, pointers, fromstore):
        for p in _deduplicate(pointers):
            content = fromstore.read(p.oid(), verify=True)
            with self.vfs(p.oid(), b'wb', atomictemp=True) as fp:
                fp.write(content)

    def readbatch(self, pointers, tostore):
        for p in _deduplicate(pointers):
            with self.vfs(p.oid(), b'rb') as fp:
                tostore.download(p.oid(), fp, None)


class _nullremote:
    """Null store storing blobs to /dev/null."""

    def __init__(self, repo, url):
        pass

    def writebatch(self, pointers, fromstore):
        pass

    def readbatch(self, pointers, tostore):
        pass


class _promptremote:
    """Prompt user to set lfs.url when accessed."""

    def __init__(self, repo, url):
        pass

    def writebatch(self, pointers, fromstore, ui=None):
        self._prompt()

    def readbatch(self, pointers, tostore, ui=None):
        self._prompt()

    def _prompt(self):
        raise error.Abort(_(b'lfs.url needs to be configured'))


_storemap = {
    b'https': _gitlfsremote,
    b'http': _gitlfsremote,
    b'file': _dummyremote,
    b'null': _nullremote,
    None: _promptremote,
}


def _deduplicate(pointers):
    """Remove any duplicate oids that exist in the list"""
    reduced = util.sortdict()
    for p in pointers:
        reduced[p.oid()] = p
    return reduced.values()


def _verify(oid, content):
    realoid = hex(hashlib.sha256(content).digest())
    if realoid != oid:
        raise LfsCorruptionError(
            _(b'detected corrupt lfs object: %s') % oid,
            hint=_(b'run hg verify'),
        )


def remote(repo, remote=None):
    """remotestore factory. return a store in _storemap depending on config

    If ``lfs.url`` is specified, use that remote endpoint. Otherwise, try to
    infer the endpoint, based on the remote repository using the same path
    adjustments as git. As an extension, 'http' is supported as well so that
    ``hg serve`` works out of the box.

    https://github.com/git-lfs/git-lfs/blob/master/docs/api/server-discovery.md
    """
    lfsurl = repo.ui.config(b'lfs', b'url')
    url = urlutil.url(lfsurl or b'')
    if lfsurl is None:
        if remote:
            path = remote
        elif util.safehasattr(repo, b'_subtoppath'):
            # The pull command sets this during the optional update phase, which
            # tells exactly where the pull originated, whether 'paths.default'
            # or explicit.
            path = repo._subtoppath
        else:
            # TODO: investigate 'paths.remote:lfsurl' style path customization,
            # and fall back to inferring from 'paths.remote' if unspecified.
            path = repo.ui.config(b'paths', b'default') or b''

        defaulturl = urlutil.url(path)

        # TODO: support local paths as well.
        # TODO: consider the ssh -> https transformation that git applies
        if defaulturl.scheme in (b'http', b'https'):
            if defaulturl.path and defaulturl.path[:-1] != b'/':
                defaulturl.path += b'/'
            defaulturl.path = (defaulturl.path or b'') + b'.git/info/lfs'

        url = urlutil.url(bytes(defaulturl))
        repo.ui.note(_(b'lfs: assuming remote store: %s\n') % url)

    scheme = url.scheme
    if scheme not in _storemap:
        raise error.Abort(_(b'lfs: unknown url scheme: %s') % scheme)
    return _storemap[scheme](repo, url)


class LfsRemoteError(error.StorageError):
    pass


class LfsCorruptionError(error.Abort):
    """Raised when a corrupt blob is detected, aborting an operation

    It exists to allow specialized handling on the server side."""
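
As a closing illustration, here is a hedged, self-contained mirror of the length check this commit touches; check_download and its use of ValueError are hypothetical stand-ins for local.download() and LfsRemoteError, not code from the diff:

    import hashlib


    def check_download(oid, chunks, content_length):
        # Accumulate the hash and byte count the way local.download() does.
        sha256 = hashlib.sha256()
        size = 0
        for chunk in chunks:
            sha256.update(chunk)
            size += len(chunk)
        # The corrected message uses %d for both ints; with %s the
        # formatting itself raised TypeError before the intended error
        # could be raised.
        if content_length is not None and int(content_length) != size:
            raise ValueError(
                b"Response length (%d) does not match Content-Length "
                b"header (%d): likely server-side crash"
                % (size, int(content_length))
            )
        return sha256.hexdigest() == oid

Feeding it a truncated stream, e.g. check_download('...', [b'abc'], 5), now raises the formatted length-mismatch error rather than a TypeError.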