##// END OF EJS Templates
largefiles: don't break existing tests (syntax error, bad imports)
Greg Ward -
r15188:8e115063 default
parent child Browse files
Show More
@@ -1,497 +1,497 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''largefiles utility code: must not import other modules in this package.'''
9 '''largefiles utility code: must not import other modules in this package.'''
10
10
11 import os
11 import os
12 import errno
12 import errno
13 import inspect
13 import inspect
14 import shutil
14 import shutil
15 import stat
15 import stat
16 import hashlib
16 import hashlib
17
17
18 from mercurial import cmdutil, dirstate, httpconnection, match as match_, \
18 from mercurial import cmdutil, dirstate, httpconnection, match as match_, \
19 url as url_, util
19 url as url_, util
20 from mercurial.i18n import _
20 from mercurial.i18n import _
21
21
22 try:
22 try:
23 from mercurial import scmutil
23 from mercurial import scmutil
24 except ImportError:
24 except ImportError:
25 pass
25 pass
26
26
# Name of the directory, tracked inside the repository, that holds the
# standin files (one small hash file per largefile).
shortname = '.hglf'
# Name of the largefile store directory (under .hg/ and in the per-user
# cache) and of the config section read by systemcachepath().
longname = 'largefiles'
29
29
30
30
31 # -- Portability wrappers ----------------------------------------------
31 # -- Portability wrappers ----------------------------------------------
32
32
# Probe the installed Mercurial's dirstate API once: newer releases grew a
# 'subrepos' argument on dirstate.status(), and dirstate.walk() changed
# signature at the same time.
_walk_takes_subrepos = (
    'subrepos' in inspect.getargspec(dirstate.dirstate.status)[0])

def dirstate_walk(dirstate, matcher, unknown=False, ignored=False):
    '''Call dirstate.walk() with the signature appropriate for the
    running Mercurial version.'''
    if _walk_takes_subrepos:
        # for Mercurial >= 1.5
        return dirstate.walk(matcher, [], unknown, ignored)
    # for Mercurial <= 1.4
    return dirstate.walk(matcher, unknown, ignored)
41
41
def repo_add(repo, list):
    '''Schedule the given files for addition, bridging the API move of
    add() from the repo object (Mercurial <= 1.5) to the working context
    repo[None] (Mercurial >= 1.6).'''
    add = getattr(repo, 'add', None)
    if add is None:
        # Mercurial >= 1.6
        add = repo[None].add
    return add(list)
50
50
51 def repo_remove(repo, list, unlink=False):
51 def repo_remove(repo, list, unlink=False):
52 try:
52 try:
53 # Mercurial <= 1.5
53 # Mercurial <= 1.5
54 remove = repo.remove
54 remove = repo.remove
55 except AttributeError:
55 except AttributeError:
56 # Mercurial >= 1.6
56 # Mercurial >= 1.6
57 try:
57 try:
58 # Mercurial <= 1.8
58 # Mercurial <= 1.8
59 remove = repo[None].remove
59 remove = repo[None].remove
60 except AttributeError:
60 except AttributeError:
61 # Mercurial >= 1.9
61 # Mercurial >= 1.9
62 def remove(list, unlink):
62 def remove(list, unlink):
63 wlock = repo.wlock()
63 wlock = repo.wlock()
64 try:
64 try:
65 if unlink:
65 if unlink:
66 for f in list:
66 for f in list:
67 try:
67 try:
68 util.unlinkpath(repo.wjoin(f))
68 util.unlinkpath(repo.wjoin(f))
69 except OSError, inst:
69 except OSError, inst:
70 if inst.errno != errno.ENOENT:
70 if inst.errno != errno.ENOENT:
71 raise
71 raise
72 repo[None].forget(list)
72 repo[None].forget(list)
73 finally:
73 finally:
74 wlock.release()
74 wlock.release()
75
75
76 return remove(list, unlink=unlink)
76 return remove(list, unlink=unlink)
77
77
def repo_forget(repo, list):
    '''Forget the given files, bridging the API move of forget() from the
    repo object (Mercurial <= 1.5) to repo[None] (Mercurial >= 1.6).'''
    forget = getattr(repo, 'forget', None)
    if forget is None:
        # Mercurial >= 1.6
        forget = repo[None].forget
    return forget(list)
86
86
def findoutgoing(repo, remote, force):
    '''Return the changesets present locally but missing from remote,
    spanning three generations of the discovery API.'''
    # First attempt is for Mercurial <= 1.5 second is for >= 1.6
    try:
        return repo.findoutgoing(remote)
    except AttributeError:
        pass
    from mercurial import discovery
    try:
        # Mercurial <= 1.8
        return discovery.findoutgoing(repo, remote, force=force)
    except AttributeError:
        # Mercurial >= 1.9
        common, _anyinc, _heads = discovery.findcommonincoming(
            repo, remote, force=force)
        return repo.changelog.findmissing(common)
101
101
102 # -- Private worker functions ------------------------------------------
102 # -- Private worker functions ------------------------------------------
103
103
if os.name == 'nt':
    # Windows: use Mercurial's wrapper around CreateHardLink.
    from mercurial import win32
    linkfn = win32.oslink

def link(src, dest):
    '''Hardlink src to dest; when hardlinking fails, fall back to a plain
    copy that also carries over the source's permission bits.

    NOTE(review): in this chunk linkfn is only bound on Windows; confirm
    that a POSIX binding (e.g. os.link) exists elsewhere in the file.'''
    try:
        linkfn(src, dest)
    except OSError:
        # If hardlinks fail fall back on copy
        shutil.copyfile(src, dest)
        os.chmod(dest, os.stat(src).st_mode)
115
115
def systemcachepath(ui, hash):
    '''Return the path of the largefile with the given hash in the
    per-user system cache.  An explicit largefiles.systemcache config
    value wins; otherwise a platform-specific default location is used.'''
    configured = ui.config(longname, 'systemcache', None)
    if configured:
        return os.path.join(configured, hash)
    if os.name == 'nt':
        appdata = os.getenv('LOCALAPPDATA') or os.getenv('APPDATA')
        return os.path.join(appdata, longname, hash)
    if os.name == 'posix':
        return os.path.join(os.getenv('HOME'), '.' + longname, hash)
    raise util.Abort(_('Unknown operating system: %s\n') % os.name)
129
129
def insystemcache(ui, hash):
    '''True if the largefile with this hash exists in the system cache.'''
    path = systemcachepath(ui, hash)
    return os.path.exists(path)
132
132
def findfile(repo, hash):
    '''Locate the largefile with the given hash, trying the repo-local
    cache first, then the system cache.  Return its path, or None when
    the file is in neither cache.'''
    if incache(repo, hash):
        repo.ui.note(_('Found %s in cache\n') % hash)
        return cachepath(repo, hash)
    if insystemcache(repo.ui, hash):
        repo.ui.note(_('Found %s in system cache\n') % hash)
        return systemcachepath(repo.ui, hash)
    return None
141
141
class largefiles_dirstate(dirstate.dirstate):
    '''dirstate subclass that converts every incoming path to unix
    (slash-separated, normalized) form before delegating, so the
    largefiles dirstate stores platform-independent paths.'''
    def __getitem__(self, key):
        key = unixpath(key)
        return super(largefiles_dirstate, self).__getitem__(key)
    def normal(self, f):
        f = unixpath(f)
        return super(largefiles_dirstate, self).normal(f)
    def remove(self, f):
        f = unixpath(f)
        return super(largefiles_dirstate, self).remove(f)
    def add(self, f):
        f = unixpath(f)
        return super(largefiles_dirstate, self).add(f)
    def drop(self, f):
        f = unixpath(f)
        return super(largefiles_dirstate, self).drop(f)
    def forget(self, f):
        f = unixpath(f)
        return super(largefiles_dirstate, self).forget(f)
155
155
156 def openlfdirstate(ui, repo):
156 def openlfdirstate(ui, repo):
157 '''
157 '''
158 Return a dirstate object that tracks big files: i.e. its root is the
158 Return a dirstate object that tracks big files: i.e. its root is the
159 repo root, but it is saved in .hg/largefiles/dirstate.
159 repo root, but it is saved in .hg/largefiles/dirstate.
160 '''
160 '''
161 admin = repo.join(longname)
161 admin = repo.join(longname)
162 try:
162 try:
163 # Mercurial >= 1.9
163 # Mercurial >= 1.9
164 opener = scmutil.opener(admin)
164 opener = scmutil.opener(admin)
165 except ImportError:
165 except ImportError:
166 # Mercurial <= 1.8
166 # Mercurial <= 1.8
167 opener = util.opener(admin)
167 opener = util.opener(admin)
168 if util.safehasattr(repo.dirstate, '_validate'):
168 if util.safehasattr(repo.dirstate, '_validate'):
169 lfdirstate = largefiles_dirstate(opener, ui, repo.root,
169 lfdirstate = largefiles_dirstate(opener, ui, repo.root,
170 repo.dirstate._validate)
170 repo.dirstate._validate)
171 else:
171 else:
172 lfdirstate = largefiles_dirstate(opener, ui, repo.root)
172 lfdirstate = largefiles_dirstate(opener, ui, repo.root)
173
173
174 # If the largefiles dirstate does not exist, populate and create it. This
174 # If the largefiles dirstate does not exist, populate and create it. This
175 # ensures that we create it on the first meaningful largefiles operation in
175 # ensures that we create it on the first meaningful largefiles operation in
176 # a new clone. It also gives us an easy way to forcibly rebuild largefiles
176 # a new clone. It also gives us an easy way to forcibly rebuild largefiles
177 # state:
177 # state:
178 # rm .hg/largefiles/dirstate && hg status
178 # rm .hg/largefiles/dirstate && hg status
179 # Or even, if things are really messed up:
179 # Or even, if things are really messed up:
180 # rm -rf .hg/largefiles && hg status
180 # rm -rf .hg/largefiles && hg status
181 if not os.path.exists(os.path.join(admin, 'dirstate')):
181 if not os.path.exists(os.path.join(admin, 'dirstate')):
182 util.makedirs(admin)
182 util.makedirs(admin)
183 matcher = getstandinmatcher(repo)
183 matcher = getstandinmatcher(repo)
184 for standin in dirstate_walk(repo.dirstate, matcher):
184 for standin in dirstate_walk(repo.dirstate, matcher):
185 lfile = splitstandin(standin)
185 lfile = splitstandin(standin)
186 hash = readstandin(repo, lfile)
186 hash = readstandin(repo, lfile)
187 lfdirstate.normallookup(lfile)
187 lfdirstate.normallookup(lfile)
188 try:
188 try:
189 if hash == hashfile(lfile):
189 if hash == hashfile(lfile):
190 lfdirstate.normal(lfile)
190 lfdirstate.normal(lfile)
191 except IOError, err:
191 except IOError, err:
192 if err.errno != errno.ENOENT:
192 if err.errno != errno.ENOENT:
193 raise
193 raise
194
194
195 lfdirstate.write()
195 lfdirstate.write()
196
196
197 return lfdirstate
197 return lfdirstate
198
198
def lfdirstate_status(lfdirstate, repo, rev):
    '''Run status on the largefiles dirstate, resolving files in the
    "unsure" bucket by re-hashing them against their standins at rev,
    and return the usual seven-tuple (modified, added, removed, missing,
    unknown, ignored, clean).'''
    wlock = repo.wlock()
    try:
        matcher = match_.always(repo.root, repo.getcwd())
        (unsure, modified, added, removed, missing, unknown, ignored,
         clean) = lfdirstate.status(matcher, [], False, False, False)
        for lfile in unsure:
            standinhash = repo[rev][standin(lfile)].data().strip()
            if standinhash != hashfile(repo.wjoin(lfile)):
                modified.append(lfile)
            else:
                clean.append(lfile)
                lfdirstate.normal(lfile)
        lfdirstate.write()
    finally:
        wlock.release()
    return (modified, added, removed, missing, unknown, ignored, clean)
216
216
def listlfiles(repo, rev=None, matcher=None):
    '''list largefiles in the working copy or specified changeset'''
    if matcher is None:
        matcher = getstandinmatcher(repo)

    # ignore unknown files in working directory
    standins = repo[rev].walk(matcher)
    return [splitstandin(f) for f in standins
            if rev is not None or repo.dirstate[f] != '?']
226
226
def incache(repo, hash):
    '''True if the largefile with this hash is in the repo-local cache.'''
    path = cachepath(repo, hash)
    return os.path.exists(path)
229
229
def createdir(dir):
    '''Create dir (including missing parents) if it does not exist.

    Uses EAFP instead of the old exists()-then-makedirs() sequence, which
    could raise if another process created the directory between the
    check and the makedirs call.'''
    try:
        os.makedirs(dir)
    except OSError:
        # Only swallow the error when the directory is actually there.
        if not os.path.isdir(dir):
            raise
233
233
def cachepath(repo, hash):
    '''Return the repo-local cache path for the largefile with this hash.'''
    relpath = os.path.join(longname, hash)
    return repo.join(relpath)
236
236
def copyfromcache(repo, hash, filename):
    '''copyfromcache copies the specified largefile from the repo or system
    cache to the specified location in the repository. It will not throw an
    exception on failure, as it is meant to be called only after ensuring that
    the needed largefile exists in the cache.'''
    cached = findfile(repo, hash)
    if cached is None:
        return False
    dest = repo.wjoin(filename)
    util.makedirs(os.path.dirname(dest))
    shutil.copy(cached, dest)
    return True
248
248
def copytocache(repo, rev, file, uploaded=False):
    '''Copy the largefile named by file into the repo-local cache, keyed
    by the hash recorded in its standin, unless it is already cached.
    (rev and uploaded are accepted for interface compatibility; this
    implementation does not use them.)'''
    hash = readstandin(repo, file)
    if not incache(repo, hash):
        copytocacheabsolute(repo, repo.wjoin(file), hash)
254
254
def copytocacheabsolute(repo, file, hash):
    '''Store the file at the given absolute path into the repo-local
    cache under hash: hardlink from the system cache when the file is
    already there, otherwise copy it in (preserving its mode) and then
    publish it to the system cache.'''
    storepath = cachepath(repo, hash)
    createdir(os.path.dirname(storepath))
    if insystemcache(repo.ui, hash):
        link(systemcachepath(repo.ui, hash), storepath)
    else:
        shutil.copyfile(file, storepath)
        os.chmod(storepath, os.stat(file).st_mode)
        linktosystemcache(repo, hash)
263
263
def linktosystemcache(repo, hash):
    '''Hardlink the repo-cached largefile into the per-user system cache.'''
    target = systemcachepath(repo.ui, hash)
    createdir(os.path.dirname(target))
    link(cachepath(repo, hash), target)
267
267
def getstandinmatcher(repo, pats=None, opts=None):
    '''Return a match object that applies pats to the standin directory'''
    # None defaults instead of the old mutable []/{} literals: default
    # argument objects are shared across calls, so a mutation anywhere
    # would leak into every later call.
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    standindir = repo.pathto(shortname)
    if pats:
        # patterns supplied: search standin directory relative to current dir
        cwd = repo.getcwd()
        if os.path.isabs(cwd):
            # cwd is an absolute path for hg -R <reponame>
            # work relative to the repository root in this case
            cwd = ''
        pats = [os.path.join(standindir, cwd, pat) for pat in pats]
    elif os.path.isdir(standindir):
        # no patterns: relative to repo root
        pats = [standindir]
    else:
        # no patterns and no standin dir: return matcher that matches nothing
        match = match_.match(repo.root, None, [], exact=True)
        match.matchfn = lambda f: False
        return match
    return getmatcher(repo, pats, opts, showbad=False)
288
288
def getmatcher(repo, pats=None, opts=None, showbad=True):
    '''Wrapper around scmutil.match() that adds showbad: if false, neuter
    the match object\'s bad() method so it does not print any warnings
    about missing files or directories.'''
    # None defaults instead of mutable []/{} literals (shared default
    # objects would leak state between calls).
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    try:
        # Mercurial >= 1.9.  Also catch NameError: the module-level
        # "from mercurial import scmutil" is wrapped in try/except pass,
        # so when it failed the bare name scmutil raises NameError here,
        # not ImportError, and the fallback below would never run.
        match = scmutil.match(repo[None], pats, opts)
    except (ImportError, NameError):
        # Mercurial <= 1.8
        match = cmdutil.match(repo, pats, opts)

    if not showbad:
        match.bad = lambda f, msg: None
    return match
303
303
def composestandinmatcher(repo, rmatcher):
    '''Return a matcher that accepts standins corresponding to the files
    accepted by rmatcher. Pass the list of files in the matcher as the
    paths specified by the user.'''
    smatcher = getstandinmatcher(repo, rmatcher.files())
    standinmatch = smatcher.matchfn
    # A path matches when it is a standin AND the file it stands in for
    # is accepted by rmatcher.
    smatcher.matchfn = lambda f: (standinmatch(f) and
                                  rmatcher.matchfn(splitstandin(f)))
    return smatcher
315
315
def standin(filename):
    '''Return the repo-relative path to the standin for the specified big
    file.'''
    # Notes:
    # 1) Most callers want an absolute path, but _create_standin() needs
    #    it repo-relative so lfadd() can pass it to repo_add().  So leave
    #    it up to the caller to use repo.wjoin() to get an absolute path.
    # 2) Join with '/' because that's what dirstate always uses, even on
    #    Windows. Change existing separator to '/' first in case we are
    #    passed filenames from an external source (like the command line).
    normalized = filename.replace(os.sep, '/')
    return shortname + '/' + normalized
327
327
def isstandin(filename):
    '''Return true if filename is a big file standin. filename must
    be in Mercurial\'s internal form (slash-separated).'''
    prefix = shortname + '/'
    return filename.startswith(prefix)
332
332
def splitstandin(filename):
    '''Return the largefile path for the given standin, or None when the
    path is not a standin at all.'''
    # Split on / because that's what dirstate always uses, even on Windows.
    # Change local separator to / first just in case we are passed filenames
    # from an external source (like the command line).
    bits = filename.replace(os.sep, '/').split('/', 1)
    if len(bits) != 2 or bits[0] != shortname:
        return None
    return bits[1]
342
342
def updatestandin(repo, standin):
    '''Refresh the given standin from the current contents of its
    largefile in the working directory; no-op when the largefile is
    missing.'''
    lfile = repo.wjoin(splitstandin(standin))
    if os.path.exists(lfile):
        hash = hashfile(lfile)
        executable = getexecutable(lfile)
        writestandin(repo, standin, hash, executable)
349
349
def readstandin(repo, filename, node=None):
    '''read hex hash from standin for filename at given node, or working
    directory if no node is given'''
    fctx = repo[node][standin(filename)]
    return fctx.data().strip()
354
354
def writestandin(repo, standin, hash, executable):
    '''write hash to <repo.root>/<standin>'''
    writehash(hash, repo.wjoin(standin), executable)
358
358
def copyandhash(instream, outfile):
    '''Read bytes from instream (iterable) and write them to outfile,
    computing the SHA-1 hash of the data along the way.  Close outfile
    when done and return the binary hash.'''
    hasher = util.sha1('')
    update, write = hasher.update, outfile.write
    for chunk in instream:
        update(chunk)
        write(chunk)

    # Blecch: closing a file that somebody else opened is rude and
    # wrong. But it's so darn convenient and practical! After all,
    # outfile was opened just to copy and hash.
    outfile.close()

    return hasher.digest()
374
374
def hashrepofile(repo, file):
    '''Return the hex SHA-1 of the given file in the working directory.'''
    return hashfile(repo.wjoin(file))
377
377
def hashfile(file):
    '''Return the hex SHA-1 hash of file's contents, or the empty string
    when the file does not exist.'''
    if not os.path.exists(file):
        return ''
    hasher = util.sha1('')
    fd = open(file, 'rb')
    for block in blockstream(fd):
        hasher.update(block)
    fd.close()
    return hasher.hexdigest()
387
387
class limitreader(object):
    '''File-like wrapper exposing at most limit bytes of f.  Reads past
    the limit return the empty string; close() is a no-op (the wrapped
    file stays open for its owner).'''
    def __init__(self, f, limit):
        self.f = f
        self.limit = limit

    def read(self, length):
        '''Read up to length bytes, capped at the remaining limit.'''
        if self.limit == 0:
            return ''
        # min() replaces the old "length > limit and limit or length"
        # and/or trick, which is harder to read and breaks whenever the
        # selected operand happens to be falsy.
        length = min(length, self.limit)
        self.limit -= length
        return self.f.read(length)

    def close(self):
        pass
402
402
def blockstream(infile, blocksize=128 * 1024):
    """Generator that yields blocks of data from infile and closes infile."""
    while True:
        chunk = infile.read(blocksize)
        if not chunk:
            break
        yield chunk
    # Same blecch as above.
    infile.close()
412
412
def readhash(filename):
    '''Return the 40-byte hex hash stored at the start of a standin file,
    aborting if the file is too short to contain one.'''
    rfile = open(filename, 'rb')
    try:
        hash = rfile.read(40)
    finally:
        # try/finally so the descriptor is not leaked if read() raises.
        rfile.close()
    if len(hash) < 40:
        raise util.Abort(_('bad hash in \'%s\' (only %d bytes long)')
                         % (filename, len(hash)))
    return hash
421
421
def writehash(hash, filename, executable):
    '''Write hash followed by a newline to filename, creating parent
    directories as needed, then apply the mode derived from executable.'''
    util.makedirs(os.path.dirname(filename))
    if os.path.exists(filename):
        # NOTE(review): unlinking before rewriting presumably breaks any
        # hardlink to the cached copy rather than clobbering it in
        # place -- confirm against the link()/cache code.
        os.unlink(filename)
    wfile = open(filename, 'wb')
    try:
        wfile.write(hash)
        wfile.write('\n')
    finally:
        wfile.close()
    if os.path.exists(filename):
        os.chmod(filename, getmode(executable))
435
435
def getexecutable(filename):
    '''Return a truthy value when filename is executable by user, group,
    AND other (all three execute bits set).'''
    mode = os.stat(filename).st_mode
    return ((mode & stat.S_IXUSR) and (mode & stat.S_IXGRP)
            and (mode & stat.S_IXOTH))
440
440
def getmode(executable):
    '''Return the permission bits for a written file: rwxr-xr-x for
    executables, rw-r--r-- otherwise.'''
    # 0o-prefixed octal literals: the bare 0755/0644 form is easy to
    # misread as decimal and is a syntax error in Python 3.
    if executable:
        return 0o755
    else:
        return 0o644
446
446
def urljoin(first, second, *arg):
    '''Join two or more URL fragments, guaranteeing exactly one '/'
    between each adjacent pair.'''
    def glue(left, right):
        if not left.endswith('/'):
            left = left + '/'
        if right.startswith('/'):
            right = right[1:]
        return left + right

    result = first
    for piece in (second,) + arg:
        result = glue(result, piece)
    return result
459
459
def hexsha1(data):
    """hexsha1 returns the hex-encoded sha1 sum of the data in the file-like
    object data"""
    hasher = hashlib.sha1()
    for chunk in util.filechunkiter(data):
        hasher.update(chunk)
    return hasher.hexdigest()
467
467
def httpsendfile(ui, filename):
    '''Open filename for upload, papering over three generations of the
    httpsendfile API.'''
    try:
        # Mercurial >= 1.9
        return httpconnection.httpsendfile(ui, filename, 'rb')
    except ImportError:
        # NOTE(review): confirm the failure on older Mercurials actually
        # surfaces as ImportError here (and not AttributeError) -- the
        # module-level import of httpconnection succeeded if we got this
        # far.
        if 'ui' in inspect.getargspec(url_.httpsendfile.__init__)[0]:
            # Mercurial == 1.8
            return url_.httpsendfile(ui, filename, 'rb')
        # Mercurial <= 1.7
        return url_.httpsendfile(filename, 'rb')
479
479
480 # Convert a path to a unix style path. This is used to give a
480 # Convert a path to a unix style path. This is used to give a
481 # canonical path to the lfdirstate.
481 # canonical path to the lfdirstate.
def unixpath(path):
    '''Convert path to a normalized unix-style (slash-separated) path --
    the canonical form given to the lfdirstate.'''
    normalized = os.path.normpath(path)
    return normalized.replace(os.sep, '/')
484
484
def islfilesrepo(repo):
    '''True when repo declares the largefiles requirement and its store
    actually contains at least one standin.'''
    # Note: an earlier revision of this expression was missing the final
    # closing parenthesis (a syntax error); this is the corrected form.
    return ('largefiles' in repo.requirements and
        any_(shortname + '/' in f[0] for f in repo.store.datafiles()))
488
488
def any_(gen):
    '''Portable stand-in for the any() builtin (which only exists from
    Python 2.5 on): True if any element of gen is truthy.'''
    for item in gen:
        if item:
            return True
    return False
494
494
class storeprotonotcapable(BaseException):
    '''Raised when a store does not support any of the requested store
    protocols; storetypes records the protocol list that was attempted.

    NOTE(review): derives from BaseException, not Exception, so generic
    "except Exception" handlers will not catch it -- confirm that is
    intentional.'''
    def __init__(self, storetypes):
        self.storetypes = storetypes
@@ -1,106 +1,105 b''
1 # Copyright 2010-2011 Fog Creek Software
1 # Copyright 2010-2011 Fog Creek Software
2 # Copyright 2010-2011 Unity Technologies
2 # Copyright 2010-2011 Unity Technologies
3 #
3 #
4 # This software may be used and distributed according to the terms of the
4 # This software may be used and distributed according to the terms of the
5 # GNU General Public License version 2 or any later version.
5 # GNU General Public License version 2 or any later version.
6
6
7 '''Remote largefile store; the base class for servestore'''
7 '''Remote largefile store; the base class for servestore'''
8
8
9 import urllib2
9 import urllib2
10 import HTTPError
11
10
12 from mercurial import util
11 from mercurial import util
13 from mercurial.i18n import _
12 from mercurial.i18n import _
14
13
15 import lfutil
14 import lfutil
16 import basestore
15 import basestore
17
16
18 class remotestore(basestore.basestore):
17 class remotestore(basestore.basestore):
19 """A largefile store accessed over a network"""
18 """A largefile store accessed over a network"""
20 def __init__(self, ui, repo, url):
19 def __init__(self, ui, repo, url):
21 super(remotestore, self).__init__(ui, repo, url)
20 super(remotestore, self).__init__(ui, repo, url)
22
21
23 def put(self, source, hash):
22 def put(self, source, hash):
24 if self._verify(hash):
23 if self._verify(hash):
25 return
24 return
26 if self.sendfile(source, hash):
25 if self.sendfile(source, hash):
27 raise util.Abort(
26 raise util.Abort(
28 _('remotestore: could not put %s to remote store %s')
27 _('remotestore: could not put %s to remote store %s')
29 % (source, self.url))
28 % (source, self.url))
30 self.ui.debug(
29 self.ui.debug(
31 _('remotestore: put %s to remote store %s') % (source, self.url))
30 _('remotestore: put %s to remote store %s') % (source, self.url))
32
31
33 def exists(self, hash):
32 def exists(self, hash):
34 return self._verify(hash)
33 return self._verify(hash)
35
34
36 def sendfile(self, filename, hash):
35 def sendfile(self, filename, hash):
37 self.ui.debug('remotestore: sendfile(%s, %s)\n' % (filename, hash))
36 self.ui.debug('remotestore: sendfile(%s, %s)\n' % (filename, hash))
38 fd = None
37 fd = None
39 try:
38 try:
40 try:
39 try:
41 fd = lfutil.httpsendfile(self.ui, filename)
40 fd = lfutil.httpsendfile(self.ui, filename)
42 except IOError, e:
41 except IOError, e:
43 raise util.Abort(
42 raise util.Abort(
44 _('remotestore: could not open file %s: %s')
43 _('remotestore: could not open file %s: %s')
45 % (filename, str(e)))
44 % (filename, str(e)))
46 return self._put(hash, fd)
45 return self._put(hash, fd)
47 finally:
46 finally:
48 if fd:
47 if fd:
49 fd.close()
48 fd.close()
50
49
51 def _getfile(self, tmpfile, filename, hash):
50 def _getfile(self, tmpfile, filename, hash):
52 # quit if the largefile isn't there
51 # quit if the largefile isn't there
53 stat = self._stat(hash)
52 stat = self._stat(hash)
54 if stat:
53 if stat:
55 raise util.Abort(_('remotestore: largefile %s is %s') %
54 raise util.Abort(_('remotestore: largefile %s is %s') %
56 (hash, stat == 1 and 'invalid' or 'missing'))
55 (hash, stat == 1 and 'invalid' or 'missing'))
57
56
58 try:
57 try:
59 length, infile = self._get(hash)
58 length, infile = self._get(hash)
60 except HTTPError, e:
59 except urllib2.HTTPError, e:
61 # 401s get converted to util.Aborts; everything else is fine being
60 # 401s get converted to util.Aborts; everything else is fine being
62 # turned into a StoreError
61 # turned into a StoreError
63 raise basestore.StoreError(filename, hash, self.url, str(e))
62 raise basestore.StoreError(filename, hash, self.url, str(e))
64 except urllib2.URLError, e:
63 except urllib2.URLError, e:
65 # This usually indicates a connection problem, so don't
64 # This usually indicates a connection problem, so don't
66 # keep trying with the other files... they will probably
65 # keep trying with the other files... they will probably
67 # all fail too.
66 # all fail too.
68 raise util.Abort('%s: %s' % (self.url, str(e.reason)))
67 raise util.Abort('%s: %s' % (self.url, str(e.reason)))
69 except IOError, e:
68 except IOError, e:
70 raise basestore.StoreError(filename, hash, self.url, str(e))
69 raise basestore.StoreError(filename, hash, self.url, str(e))
71
70
72 # Mercurial does not close its SSH connections after writing a stream
71 # Mercurial does not close its SSH connections after writing a stream
73 if length is not None:
72 if length is not None:
74 infile = lfutil.limitreader(infile, length)
73 infile = lfutil.limitreader(infile, length)
75 return lfutil.copyandhash(lfutil.blockstream(infile), tmpfile)
74 return lfutil.copyandhash(lfutil.blockstream(infile), tmpfile)
76
75
77 def _verify(self, hash):
76 def _verify(self, hash):
78 return not self._stat(hash)
77 return not self._stat(hash)
79
78
80 def _verifyfile(self, cctx, cset, contents, standin, verified):
79 def _verifyfile(self, cctx, cset, contents, standin, verified):
81 filename = lfutil.splitstandin(standin)
80 filename = lfutil.splitstandin(standin)
82 if not filename:
81 if not filename:
83 return False
82 return False
84 fctx = cctx[standin]
83 fctx = cctx[standin]
85 key = (filename, fctx.filenode())
84 key = (filename, fctx.filenode())
86 if key in verified:
85 if key in verified:
87 return False
86 return False
88
87
89 verified.add(key)
88 verified.add(key)
90
89
91 stat = self._stat(hash)
90 stat = self._stat(hash)
92 if not stat:
91 if not stat:
93 return False
92 return False
94 elif stat == 1:
93 elif stat == 1:
95 self.ui.warn(
94 self.ui.warn(
96 _('changeset %s: %s: contents differ\n')
95 _('changeset %s: %s: contents differ\n')
97 % (cset, filename))
96 % (cset, filename))
98 return True # failed
97 return True # failed
99 elif stat == 2:
98 elif stat == 2:
100 self.ui.warn(
99 self.ui.warn(
101 _('changeset %s: %s missing\n')
100 _('changeset %s: %s missing\n')
102 % (cset, filename))
101 % (cset, filename))
103 return True # failed
102 return True # failed
104 else:
103 else:
105 raise util.Abort(_('check failed, unexpected response'
104 raise util.Abort(_('check failed, unexpected response'
106 'statlfile: %d') % stat)
105 'statlfile: %d') % stat)
@@ -1,138 +1,138 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''setup for largefiles extension: uisetup'''
9 '''setup for largefiles extension: uisetup'''
10
10
11 from mercurial import archival, cmdutil, commands, extensions, filemerge, hg, \
11 from mercurial import archival, cmdutil, commands, extensions, filemerge, hg, \
12 httprepo, localrepo, sshrepo, sshserver, wireproto
12 httprepo, localrepo, sshrepo, sshserver, util, wireproto
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14 from mercurial.hgweb import hgweb_mod, protocol
14 from mercurial.hgweb import hgweb_mod, protocol
15
15
16 import overrides
16 import overrides
17 import proto
17 import proto
18
18
19 def uisetup(ui):
19 def uisetup(ui):
20 # Disable auto-status for some commands which assume that all
20 # Disable auto-status for some commands which assume that all
21 # files in the result are under Mercurial's control
21 # files in the result are under Mercurial's control
22
22
23 entry = extensions.wrapcommand(commands.table, 'add',
23 entry = extensions.wrapcommand(commands.table, 'add',
24 overrides.override_add)
24 overrides.override_add)
25 addopt = [('', 'large', None, _('add as largefile')),
25 addopt = [('', 'large', None, _('add as largefile')),
26 ('', 'lfsize', '', _('add all files above this size (in megabytes)'
26 ('', 'lfsize', '', _('add all files above this size (in megabytes)'
27 'as largefiles (default: 10)'))]
27 'as largefiles (default: 10)'))]
28 entry[1].extend(addopt)
28 entry[1].extend(addopt)
29
29
30 entry = extensions.wrapcommand(commands.table, 'addremove',
30 entry = extensions.wrapcommand(commands.table, 'addremove',
31 overrides.override_addremove)
31 overrides.override_addremove)
32 entry = extensions.wrapcommand(commands.table, 'remove',
32 entry = extensions.wrapcommand(commands.table, 'remove',
33 overrides.override_remove)
33 overrides.override_remove)
34 entry = extensions.wrapcommand(commands.table, 'forget',
34 entry = extensions.wrapcommand(commands.table, 'forget',
35 overrides.override_forget)
35 overrides.override_forget)
36 entry = extensions.wrapcommand(commands.table, 'status',
36 entry = extensions.wrapcommand(commands.table, 'status',
37 overrides.override_status)
37 overrides.override_status)
38 entry = extensions.wrapcommand(commands.table, 'log',
38 entry = extensions.wrapcommand(commands.table, 'log',
39 overrides.override_log)
39 overrides.override_log)
40 entry = extensions.wrapcommand(commands.table, 'rollback',
40 entry = extensions.wrapcommand(commands.table, 'rollback',
41 overrides.override_rollback)
41 overrides.override_rollback)
42 entry = extensions.wrapcommand(commands.table, 'verify',
42 entry = extensions.wrapcommand(commands.table, 'verify',
43 overrides.override_verify)
43 overrides.override_verify)
44
44
45 verifyopt = [('', 'large', None, _('verify largefiles')),
45 verifyopt = [('', 'large', None, _('verify largefiles')),
46 ('', 'lfa', None,
46 ('', 'lfa', None,
47 _('verify all revisions of largefiles not just current')),
47 _('verify all revisions of largefiles not just current')),
48 ('', 'lfc', None,
48 ('', 'lfc', None,
49 _('verify largefile contents not just existence'))]
49 _('verify largefile contents not just existence'))]
50 entry[1].extend(verifyopt)
50 entry[1].extend(verifyopt)
51
51
52 entry = extensions.wrapcommand(commands.table, 'outgoing',
52 entry = extensions.wrapcommand(commands.table, 'outgoing',
53 overrides.override_outgoing)
53 overrides.override_outgoing)
54 outgoingopt = [('', 'large', None, _('display outgoing largefiles'))]
54 outgoingopt = [('', 'large', None, _('display outgoing largefiles'))]
55 entry[1].extend(outgoingopt)
55 entry[1].extend(outgoingopt)
56 entry = extensions.wrapcommand(commands.table, 'summary',
56 entry = extensions.wrapcommand(commands.table, 'summary',
57 overrides.override_summary)
57 overrides.override_summary)
58 summaryopt = [('', 'large', None, _('display outgoing largefiles'))]
58 summaryopt = [('', 'large', None, _('display outgoing largefiles'))]
59 entry[1].extend(summaryopt)
59 entry[1].extend(summaryopt)
60
60
61 entry = extensions.wrapcommand(commands.table, 'update',
61 entry = extensions.wrapcommand(commands.table, 'update',
62 overrides.override_update)
62 overrides.override_update)
63 entry = extensions.wrapcommand(commands.table, 'pull',
63 entry = extensions.wrapcommand(commands.table, 'pull',
64 overrides.override_pull)
64 overrides.override_pull)
65 entry = extensions.wrapfunction(filemerge, 'filemerge',
65 entry = extensions.wrapfunction(filemerge, 'filemerge',
66 overrides.override_filemerge)
66 overrides.override_filemerge)
67 entry = extensions.wrapfunction(cmdutil, 'copy',
67 entry = extensions.wrapfunction(cmdutil, 'copy',
68 overrides.override_copy)
68 overrides.override_copy)
69
69
70 # Backout calls revert so we need to override both the command and the
70 # Backout calls revert so we need to override both the command and the
71 # function
71 # function
72 entry = extensions.wrapcommand(commands.table, 'revert',
72 entry = extensions.wrapcommand(commands.table, 'revert',
73 overrides.override_revert)
73 overrides.override_revert)
74 entry = extensions.wrapfunction(commands, 'revert',
74 entry = extensions.wrapfunction(commands, 'revert',
75 overrides.override_revert)
75 overrides.override_revert)
76
76
77 # clone uses hg._update instead of hg.update even though they are the
77 # clone uses hg._update instead of hg.update even though they are the
78 # same function... so wrap both of them)
78 # same function... so wrap both of them)
79 extensions.wrapfunction(hg, 'update', overrides.hg_update)
79 extensions.wrapfunction(hg, 'update', overrides.hg_update)
80 extensions.wrapfunction(hg, '_update', overrides.hg_update)
80 extensions.wrapfunction(hg, '_update', overrides.hg_update)
81 extensions.wrapfunction(hg, 'clean', overrides.hg_clean)
81 extensions.wrapfunction(hg, 'clean', overrides.hg_clean)
82 extensions.wrapfunction(hg, 'merge', overrides.hg_merge)
82 extensions.wrapfunction(hg, 'merge', overrides.hg_merge)
83
83
84 extensions.wrapfunction(archival, 'archive', overrides.override_archive)
84 extensions.wrapfunction(archival, 'archive', overrides.override_archive)
85 if util.safehasattr(cmdutil, 'bailifchanged'):
85 if util.safehasattr(cmdutil, 'bailifchanged'):
86 extensions.wrapfunction(cmdutil, 'bailifchanged',
86 extensions.wrapfunction(cmdutil, 'bailifchanged',
87 overrides.override_bailifchanged)
87 overrides.override_bailifchanged)
88 else:
88 else:
89 extensions.wrapfunction(cmdutil, 'bail_if_changed',
89 extensions.wrapfunction(cmdutil, 'bail_if_changed',
90 overrides.override_bailifchanged)
90 overrides.override_bailifchanged)
91
91
92 # create the new wireproto commands ...
92 # create the new wireproto commands ...
93 wireproto.commands['putlfile'] = (proto.putlfile, 'sha')
93 wireproto.commands['putlfile'] = (proto.putlfile, 'sha')
94 wireproto.commands['getlfile'] = (proto.getlfile, 'sha')
94 wireproto.commands['getlfile'] = (proto.getlfile, 'sha')
95 wireproto.commands['statlfile'] = (proto.statlfile, 'sha')
95 wireproto.commands['statlfile'] = (proto.statlfile, 'sha')
96
96
97 # ... and wrap some existing ones
97 # ... and wrap some existing ones
98 wireproto.commands['capabilities'] = (proto.capabilities, '')
98 wireproto.commands['capabilities'] = (proto.capabilities, '')
99 wireproto.commands['heads'] = (proto.heads, '')
99 wireproto.commands['heads'] = (proto.heads, '')
100 wireproto.commands['lheads'] = (wireproto.heads, '')
100 wireproto.commands['lheads'] = (wireproto.heads, '')
101
101
102 # make putlfile behave the same as push and {get,stat}lfile behave the same
102 # make putlfile behave the same as push and {get,stat}lfile behave the same
103 # as pull w.r.t. permissions checks
103 # as pull w.r.t. permissions checks
104 hgweb_mod.perms['putlfile'] = 'push'
104 hgweb_mod.perms['putlfile'] = 'push'
105 hgweb_mod.perms['getlfile'] = 'pull'
105 hgweb_mod.perms['getlfile'] = 'pull'
106 hgweb_mod.perms['statlfile'] = 'pull'
106 hgweb_mod.perms['statlfile'] = 'pull'
107
107
108 # the hello wireproto command uses wireproto.capabilities, so it won't see
108 # the hello wireproto command uses wireproto.capabilities, so it won't see
109 # our largefiles capability unless we replace the actual function as well.
109 # our largefiles capability unless we replace the actual function as well.
110 proto.capabilities_orig = wireproto.capabilities
110 proto.capabilities_orig = wireproto.capabilities
111 wireproto.capabilities = proto.capabilities
111 wireproto.capabilities = proto.capabilities
112
112
113 # these let us reject non-lfiles clients and make them display our error
113 # these let us reject non-lfiles clients and make them display our error
114 # messages
114 # messages
115 protocol.webproto.refuseclient = proto.webproto_refuseclient
115 protocol.webproto.refuseclient = proto.webproto_refuseclient
116 sshserver.sshserver.refuseclient = proto.sshproto_refuseclient
116 sshserver.sshserver.refuseclient = proto.sshproto_refuseclient
117
117
118 # can't do this in reposetup because it needs to have happened before
118 # can't do this in reposetup because it needs to have happened before
119 # wirerepo.__init__ is called
119 # wirerepo.__init__ is called
120 proto.ssh_oldcallstream = sshrepo.sshrepository._callstream
120 proto.ssh_oldcallstream = sshrepo.sshrepository._callstream
121 proto.http_oldcallstream = httprepo.httprepository._callstream
121 proto.http_oldcallstream = httprepo.httprepository._callstream
122 sshrepo.sshrepository._callstream = proto.sshrepo_callstream
122 sshrepo.sshrepository._callstream = proto.sshrepo_callstream
123 httprepo.httprepository._callstream = proto.httprepo_callstream
123 httprepo.httprepository._callstream = proto.httprepo_callstream
124
124
125 # don't die on seeing a repo with the largefiles requirement
125 # don't die on seeing a repo with the largefiles requirement
126 localrepo.localrepository.supported |= set(['largefiles'])
126 localrepo.localrepository.supported |= set(['largefiles'])
127
127
128 # override some extensions' stuff as well
128 # override some extensions' stuff as well
129 for name, module in extensions.extensions():
129 for name, module in extensions.extensions():
130 if name == 'fetch':
130 if name == 'fetch':
131 extensions.wrapcommand(getattr(module, 'cmdtable'), 'fetch',
131 extensions.wrapcommand(getattr(module, 'cmdtable'), 'fetch',
132 overrides.override_fetch)
132 overrides.override_fetch)
133 if name == 'purge':
133 if name == 'purge':
134 extensions.wrapcommand(getattr(module, 'cmdtable'), 'purge',
134 extensions.wrapcommand(getattr(module, 'cmdtable'), 'purge',
135 overrides.override_purge)
135 overrides.override_purge)
136 if name == 'rebase':
136 if name == 'rebase':
137 extensions.wrapcommand(getattr(module, 'cmdtable'), 'rebase',
137 extensions.wrapcommand(getattr(module, 'cmdtable'), 'rebase',
138 overrides.override_rebase)
138 overrides.override_rebase)
General Comments 0
You need to be logged in to leave comments. Login now