##// END OF EJS Templates
largefiles: fix commit of specified file on non-windows
Na'Tosha Bard -
r15206:f85c76b1 default
parent child Browse files
Show More
@@ -1,497 +1,493 b''
1 # Copyright 2009-2010 Gregory P. Ward
1 # Copyright 2009-2010 Gregory P. Ward
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
2 # Copyright 2009-2010 Intelerad Medical Systems Incorporated
3 # Copyright 2010-2011 Fog Creek Software
3 # Copyright 2010-2011 Fog Creek Software
4 # Copyright 2010-2011 Unity Technologies
4 # Copyright 2010-2011 Unity Technologies
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 '''largefiles utility code: must not import other modules in this package.'''
9 '''largefiles utility code: must not import other modules in this package.'''
10
10
11 import os
11 import os
12 import errno
12 import errno
13 import inspect
13 import inspect
14 import shutil
14 import shutil
15 import stat
15 import stat
16 import hashlib
16 import hashlib
17
17
18 from mercurial import cmdutil, dirstate, httpconnection, match as match_, \
18 from mercurial import cmdutil, dirstate, httpconnection, match as match_, \
19 url as url_, util
19 url as url_, util
20 from mercurial.i18n import _
20 from mercurial.i18n import _
21
21
22 try:
22 try:
23 from mercurial import scmutil
23 from mercurial import scmutil
24 except ImportError:
24 except ImportError:
25 pass
25 pass
26
26
# Name of the directory (at the repo root) that holds standin files, and
# the long name used for the config section and cache directories.
shortname = '.hglf'
longname = 'largefiles'
29
29
30
30
31 # -- Portability wrappers ----------------------------------------------
31 # -- Portability wrappers ----------------------------------------------
32
32
# dirstate.walk() grew a 'subrepos' argument in Mercurial 1.5; probe the
# status() signature once at import time and pick the matching wrapper.
if 'subrepos' not in inspect.getargspec(dirstate.dirstate.status)[0]:
    # Mercurial <= 1.4
    def dirstate_walk(dirstate, matcher, unknown=False, ignored=False):
        return dirstate.walk(matcher, unknown, ignored)
else:
    # Mercurial >= 1.5
    def dirstate_walk(dirstate, matcher, unknown=False, ignored=False):
        return dirstate.walk(matcher, [], unknown, ignored)
41
41
def repo_add(repo, list):
    '''Schedule the given files for addition, across Mercurial versions.'''
    try:
        # Mercurial <= 1.5 had add() directly on the repo
        add = repo.add
    except AttributeError:
        # Mercurial >= 1.6 moved it to the working context
        add = repo[None].add
    return add(list)
50
50
51 def repo_remove(repo, list, unlink=False):
51 def repo_remove(repo, list, unlink=False):
52 try:
52 try:
53 # Mercurial <= 1.5
53 # Mercurial <= 1.5
54 remove = repo.remove
54 remove = repo.remove
55 except AttributeError:
55 except AttributeError:
56 # Mercurial >= 1.6
56 # Mercurial >= 1.6
57 try:
57 try:
58 # Mercurial <= 1.8
58 # Mercurial <= 1.8
59 remove = repo[None].remove
59 remove = repo[None].remove
60 except AttributeError:
60 except AttributeError:
61 # Mercurial >= 1.9
61 # Mercurial >= 1.9
62 def remove(list, unlink):
62 def remove(list, unlink):
63 wlock = repo.wlock()
63 wlock = repo.wlock()
64 try:
64 try:
65 if unlink:
65 if unlink:
66 for f in list:
66 for f in list:
67 try:
67 try:
68 util.unlinkpath(repo.wjoin(f))
68 util.unlinkpath(repo.wjoin(f))
69 except OSError, inst:
69 except OSError, inst:
70 if inst.errno != errno.ENOENT:
70 if inst.errno != errno.ENOENT:
71 raise
71 raise
72 repo[None].forget(list)
72 repo[None].forget(list)
73 finally:
73 finally:
74 wlock.release()
74 wlock.release()
75
75
76 return remove(list, unlink=unlink)
76 return remove(list, unlink=unlink)
77
77
def repo_forget(repo, list):
    '''Stop tracking the given files without deleting them, across
    Mercurial API generations.'''
    try:
        # Mercurial <= 1.5
        forget = repo.forget
    except AttributeError:
        # Mercurial >= 1.6
        forget = repo[None].forget
    return forget(list)
86
86
def findoutgoing(repo, remote, force):
    '''Return changesets present locally but missing from remote.
    First attempt is for Mercurial <= 1.5, second for 1.6--1.8, last
    for >= 1.9.'''
    try:
        return repo.findoutgoing(remote)
    except AttributeError:
        from mercurial import discovery
        try:
            # Mercurial 1.6 -- 1.8
            return discovery.findoutgoing(repo, remote, force=force)
        except AttributeError:
            # Mercurial >= 1.9
            common, _anyinc, _heads = discovery.findcommonincoming(
                repo, remote, force=force)
            return repo.changelog.findmissing(common)
101
101
102 # -- Private worker functions ------------------------------------------
102 # -- Private worker functions ------------------------------------------
103
103
def link(src, dest):
    '''Hardlink src to dest, falling back to a plain copy (preserving the
    file mode) when hardlinking is not possible.'''
    try:
        util.oslink(src, dest)
    except OSError:
        # hardlinks can fail across devices or filesystems; copy instead
        shutil.copyfile(src, dest)
        os.chmod(dest, os.stat(src).st_mode)
115
111
def systemcachepath(ui, hash):
    '''Return the path of 'hash' inside the per-user system-wide cache.
    An explicit largefiles.systemcache config wins; otherwise a platform
    default under APPDATA (Windows) or $HOME (POSIX) is used.'''
    configured = ui.config(longname, 'systemcache', None)
    if configured:
        return os.path.join(configured, hash)
    if os.name == 'nt':
        appdata = os.getenv('LOCALAPPDATA') or os.getenv('APPDATA')
        return os.path.join(appdata, longname, hash)
    if os.name == 'posix':
        return os.path.join(os.getenv('HOME'), '.' + longname, hash)
    raise util.Abort(_('Unknown operating system: %s\n') % os.name)
129
125
def insystemcache(ui, hash):
    '''True if the largefile with this hash is in the system-wide cache.'''
    return os.path.exists(systemcachepath(ui, hash))
132
128
def findfile(repo, hash):
    '''Locate the largefile with the given hash: repo cache first, then
    the system cache. Return its path, or None if not cached anywhere.'''
    if incache(repo, hash):
        repo.ui.note(_('Found %s in cache\n') % hash)
        return cachepath(repo, hash)
    if insystemcache(repo.ui, hash):
        repo.ui.note(_('Found %s in system cache\n') % hash)
        return systemcachepath(repo.ui, hash)
    return None
141
137
class largefiles_dirstate(dirstate.dirstate):
    '''dirstate subclass that canonicalizes every path to unix style
    (forward slashes) before delegating, so entries are keyed
    consistently regardless of the platform separator.'''
    def __getitem__(self, key):
        return super(largefiles_dirstate, self).__getitem__(unixpath(key))
    def normal(self, f):
        return super(largefiles_dirstate, self).normal(unixpath(f))
    def remove(self, f):
        return super(largefiles_dirstate, self).remove(unixpath(f))
    def add(self, f):
        return super(largefiles_dirstate, self).add(unixpath(f))
    def drop(self, f):
        return super(largefiles_dirstate, self).drop(unixpath(f))
    def forget(self, f):
        return super(largefiles_dirstate, self).forget(unixpath(f))
155
151
156 def openlfdirstate(ui, repo):
152 def openlfdirstate(ui, repo):
157 '''
153 '''
158 Return a dirstate object that tracks big files: i.e. its root is the
154 Return a dirstate object that tracks big files: i.e. its root is the
159 repo root, but it is saved in .hg/largefiles/dirstate.
155 repo root, but it is saved in .hg/largefiles/dirstate.
160 '''
156 '''
161 admin = repo.join(longname)
157 admin = repo.join(longname)
162 try:
158 try:
163 # Mercurial >= 1.9
159 # Mercurial >= 1.9
164 opener = scmutil.opener(admin)
160 opener = scmutil.opener(admin)
165 except ImportError:
161 except ImportError:
166 # Mercurial <= 1.8
162 # Mercurial <= 1.8
167 opener = util.opener(admin)
163 opener = util.opener(admin)
168 if util.safehasattr(repo.dirstate, '_validate'):
164 if util.safehasattr(repo.dirstate, '_validate'):
169 lfdirstate = largefiles_dirstate(opener, ui, repo.root,
165 lfdirstate = largefiles_dirstate(opener, ui, repo.root,
170 repo.dirstate._validate)
166 repo.dirstate._validate)
171 else:
167 else:
172 lfdirstate = largefiles_dirstate(opener, ui, repo.root)
168 lfdirstate = largefiles_dirstate(opener, ui, repo.root)
173
169
174 # If the largefiles dirstate does not exist, populate and create it. This
170 # If the largefiles dirstate does not exist, populate and create it. This
175 # ensures that we create it on the first meaningful largefiles operation in
171 # ensures that we create it on the first meaningful largefiles operation in
176 # a new clone. It also gives us an easy way to forcibly rebuild largefiles
172 # a new clone. It also gives us an easy way to forcibly rebuild largefiles
177 # state:
173 # state:
178 # rm .hg/largefiles/dirstate && hg status
174 # rm .hg/largefiles/dirstate && hg status
179 # Or even, if things are really messed up:
175 # Or even, if things are really messed up:
180 # rm -rf .hg/largefiles && hg status
176 # rm -rf .hg/largefiles && hg status
181 if not os.path.exists(os.path.join(admin, 'dirstate')):
177 if not os.path.exists(os.path.join(admin, 'dirstate')):
182 util.makedirs(admin)
178 util.makedirs(admin)
183 matcher = getstandinmatcher(repo)
179 matcher = getstandinmatcher(repo)
184 for standin in dirstate_walk(repo.dirstate, matcher):
180 for standin in dirstate_walk(repo.dirstate, matcher):
185 lfile = splitstandin(standin)
181 lfile = splitstandin(standin)
186 hash = readstandin(repo, lfile)
182 hash = readstandin(repo, lfile)
187 lfdirstate.normallookup(lfile)
183 lfdirstate.normallookup(lfile)
188 try:
184 try:
189 if hash == hashfile(lfile):
185 if hash == hashfile(lfile):
190 lfdirstate.normal(lfile)
186 lfdirstate.normal(lfile)
191 except IOError, err:
187 except IOError, err:
192 if err.errno != errno.ENOENT:
188 if err.errno != errno.ENOENT:
193 raise
189 raise
194
190
195 lfdirstate.write()
191 lfdirstate.write()
196
192
197 return lfdirstate
193 return lfdirstate
198
194
def lfdirstate_status(lfdirstate, repo, rev):
    '''Compute the status of largefiles against rev, resolving 'unsure'
    entries by re-hashing the working copy under the working lock.'''
    wlock = repo.wlock()
    try:
        match = match_.always(repo.root, repo.getcwd())
        s = lfdirstate.status(match, [], False, False, False)
        unsure, modified, added, removed, missing, unknown, ignored, clean = s
        for lfile in unsure:
            committed = repo[rev][standin(lfile)].data().strip()
            if committed != hashfile(repo.wjoin(lfile)):
                modified.append(lfile)
            else:
                clean.append(lfile)
                lfdirstate.normal(lfile)
        lfdirstate.write()
    finally:
        wlock.release()
    return (modified, added, removed, missing, unknown, ignored, clean)
216
212
def listlfiles(repo, rev=None, matcher=None):
    '''list largefiles in the working copy or specified changeset'''
    if matcher is None:
        matcher = getstandinmatcher(repo)

    # ignore unknown files in working directory
    return [splitstandin(f)
            for f in repo[rev].walk(matcher)
            if rev is not None or repo.dirstate[f] != '?']
226
222
def incache(repo, hash):
    '''True if the largefile with this hash is in the repo-local cache.'''
    return os.path.exists(cachepath(repo, hash))
229
225
def createdir(dir):
    '''Create dir (and any missing parents); a no-op if it already exists.

    The original exists()/makedirs() pair raced with concurrent
    processes creating the same cache directory; tolerate that by
    re-checking after a failed makedirs().'''
    if not os.path.exists(dir):
        try:
            os.makedirs(dir)
        except OSError:
            # another process created it between the check and the call;
            # only propagate if the directory is genuinely still missing
            if not os.path.isdir(dir):
                raise
233
229
def cachepath(repo, hash):
    '''Return the repo-local cache path for the largefile with this hash.'''
    return repo.join(os.path.join(longname, hash))
236
232
def copyfromcache(repo, hash, filename):
    '''copyfromcache copies the specified largefile from the repo or system
    cache to the specified location in the repository. It will not throw an
    exception on failure, as it is meant to be called only after ensuring that
    the needed largefile exists in the cache.'''
    cached = findfile(repo, hash)
    if cached is None:
        return False
    util.makedirs(os.path.dirname(repo.wjoin(filename)))
    shutil.copy(cached, repo.wjoin(filename))
    return True
248
244
def copytocache(repo, rev, file, uploaded=False):
    '''Copy file's largefile into the repo cache, unless the hash named by
    its standin is already cached. (rev and uploaded are accepted for
    interface compatibility; they are not used here.)'''
    hash = readstandin(repo, file)
    if incache(repo, hash):
        return
    copytocacheabsolute(repo, repo.wjoin(file), hash)
254
250
def copytocacheabsolute(repo, file, hash):
    '''Store the file at absolute path 'file' under 'hash' in the repo
    cache: hardlink from the system cache when it already has the blob,
    otherwise copy it in and propagate it to the system cache.'''
    createdir(os.path.dirname(cachepath(repo, hash)))
    if insystemcache(repo.ui, hash):
        link(systemcachepath(repo.ui, hash), cachepath(repo, hash))
    else:
        shutil.copyfile(file, cachepath(repo, hash))
        os.chmod(cachepath(repo, hash), os.stat(file).st_mode)
        linktosystemcache(repo, hash)
263
259
def linktosystemcache(repo, hash):
    '''Hardlink (or copy) the repo-cached largefile into the system cache.'''
    createdir(os.path.dirname(systemcachepath(repo.ui, hash)))
    link(cachepath(repo, hash), systemcachepath(repo.ui, hash))
267
263
def getstandinmatcher(repo, pats=None, opts=None):
    '''Return a match object that applies pats to the standin directory.

    pats/opts default to no patterns / no options; the original used
    mutable default arguments ([] and {}), replaced here with the None
    sentinel idiom (backward-compatible: explicit [] / {} behave the
    same).'''
    pats = pats or []
    opts = opts or {}
    standindir = repo.pathto(shortname)
    if pats:
        # patterns supplied: search standin directory relative to current dir
        cwd = repo.getcwd()
        if os.path.isabs(cwd):
            # cwd is an absolute path for hg -R <reponame>
            # work relative to the repository root in this case
            cwd = ''
        pats = [os.path.join(standindir, cwd, pat) for pat in pats]
    elif os.path.isdir(standindir):
        # no patterns: relative to repo root
        pats = [standindir]
    else:
        # no patterns and no standin dir: return matcher that matches nothing
        match = match_.match(repo.root, None, [], exact=True)
        match.matchfn = lambda f: False
        return match
    return getmatcher(repo, pats, opts, showbad=False)
288
284
def getmatcher(repo, pats=None, opts=None, showbad=True):
    '''Wrapper around scmutil.match() that adds showbad: if false, neuter
    the match object's bad() method so it does not print any warnings
    about missing files or directories.

    pats/opts use the None-sentinel idiom instead of the original
    mutable default arguments ([] and {}); explicit [] / {} behave
    identically.'''
    pats = pats or []
    opts = opts or {}
    try:
        # Mercurial >= 1.9
        match = scmutil.match(repo[None], pats, opts)
    except ImportError:
        # Mercurial <= 1.8
        match = cmdutil.match(repo, pats, opts)

    if not showbad:
        match.bad = lambda f, msg: None
    return match
303
299
def composestandinmatcher(repo, rmatcher):
    '''Return a matcher that accepts standins corresponding to the files
    accepted by rmatcher. Pass the list of files in the matcher as the
    paths specified by the user.'''
    smatcher = getstandinmatcher(repo, rmatcher.files())
    isstandin = smatcher.matchfn

    def composed_matchfn(f):
        # accept f only if it is a standin AND its largefile matches
        return isstandin(f) and rmatcher.matchfn(splitstandin(f))

    smatcher.matchfn = composed_matchfn
    return smatcher
315
311
def standin(filename):
    '''Return the repo-relative path to the standin for the specified big
    file.'''
    # Notes:
    # 1) Most callers want an absolute path, but _create_standin() needs
    #    it repo-relative so lfadd() can pass it to repo_add().  So leave
    #    it up to the caller to use repo.wjoin() to get an absolute path.
    # 2) Join with '/' because that's what dirstate always uses, even on
    #    Windows.  Change existing separator to '/' first in case we are
    #    passed filenames from an external source (like the command line).
    return '/'.join((shortname, filename.replace(os.sep, '/')))
327
323
def isstandin(filename):
    '''Return true if filename is a big file standin. filename must be
    in Mercurial's internal form (slash-separated).'''
    return filename.startswith(shortname + '/')
332
328
def splitstandin(filename):
    '''Return the largefile path named by a standin path, or None if
    filename is not a standin.'''
    # Split on / because that's what dirstate always uses, even on
    # Windows; normalize the local separator first in case the name
    # came from an external source (like the command line).
    parts = filename.replace(os.sep, '/').split('/', 1)
    if len(parts) == 2 and parts[0] == shortname:
        return parts[1]
    return None
342
338
def updatestandin(repo, standin):
    '''Rewrite the given standin from the current hash and mode of its
    largefile, if the largefile exists in the working copy.'''
    file = repo.wjoin(splitstandin(standin))
    if os.path.exists(file):
        hash = hashfile(file)
        executable = getexecutable(file)
        writestandin(repo, standin, hash, executable)
349
345
def readstandin(repo, filename, node=None):
    '''read hex hash from standin for filename at given node, or working
    directory if no node is given'''
    return repo[node][standin(filename)].data().strip()
354
350
def writestandin(repo, standin, hash, executable):
    '''Write hash to <repo.root>/<standin>, setting the mode from
    executable.'''
    writehash(hash, repo.wjoin(standin), executable)
358
354
def copyandhash(instream, outfile):
    '''Read bytes from instream (iterable) and write them to outfile,
    computing the SHA-1 hash of the data along the way. Close outfile
    when done and return the binary hash.'''
    hasher = util.sha1('')
    for chunk in instream:
        hasher.update(chunk)
        outfile.write(chunk)

    # Blecch: closing a file that somebody else opened is rude and
    # wrong. But it's so darn convenient and practical! After all,
    # outfile was opened just to copy and hash.
    outfile.close()

    return hasher.digest()
374
370
def hashrepofile(repo, file):
    '''Return the hex SHA-1 of the given working-copy file.'''
    return hashfile(repo.wjoin(file))
377
373
def hashfile(file):
    '''Return the hex SHA-1 of the file's contents, or the empty string
    if the file does not exist.'''
    if not os.path.exists(file):
        return ''
    hasher = util.sha1('')
    fd = open(file, 'rb')
    for chunk in blockstream(fd):
        hasher.update(chunk)
    # blockstream() already closed fd at EOF; this close is a harmless
    # no-op kept for symmetry with the open() above.
    fd.close()
    return hasher.hexdigest()
387
383
class limitreader(object):
    '''File-like wrapper that caps the total number of bytes read from
    the underlying file f at 'limit'.'''
    def __init__(self, f, limit):
        self.f = f
        self.limit = limit

    def read(self, length):
        if self.limit == 0:
            return ''
        length = min(length, self.limit)
        self.limit -= length
        return self.f.read(length)

    def close(self):
        # deliberately leave the underlying file open
        pass
402
398
def blockstream(infile, blocksize=128 * 1024):
    """Generator that yields blocksize chunks from infile and closes
    infile when exhausted."""
    while True:
        chunk = infile.read(blocksize)
        if not chunk:
            break
        yield chunk
    # Same blecch as above: closing a file somebody else opened.
    infile.close()
412
408
def readhash(filename):
    '''Read and return the 40-character hash stored in filename; abort
    if the file holds fewer than 40 bytes.'''
    rfile = open(filename, 'rb')
    hash = rfile.read(40)
    rfile.close()
    if len(hash) < 40:
        raise util.Abort(_('bad hash in \'%s\' (only %d bytes long)')
                         % (filename, len(hash)))
    return hash
421
417
def writehash(hash, filename, executable):
    '''Write hash plus a trailing newline to filename, creating parent
    directories as needed and setting the mode from executable. Any
    pre-existing file is replaced.'''
    util.makedirs(os.path.dirname(filename))
    if os.path.exists(filename):
        os.unlink(filename)
    wfile = open(filename, 'wb')
    try:
        wfile.write(hash)
        wfile.write('\n')
    finally:
        wfile.close()
    if os.path.exists(filename):
        os.chmod(filename, getmode(executable))
435
431
def getexecutable(filename):
    '''Truthy when filename is executable by user, group AND other.'''
    mode = os.stat(filename).st_mode
    return ((mode & stat.S_IXUSR) and (mode & stat.S_IXGRP)
            and (mode & stat.S_IXOTH))
440
436
def getmode(executable):
    '''Return the file mode to use for a standin/largefile: rwxr-xr-x
    when executable, rw-r--r-- otherwise.

    Uses the 0o octal prefix (same values as the original 0755/0644
    literals) so the module also parses under Python 3, where the bare
    leading-zero form is a syntax error.'''
    if executable:
        return 0o755
    else:
        return 0o644
446
442
def urljoin(first, second, *arg):
    '''Join two or more URL components, ensuring exactly one slash at
    each join point (slashes inside a component are left alone).'''
    def join(left, right):
        sep = '' if left.endswith('/') else '/'
        if right.startswith('/'):
            right = right[1:]
        return left + sep + right

    url = join(first, second)
    for piece in arg:
        url = join(url, piece)
    return url
459
455
def hexsha1(data):
    """hexsha1 returns the hex-encoded sha1 sum of the data in the
    file-like object data"""
    digest = hashlib.sha1()
    for chunk in util.filechunkiter(data):
        digest.update(chunk)
    return digest.hexdigest()
467
463
def httpsendfile(ui, filename):
    '''Return a file-like object suitable for HTTP upload of filename,
    across the httpsendfile API changes between Mercurial versions.'''
    try:
        # Mercurial >= 1.9
        return httpconnection.httpsendfile(ui, filename, 'rb')
    except ImportError:
        if 'ui' in inspect.getargspec(url_.httpsendfile.__init__)[0]:
            # Mercurial == 1.8
            return url_.httpsendfile(ui, filename, 'rb')
        # Mercurial <= 1.7
        return url_.httpsendfile(filename, 'rb')
479
475
def unixpath(path):
    '''Convert path to a normalized, unix-style (slash-separated) path;
    used to give a canonical form to the lfdirstate.'''
    return os.path.normpath(path).replace(os.sep, '/')
484
480
def islfilesrepo(repo):
    '''True when repo carries the largefiles requirement and its store
    contains at least one standin file.'''
    return ('largefiles' in repo.requirements and
            any_(shortname + '/' in f[0] for f in repo.store.datafiles()))
488
484
def any_(gen):
    '''Backport of the any() builtin for Pythons that predate it.'''
    for item in gen:
        if item:
            return True
    return False
494
490
class storeprotonotcapable(BaseException):
    '''Raised when no remote store supports any of the requested store
    types; carries the list of types that were tried.'''
    def __init__(self, storetypes):
        self.storetypes = storetypes
General Comments 0
You need to be logged in to leave comments. Login now