##// END OF EJS Templates
shallowutil: fsdecode the bytes group name before passing to os...
Augie Fackler -
r41294:a6b98c95 default
parent child Browse files
Show More
@@ -1,492 +1,492 b''
1 # shallowutil.py -- remotefilelog utilities
1 # shallowutil.py -- remotefilelog utilities
2 #
2 #
3 # Copyright 2014 Facebook, Inc.
3 # Copyright 2014 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import collections
9 import collections
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12 import os
12 import os
13 import stat
13 import stat
14 import struct
14 import struct
15 import tempfile
15 import tempfile
16
16
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial import (
18 from mercurial import (
19 error,
19 error,
20 node,
20 node,
21 pycompat,
21 pycompat,
22 revlog,
22 revlog,
23 util,
23 util,
24 )
24 )
25 from mercurial.utils import (
25 from mercurial.utils import (
26 storageutil,
26 storageutil,
27 stringutil,
27 stringutil,
28 )
28 )
29 from . import constants
29 from . import constants
30
30
31 if not pycompat.iswindows:
31 if not pycompat.iswindows:
32 import grp
32 import grp
33
33
def isenabled(repo):
    """returns whether the repository is remotefilelog enabled or not"""
    # a repo is shallow iff the shallow requirement was recorded at clone time
    requirements = repo.requirements
    return constants.SHALLOWREPO_REQUIREMENT in requirements
37
37
def getcachekey(reponame, file, id):
    """Return the shared-cache relative path for revision ``id`` of ``file``.

    The file path is hashed so arbitrary names can be stored safely; the
    hex digest is split 2/38 to keep per-directory fan-out small.
    """
    digest = hashlib.sha1(file).digest()
    pathhash = node.hex(digest)
    head, tail = pathhash[:2], pathhash[2:]
    return os.path.join(reponame, head, tail, id)
41
41
def getlocalkey(file, id):
    """Return the local-store relative path for revision ``id`` of ``file``."""
    digest = hashlib.sha1(file).digest()
    pathhash = node.hex(digest)
    return os.path.join(pathhash, id)
45
45
def getcachepath(ui, allowempty=False):
    """Return the configured remotefilelog cache directory (expanded).

    Aborts when ``remotefilelog.cachepath`` is not set, unless
    ``allowempty`` is True, in which case None is returned instead.
    """
    cachepath = ui.config("remotefilelog", "cachepath")
    if cachepath:
        return util.expandpath(cachepath)
    if allowempty:
        return None
    raise error.Abort(_("could not find config option "
                        "remotefilelog.cachepath"))
55
55
def getcachepackpath(repo, category):
    """Return the shared-cache pack directory for ``category``.

    The file pack category lives directly in ``packs``; every other
    category gets its own subdirectory.
    """
    cachepath = getcachepath(repo.ui)
    if category == constants.FILEPACK_CATEGORY:
        return os.path.join(cachepath, repo.name, 'packs')
    return os.path.join(cachepath, repo.name, 'packs', category)
62
62
def getlocalpackpath(base, category):
    """Return the repo-local pack directory for ``category``."""
    packdir = os.path.join(base, 'packs')
    return os.path.join(packdir, category)
65
65
def createrevlogtext(text, copyfrom=None, copyrev=None):
    """returns a string that matches the revlog contents in a
    traditional revlog
    """
    # metadata must be packed when there is copy info, or when the raw
    # text itself starts with the metadata marker (to escape it)
    needsmeta = bool(copyfrom) or text.startswith('\1\n')
    if needsmeta:
        meta = {}
        if copyfrom:
            meta['copy'] = copyfrom
            meta['copyrev'] = copyrev
        text = storageutil.packmeta(meta, text)
    return text
78
78
def parsemeta(text):
    """parse mercurial filelog metadata"""
    meta, size = storageutil.parsemeta(text)
    if text.startswith('\1\n'):
        # strip the metadata header block from the text
        end = text.index('\1\n', 2)
        text = text[end + 2:]
    if not meta:
        meta = {}
    return meta, text
86
86
def sumdicts(*dicts):
    """Adds all the values of *dicts together into one dictionary. This assumes
    the values in *dicts are all summable.

    e.g. [{'a': 4', 'b': 2}, {'b': 3, 'c': 1}] -> {'a': 4, 'b': 5, 'c': 1}
    """
    result = collections.defaultdict(lambda: 0)
    # use items() rather than iteritems() so this also runs on Python 3,
    # and avoid shadowing the builtin ``dict`` with the loop variable
    for d in dicts:
        for k, v in d.items():
            result[k] += v
    return result
98
98
def prefixkeys(dict, prefix):
    """Returns ``dict`` with ``prefix`` prepended to all its keys."""
    # items() rather than iteritems() so this also runs on Python 3
    # (parameter name ``dict`` is kept for interface compatibility)
    result = {}
    for k, v in dict.items():
        result[prefix + k] = v
    return result
105
105
def reportpackmetrics(ui, prefix, *stores):
    """Aggregate the metrics of ``stores`` and log them under ``prefix``."""
    merged = sumdicts(*[s.getmetrics() for s in stores])
    prefixed = prefixkeys(merged, prefix + '_')
    ui.log(prefix + "_packsizes", "\n", **pycompat.strkwargs(prefixed))
110
110
111 def _parsepackmeta(metabuf):
111 def _parsepackmeta(metabuf):
112 """parse datapack meta, bytes (<metadata-list>) -> dict
112 """parse datapack meta, bytes (<metadata-list>) -> dict
113
113
114 The dict contains raw content - both keys and values are strings.
114 The dict contains raw content - both keys and values are strings.
115 Upper-level business may want to convert some of them to other types like
115 Upper-level business may want to convert some of them to other types like
116 integers, on their own.
116 integers, on their own.
117
117
118 raise ValueError if the data is corrupted
118 raise ValueError if the data is corrupted
119 """
119 """
120 metadict = {}
120 metadict = {}
121 offset = 0
121 offset = 0
122 buflen = len(metabuf)
122 buflen = len(metabuf)
123 while buflen - offset >= 3:
123 while buflen - offset >= 3:
124 key = metabuf[offset:offset + 1]
124 key = metabuf[offset:offset + 1]
125 offset += 1
125 offset += 1
126 metalen = struct.unpack_from('!H', metabuf, offset)[0]
126 metalen = struct.unpack_from('!H', metabuf, offset)[0]
127 offset += 2
127 offset += 2
128 if offset + metalen > buflen:
128 if offset + metalen > buflen:
129 raise ValueError('corrupted metadata: incomplete buffer')
129 raise ValueError('corrupted metadata: incomplete buffer')
130 value = metabuf[offset:offset + metalen]
130 value = metabuf[offset:offset + metalen]
131 metadict[key] = value
131 metadict[key] = value
132 offset += metalen
132 offset += metalen
133 if offset != buflen:
133 if offset != buflen:
134 raise ValueError('corrupted metadata: redundant data')
134 raise ValueError('corrupted metadata: redundant data')
135 return metadict
135 return metadict
136
136
137 def _buildpackmeta(metadict):
137 def _buildpackmeta(metadict):
138 """reverse of _parsepackmeta, dict -> bytes (<metadata-list>)
138 """reverse of _parsepackmeta, dict -> bytes (<metadata-list>)
139
139
140 The dict contains raw content - both keys and values are strings.
140 The dict contains raw content - both keys and values are strings.
141 Upper-level business may want to serialize some of other types (like
141 Upper-level business may want to serialize some of other types (like
142 integers) to strings before calling this function.
142 integers) to strings before calling this function.
143
143
144 raise ProgrammingError when metadata key is illegal, or ValueError if
144 raise ProgrammingError when metadata key is illegal, or ValueError if
145 length limit is exceeded
145 length limit is exceeded
146 """
146 """
147 metabuf = ''
147 metabuf = ''
148 for k, v in sorted((metadict or {}).iteritems()):
148 for k, v in sorted((metadict or {}).iteritems()):
149 if len(k) != 1:
149 if len(k) != 1:
150 raise error.ProgrammingError('packmeta: illegal key: %s' % k)
150 raise error.ProgrammingError('packmeta: illegal key: %s' % k)
151 if len(v) > 0xfffe:
151 if len(v) > 0xfffe:
152 raise ValueError('metadata value is too long: 0x%x > 0xfffe'
152 raise ValueError('metadata value is too long: 0x%x > 0xfffe'
153 % len(v))
153 % len(v))
154 metabuf += k
154 metabuf += k
155 metabuf += struct.pack('!H', len(v))
155 metabuf += struct.pack('!H', len(v))
156 metabuf += v
156 metabuf += v
157 # len(metabuf) is guaranteed representable in 4 bytes, because there are
157 # len(metabuf) is guaranteed representable in 4 bytes, because there are
158 # only 256 keys, and for each value, len(value) <= 0xfffe.
158 # only 256 keys, and for each value, len(value) <= 0xfffe.
159 return metabuf
159 return metabuf
160
160
# expected value types for well-known metadata keys; any key not listed
# here is expected to carry a raw bytes value
_metaitemtypes = {
    constants.METAKEYFLAG: (int, pycompat.long),
    constants.METAKEYSIZE: (int, pycompat.long),
}
165
165
def buildpackmeta(metadict):
    """like _buildpackmeta, but typechecks metadict and normalize it.

    This means, METAKEYSIZE and METAKEYSIZE should have integers as values,
    and METAKEYFLAG will be dropped if its value is 0.
    """
    newmeta = {}
    for k, v in (metadict or {}).iteritems():
        expectedtype = _metaitemtypes.get(k, (bytes,))
        if not isinstance(v, expectedtype):
            raise error.ProgrammingError('packmeta: wrong type of key %s' % k)
        if int not in expectedtype:
            newmeta[k] = v
            continue
        # integer values are normalized to a big-endian binary buffer;
        # a zero flag is dropped entirely to save space
        if k == constants.METAKEYFLAG and v == 0:
            continue
        newmeta[k] = int2bin(v)
    return _buildpackmeta(newmeta)
185
185
def parsepackmeta(metabuf):
    """like _parsepackmeta, but convert fields to desired types automatically.

    This means, METAKEYFLAG and METAKEYSIZE fields will be converted to
    integers.
    """
    metadict = _parsepackmeta(metabuf)
    for k, v in metadict.iteritems():
        types = _metaitemtypes.get(k)
        if types is not None and int in types:
            metadict[k] = bin2int(v)
    return metadict
197
197
def int2bin(n):
    """convert a non-negative integer to raw binary buffer"""
    # collect bytes little-end first, then reverse: big-endian output with
    # no leading zeros; zero encodes as the empty buffer
    out = []
    while n > 0:
        out.append(n & 0xff)
        n >>= 8
    out.reverse()
    return bytes(bytearray(out))
205
205
def bin2int(buf):
    """the reverse of int2bin, convert a binary buffer to an integer"""
    value = 0
    # fold in each byte, most significant first (big-endian)
    for byte in bytearray(buf):
        value = (value << 8) | byte
    return value
213
213
def parsesizeflags(raw):
    """given a remotefilelog blob, return (headersize, rawtextsize, flags)

    see remotefilelogserver.createfileblob for the format.
    raise RuntimeError if the content is illformed.
    """
    flags = revlog.REVIDX_DEFAULT_FLAGS
    size = None
    try:
        index = raw.index('\0')
        header = raw[:index]
        if not header.startswith('v'):
            # v0, str(int(size)) is the header
            size = int(header)
        elif header.startswith('v1\n'):
            # v1 and above, header starts with 'v': one key/value per line
            for s in header.split('\n'):
                if s.startswith(constants.METAKEYSIZE):
                    size = int(s[len(constants.METAKEYSIZE):])
                elif s.startswith(constants.METAKEYFLAG):
                    flags = int(s[len(constants.METAKEYFLAG):])
        else:
            raise RuntimeError('unsupported remotefilelog header: %s'
                               % header)
    except ValueError:
        raise RuntimeError("unexpected remotefilelog header: illegal format")
    if size is None:
        raise RuntimeError("unexpected remotefilelog header: no size found")
    return index + 1, size, flags
244
244
def buildfileblobheader(size, flags, version=None):
    """return the header of a remotefilelog blob.

    see remotefilelogserver.createfileblob for the format.
    approximately the reverse of parsesizeflags.

    version could be 0 or 1, or None (auto decide).
    """
    if version is None:
        # choose v0 if flags is empty, otherwise v1
        version = 1 if flags else 0
    if version == 0:
        if flags:
            raise error.ProgrammingError('fileblob v0 does not support flag')
        return '%d' % size
    if version == 1:
        return ('v1\n%s%d\n%s%d'
                % (constants.METAKEYSIZE, size,
                   constants.METAKEYFLAG, flags))
    raise error.ProgrammingError('unknown fileblob version %d' % version)
267
267
def ancestormap(raw):
    """Parse the ancestor records that follow the text in a fileblob.

    Returns {node: (p1, p2, linknode, copyfrom)}.
    """
    offset, size, flags = parsesizeflags(raw)
    cursor = offset + size

    mapping = {}
    while cursor < len(raw):
        # each record: four 20-byte hashes, then a NUL-terminated copy path
        divider = raw.index('\0', cursor + 80)

        currentnode = raw[cursor:cursor + 20]
        p1 = raw[cursor + 20:cursor + 40]
        p2 = raw[cursor + 40:cursor + 60]
        linknode = raw[cursor + 60:cursor + 80]
        copyfrom = raw[cursor + 80:divider]

        mapping[currentnode] = (p1, p2, linknode, copyfrom)
        cursor = divider + 1

    return mapping
286
286
def readfile(path):
    """Read and return the full binary content of ``path``.

    An empty file is treated as corruption: it is removed and IOError is
    raised so callers can re-fetch the data.
    """
    # a context manager closes the file before os.remove runs below; the
    # previous try/finally removed the file while it was still open, which
    # fails on Windows
    with open(path, 'rb') as f:
        result = f.read()

    # we should never have empty files
    if not result:
        os.remove(path)
        raise IOError("empty file: %s" % path)

    return result
300
300
def unlinkfile(filepath):
    """Remove ``filepath``, working around Windows readonly semantics."""
    if pycompat.iswindows:
        # on Windows, os.unlink cannot delete readonly files, so make the
        # file writable first
        os.chmod(filepath, stat.S_IWUSR)
    os.unlink(filepath)
306
306
def renamefile(source, destination):
    """Rename ``source`` over ``destination``.

    On Windows, os.rename can neither rename readonly files nor overwrite
    an existing destination, so permissions are relaxed and any existing
    destination file is removed first.
    """
    if pycompat.iswindows:
        os.chmod(source, stat.S_IWUSR)
        if os.path.isfile(destination):
            os.chmod(destination, stat.S_IWUSR)
            os.unlink(destination)

    os.rename(source, destination)
317
317
def writefile(path, content, readonly=False):
    """Atomically write ``content`` to ``path`` via a sibling tempfile.

    The data is written to a temporary file in the same directory, which
    is then renamed over ``path``; the tempfile is cleaned up on failure.
    With ``readonly`` the final file is made read-only (0o444), otherwise
    the current umask is applied manually (mkstemp creates files 0o600
    regardless of umask).
    """
    dirname, filename = os.path.split(path)
    if not os.path.exists(dirname):
        try:
            os.makedirs(dirname)
        except OSError as ex:
            # a racing writer may have created the directory already
            if ex.errno != errno.EEXIST:
                raise

    fd, temp = tempfile.mkstemp(prefix='.%s-' % filename, dir=dirname)
    os.close(fd)

    try:
        f = util.posixfile(temp, 'wb')
        f.write(content)
        f.close()

        if readonly:
            mode = 0o444
        else:
            # there's no way to read the umask without modifying it, so set
            # it right back afterwards
            oldumask = os.umask(0)
            os.umask(oldumask)
            # NOTE(review): this uses the raw bitwise complement of the
            # umask rather than masking a base mode (e.g. 0o666 & ~oldumask)
            # — presumably intentional; confirm before changing.
            mode = ~oldumask

        renamefile(temp, path)
        os.chmod(path, mode)
    except Exception:
        try:
            unlinkfile(temp)
        except OSError:
            pass
        raise
354
354
def sortnodes(nodes, parentfunc):
    """Topologically sorts the nodes, using the parentfunc to find
    the parents of nodes."""
    nodes = set(nodes)
    childmap = {}
    parentmap = {}
    roots = []

    # Build a child and parent map
    for n in nodes:
        parents = [p for p in parentfunc(n) if p in nodes]
        parentmap[n] = set(parents)
        for p in parents:
            childmap.setdefault(p, set()).add(n)
        if not parents:
            roots.append(n)

    roots.sort()
    # Process roots, adding children to the queue as they become roots.
    # A deque makes the front pop/push O(1); list.pop(0)/insert(0, ...)
    # are O(n) per operation. Visit order is unchanged.
    roots = collections.deque(roots)
    results = []
    while roots:
        n = roots.popleft()
        results.append(n)
        if n in childmap:
            for c in childmap[n]:
                childparents = parentmap[c]
                childparents.remove(n)
                if not childparents:
                    # insert at the beginning, that way child nodes
                    # are likely to be output immediately after their
                    # parents. This gives better compression results.
                    roots.appendleft(c)

    return results
390
390
def readexactly(stream, n):
    '''read n bytes from stream.read and abort if less was available'''
    s = stream.read(n)
    got = len(s)
    if got < n:
        raise error.Abort(_("stream ended unexpectedly"
                            " (got %d bytes, expected %d)")
                          % (got, n))
    return s
399
399
def readunpack(stream, fmt):
    """Read exactly struct.calcsize(fmt) bytes from ``stream`` and unpack
    them according to ``fmt``."""
    size = struct.calcsize(fmt)
    return struct.unpack(fmt, readexactly(stream, size))
403
403
def readpath(stream):
    """Read one length-prefixed path from ``stream``."""
    rawlen = readexactly(stream, constants.FILENAMESIZE)
    (pathlen,) = struct.unpack(constants.FILENAMESTRUCT, rawlen)
    return readexactly(stream, pathlen)
408
408
def readnodelist(stream):
    """Yield each node of a count-prefixed node list read from ``stream``."""
    rawlen = readexactly(stream, constants.NODECOUNTSIZE)
    (nodecount,) = struct.unpack(constants.NODECOUNTSTRUCT, rawlen)
    for _x in pycompat.xrange(nodecount):
        yield readexactly(stream, constants.NODESIZE)
414
414
def readpathlist(stream):
    """Yield each path of a count-prefixed path list read from ``stream``."""
    rawlen = readexactly(stream, constants.PATHCOUNTSIZE)
    (pathcount,) = struct.unpack(constants.PATHCOUNTSTRUCT, rawlen)
    for _x in pycompat.xrange(pathcount):
        yield readpath(stream)
420
420
def getgid(groupname):
    """Return the numeric gid for ``groupname``, or None if it is unknown.

    ``groupname`` is bytes; grp.getgrnam wants a native str, hence the
    fsdecode.
    """
    try:
        return grp.getgrnam(pycompat.fsdecode(groupname)).gr_gid
    except KeyError:
        return None
427
427
def setstickygroupdir(path, gid, warn=None):
    """Best-effort: give ``path`` group ``gid`` and setgid/group-write bits.

    A None ``gid`` is a no-op. Failures are reported through ``warn`` (if
    provided) rather than raised.
    """
    if gid is None:
        return
    try:
        os.chown(path, -1, gid)
        os.chmod(path, 0o2775)
    except (IOError, OSError) as ex:
        if warn:
            warn(_('unable to chown/chmod on %s: %s\n') % (path, ex))
437
437
def mkstickygroupdir(ui, path):
    """Creates the given directory (if it doesn't exist) and give it a
    particular group with setgid enabled."""
    gid = None
    groupname = ui.config("remotefilelog", "cachegroup")
    if groupname:
        gid = getgid(groupname)
        if gid is None:
            ui.warn(_('unable to resolve group name: %s\n') % groupname)

    # we use a single stat syscall to test the existence and mode / group bit
    st = None
    try:
        st = os.stat(path)
    except OSError:
        pass

    if st:
        # directory already exists; only touch it if permissions are wrong
        if (st.st_mode & 0o2775) != 0o2775 or st.st_gid != gid:
            setstickygroupdir(path, gid, ui.warn)
        return

    oldumask = os.umask(0o002)
    try:
        # walk up until an existing ancestor is found, recording every
        # directory that still has to be created
        missingdirs = [path]
        parent = os.path.dirname(path)
        while parent and not os.path.exists(parent):
            missingdirs.append(parent)
            parent = os.path.dirname(parent)

        for dirpath in reversed(missingdirs):
            try:
                os.mkdir(dirpath)
            except OSError as ex:
                # a racing process may have created it in the meantime
                if ex.errno != errno.EEXIST:
                    raise

        for dirpath in missingdirs:
            setstickygroupdir(dirpath, gid, ui.warn)
    finally:
        os.umask(oldumask)
481
481
def getusername(ui):
    """Return a short user name for metrics; 'unknown' on any failure."""
    try:
        return stringutil.shortuser(ui.username())
    except Exception:
        # username() may abort when no username is configured; metrics
        # reporting should never fail because of that
        return 'unknown'
487
487
def getreponame(ui):
    """Return a short repo name derived from paths.default, or 'unknown'."""
    reponame = ui.config('paths', 'default')
    if not reponame:
        return "unknown"
    return os.path.basename(reponame)
General Comments 0
You need to be logged in to leave comments. Login now