changegroup: move all compression utilities in util...
Pierre-Yves David
r26266:1e042e31 default
mercurial/changegroup.py

@@ -7,12 +7,10 @@
 
 from __future__ import absolute_import
 
-import bz2
 import os
 import struct
 import tempfile
 import weakref
-import zlib
 
 from .i18n import _
 from .node import (
@@ -81,20 +79,14 @@ def combineresults(results):
         result = -1 + changedheads
     return result
 
-class nocompress(object):
-    def compress(self, x):
-        return x
-    def flush(self):
-        return ""
-
 bundletypes = {
-    "": ("", nocompress), # only when using unbundle on ssh and old http servers
+    "": ("", 'UN'), # only when using unbundle on ssh and old http servers
                           # since the unification ssh accepts a header but there
                           # is no capability signaling it.
     "HG20": (), # special-cased below
-    "HG10UN": ("HG10UN", nocompress),
-    "HG10BZ": ("HG10", lambda: bz2.BZ2Compressor()),
-    "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()),
+    "HG10UN": ("HG10UN", 'UN'),
+    "HG10BZ": ("HG10", 'BZ'),
+    "HG10GZ": ("HG10GZ", 'GZ'),
 }
 
 # hgweb uses this list to communicate its preferred type
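
The "HG10BZ" entry pairs the 'BZ' key with a header of just "HG10": a bzip2 stream begins with its own "BZ" magic, so the header plus the compressed payload spell the full "HG10BZ" prefix a reader expects. A standalone stdlib sketch of that trick (an illustration, not code from the patch):

    import bz2

    # The header written for an "HG10BZ" bundle is only "HG10"; bzip2's own
    # "BZ" magic completes the six-byte prefix seen on the wire.
    z = bz2.BZ2Compressor()
    payload = z.compress(b'some changegroup data') + z.flush()
    bundle = b'HG10' + payload
    assert bundle.startswith(b'HG10BZ')

    # A reader strips the whole six-byte prefix, so the decompressor has to
    # be fed a synthetic "BZ" before the rest of the stream (compare _bz2()
    # in the util.py hunk below).
    d = bz2.BZ2Decompressor()
    restored = d.decompress(b'BZ') + d.decompress(bundle[6:])
    assert restored == b'some changegroup data'
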
@@ -127,15 +119,18 @@ def writebundle(ui, cg, filename, bundle
             bundle = bundle2.bundle20(ui)
             part = bundle.newpart('changegroup', data=cg.getchunks())
             part.addparam('version', cg.version)
-            z = nocompress()
+            z = util.compressors['UN']()
             chunkiter = bundle.getchunks()
         else:
             if cg.version != '01':
                 raise util.Abort(_('old bundle types only supports v1 '
                                    'changegroups'))
-            header, compressor = bundletypes[bundletype]
+            header, comp = bundletypes[bundletype]
             fh.write(header)
-            z = compressor()
+            if comp not in util.compressors:
+                raise util.Abort(_('unknown stream compression type: %s')
+                                 % comp)
+            z = util.compressors[comp]()
             chunkiter = cg.getchunks()
 
         # parse the changegroup data, otherwise we will block
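
Past the end of this hunk (not shown in the diff), writebundle keeps feeding every changegroup chunk through z.compress() and finishes with z.flush(). A minimal standalone sketch of that write path; writechunks and the small compressors dict below are illustrative stand-ins, not Mercurial APIs:

    import io
    import zlib

    class nocompress(object):
        def compress(self, data):
            return data
        def flush(self):
            return b''

    # Stand-in for util.compressors: values are factories, so nothing is
    # constructed until a bundle is actually written.
    compressors = {'UN': nocompress, 'GZ': zlib.compressobj}

    def writechunks(fh, chunks, header, comp):
        if comp not in compressors:
            raise ValueError('unknown stream compression type: %s' % comp)
        fh.write(header)
        z = compressors[comp]()
        for chunk in chunks:
            fh.write(z.compress(chunk))
        fh.write(z.flush())

    buf = io.BytesIO()
    writechunks(buf, [b'chunk one', b'chunk two'], b'HG10GZ', 'GZ')
    assert zlib.decompress(buf.getvalue()[6:]) == b'chunk onechunk two'
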
@@ -158,30 +153,15 @@ def writebundle(ui, cg, filename, bundle
             else:
                 os.unlink(cleanup)
 
-def decompressor(fh, alg):
-    if alg == 'UN':
-        return fh
-    elif alg == 'GZ':
-        def generator(f):
-            zd = zlib.decompressobj()
-            for chunk in util.filechunkiter(f):
-                yield zd.decompress(chunk)
-    elif alg == 'BZ':
-        def generator(f):
-            zd = bz2.BZ2Decompressor()
-            zd.decompress("BZ")
-            for chunk in util.filechunkiter(f, 4096):
-                yield zd.decompress(chunk)
-    else:
-        raise util.Abort("unknown bundle compression '%s'" % alg)
-    return util.chunkbuffer(generator(fh))
-
 class cg1unpacker(object):
     deltaheader = _CHANGEGROUPV1_DELTA_HEADER
     deltaheadersize = struct.calcsize(deltaheader)
     version = '01'
     def __init__(self, fh, alg):
-        self._stream = decompressor(fh, alg)
+        if not alg in util.decompressors:
+            raise util.Abort(_('unknown stream compression type: %s')
+                             % alg)
+        self._stream = util.decompressors[alg](fh)
         self._type = alg
         self.callback = None
     def compressed(self):
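
On the read side, a decompressors entry is a callable that wraps the raw file object and produces decompressed data for cg1unpacker to stream from. In the patch that wrapper is util.chunkbuffer over a filechunkiter; the sketch below substitutes plain stand-ins (filechunks, gzstream are hypothetical names) and simply joins the chunks:

    import io
    import zlib

    def filechunks(fh, size=4096):
        # stand-in for util.filechunkiter: read a file object in fixed pieces
        while True:
            data = fh.read(size)
            if not data:
                break
            yield data

    def gzstream(fh):
        # roughly what a 'GZ' entry built by _makedecompressor yields
        zd = zlib.decompressobj()
        for chunk in filechunks(fh):
            yield zd.decompress(chunk)

    raw = io.BytesIO(zlib.compress(b'changegroup payload'))
    assert b''.join(gzstream(raw)) == b'changegroup payload'
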
mercurial/util.py

@@ -21,6 +21,8 @@ import re as remod
 import os, time, datetime, calendar, textwrap, signal, collections
 import imp, socket, urllib
 import gc
+import bz2
+import zlib
 
 if os.name == 'nt':
     import windows as platform
@@ -2338,5 +2340,41 @@ def finddirs(path):
         yield path[:pos]
         pos = path.rfind('/', 0, pos)
 
+# compression utility
+
+class nocompress(object):
+    def compress(self, x):
+        return x
+    def flush(self):
+        return ""
+
+compressors = {
+    'UN': nocompress,
+    # lambda to prevent early import
+    'BZ': lambda: bz2.BZ2Compressor(),
+    'GZ': lambda: zlib.compressobj(),
+    }
+
+def _makedecompressor(decompcls):
+    def generator(f):
+        d = decompcls()
+        for chunk in filechunkiter(f):
+            yield d.decompress(chunk)
+    def func(fh):
+        return chunkbuffer(generator(fh))
+    return func
+
+def _bz2():
+    d = bz2.BZ2Decompressor()
+    # Bzip2 stream start with BZ, but we stripped it.
+    # we put it back for good measure.
+    d.decompress('BZ')
+    return d
+
+decompressors = {'UN': lambda fh: fh,
+                 'BZ': _makedecompressor(_bz2),
+                 'GZ': _makedecompressor(lambda: zlib.decompressobj()),
+                 }
+
 # convenient shortcut
 dst = debugstacktrace
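
The contract between the two registries is that decompressors[alg] undoes whatever compressors[alg]() produced, with 'BZ' additionally re-feeding the magic that the bundle header consumed. A compact standalone mirror of that round trip, using in-memory chunk lists instead of chunkbuffer/filechunkiter (names such as _un, _gz, _bz are illustrative only):

    import bz2
    import zlib

    class nocompress(object):
        def compress(self, data):
            return data
        def flush(self):
            return b''

    compressors = {
        'UN': nocompress,
        'BZ': bz2.BZ2Compressor,
        'GZ': zlib.compressobj,
    }

    def _un(chunks):
        return b''.join(chunks)

    def _gz(chunks):
        zd = zlib.decompressobj()
        return b''.join(zd.decompress(c) for c in chunks)

    def _bz(chunks):
        zd = bz2.BZ2Decompressor()
        zd.decompress(b'BZ')   # put back the magic the bundle header consumed
        return b''.join(zd.decompress(c) for c in chunks)

    decompressors = {'UN': _un, 'BZ': _bz, 'GZ': _gz}

    payload = b'some changegroup bytes'
    for alg in ('UN', 'GZ', 'BZ'):
        z = compressors[alg]()
        data = z.compress(payload) + z.flush()
        if alg == 'BZ':
            data = data[2:]   # simulate the reader having eaten "...BZ"
        assert decompressors[alg]([data]) == payload
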