@@ -58,7 +58,7 @@ def aslist(obj, sep=None, strip=True):
     :param sep:
     :param strip:
     """
+    if isinstance(obj, str):
         if obj in ['', ""]:
             return []
 
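Note: Python 3 has a single text type, `str`, and the Python 2 names `basestring`/`unicode` are gone, so one `isinstance(obj, str)` check covers text input here. A self-contained sketch of an aslist-style helper built around that check (illustrative only, assuming the behaviour visible in this hunk, not the repository's full implementation):

    def aslist(obj, sep=None, strip=True):
        # Text input: empty strings mean "no items", otherwise split on sep.
        if isinstance(obj, str):
            if obj in ['', ""]:
                return []
            lst = obj.split(sep)
            return [v.strip() for v in lst] if strip else lst
        # Sequences pass through; None becomes an empty list; scalars get wrapped.
        if isinstance(obj, (list, tuple)):
            return list(obj)
        return [] if obj is None else [obj]

    assert aslist('a, b, c', sep=',') == ['a', 'b', 'c']
    assert aslist('') == []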
@@ -54,19 +54,19 @@ class TestLFSApplication(object):
     def test_app_deprecated_endpoint(self, git_lfs_app):
         response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
         assert response.status_code == 501
+        assert json.loads(response.text) == {'message': 'LFS: v1 api not supported'}
 
     def test_app_lock_verify_api_not_available(self, git_lfs_app):
         response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
         assert response.status_code == 501
         assert json.loads(response.text) == {
+            'message': 'GIT LFS locking api not supported'}
 
     def test_app_lock_api_not_available(self, git_lfs_app):
         response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
         assert response.status_code == 501
         assert json.loads(response.text) == {
+            'message': 'GIT LFS locking api not supported'}
 
     def test_app_batch_api_missing_auth(self, git_lfs_app):
         git_lfs_app.post_json(
@@ -77,14 +77,14 @@ class TestLFSApplication(object):
             '/repo/info/lfs/objects/batch', params={}, status=400,
             extra_environ=http_auth)
         assert json.loads(response.text) == {
+            'message': 'unsupported operation mode: `None`'}
 
     def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
         response = git_lfs_app.post_json(
             '/repo/info/lfs/objects/batch', params={'operation': 'download'},
             status=400, extra_environ=http_auth)
         assert json.loads(response.text) == {
+            'message': 'missing objects data'}
 
     def test_app_batch_api_unsupported_data_in_objects(
             self, git_lfs_app, http_auth):
@@ -94,7 +94,7 @@ class TestLFSApplication(object):
             '/repo/info/lfs/objects/batch', params=params, status=400,
             extra_environ=http_auth)
         assert json.loads(response.text) == {
+            'message': 'unsupported data in objects'}
 
     def test_app_batch_api_download_missing_object(
             self, git_lfs_app, http_auth):
@@ -105,12 +105,12 @@ class TestLFSApplication(object):
             extra_environ=http_auth)
 
         expected_objects = [
+            {'authenticated': True,
+             'errors': {'error': {
+                 'code': 404,
+                 'message': 'object: 123 does not exist in store'}},
+             'oid': '123',
+             'size': '1024'}
         ]
         assert json.loads(response.text) == {
             'objects': expected_objects, 'transfer': 'basic'}
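Note: the `git_lfs_app` fixture used in these tests behaves like WebTest's `TestApp`: `post_json()` serialises `params` into a JSON request body, `status=` tells WebTest which HTTP status to accept instead of raising, `extra_environ` injects things like auth headers, and the decoded `response.text` is compared against a plain dict. A condensed, self-contained illustration of that assertion pattern against a stand-in WSGI app (the fake app and its behaviour below are invented for the example, not the real LFS handler):

    import json
    from webtest import TestApp

    def fake_lfs_app(environ, start_response):
        # Always answer 400 with a JSON error body, just enough to show the pattern.
        body = json.dumps({'message': 'missing objects data'}).encode('utf-8')
        start_response('400 Bad Request',
                       [('Content-Type', 'application/json; charset=utf-8')])
        return [body]

    app = TestApp(fake_lfs_app)
    response = app.post_json('/repo/info/lfs/objects/batch',
                             params={'operation': 'download'}, status=400)
    assert json.loads(response.text) == {'message': 'missing objects data'}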
@@ -130,14 +130,14 @@ class TestLFSApplication(object):
             extra_environ=http_auth)
 
         expected_objects = [
+            {'authenticated': True,
+             'actions': {
+                 'download': {
+                     'header': {'Authorization': 'Basic XXXXX'},
+                     'href': 'http://localhost/repo/info/lfs/objects/456'},
             },
+             'oid': '456',
+             'size': '1024'}
         ]
         assert json.loads(response.text) == {
             'objects': expected_objects, 'transfer': 'basic'}
@@ -149,18 +149,18 @@ class TestLFSApplication(object):
             '/repo/info/lfs/objects/batch', params=params,
             extra_environ=http_auth)
         expected_objects = [
+            {'authenticated': True,
+             'actions': {
+                 'upload': {
+                     'header': {'Authorization': 'Basic XXXXX',
+                                'Transfer-Encoding': 'chunked'},
+                     'href': 'http://localhost/repo/info/lfs/objects/123'},
+                 'verify': {
+                     'header': {'Authorization': 'Basic XXXXX'},
+                     'href': 'http://localhost/repo/info/lfs/verify'}
             },
+             'oid': '123',
+             'size': '1024'}
         ]
         assert json.loads(response.text) == {
             'objects': expected_objects, 'transfer': 'basic'}
@@ -172,18 +172,18 @@ class TestLFSApplication(object):
             '/repo/info/lfs/objects/batch', params=params,
             extra_environ=http_auth)
         expected_objects = [
+            {'authenticated': True,
+             'actions': {
+                 'upload': {
+                     'header': {'Authorization': 'Basic XXXXX',
+                                'Transfer-Encoding': 'chunked'},
+                     'href': 'https://localhost/repo/info/lfs/objects/123'},
+                 'verify': {
+                     'header': {'Authorization': 'Basic XXXXX'},
+                     'href': 'https://localhost/repo/info/lfs/verify'}
             },
+             'oid': '123',
+             'size': '1024'}
         ]
         assert json.loads(response.text) == {
             'objects': expected_objects, 'transfer': 'basic'}
@@ -195,7 +195,7 @@ class TestLFSApplication(object):
             status=400)
 
         assert json.loads(response.text) == {
+            'message': 'missing oid and size in request data'}
 
     def test_app_verify_api_missing_obj(self, git_lfs_app):
         params = {'oid': 'missing', 'size': '1024'}
@@ -204,7 +204,7 @@ class TestLFSApplication(object):
             status=404)
 
         assert json.loads(response.text) == {
+            'message': 'oid `missing` does not exists in store'}
 
     def test_app_verify_api_size_mismatch(self, git_lfs_app):
         oid = 'existing'
@@ -219,8 +219,8 @@ class TestLFSApplication(object):
             '/repo/info/lfs/verify', params=params, status=422)
 
         assert json.loads(response.text) == {
+            'message': 'requested file size mismatch '
+                       'store size:11 requested:1024'}
 
     def test_app_verify_api(self, git_lfs_app):
         oid = 'existing'
@@ -235,7 +235,7 @@ class TestLFSApplication(object):
             '/repo/info/lfs/verify', params=params)
 
         assert json.loads(response.text) == {
+            'message': {'size': 'ok', 'in_store': 'ok'}}
 
     def test_app_download_api_oid_not_existing(self, git_lfs_app):
         oid = 'missing'
@@ -244,7 +244,7 @@ class TestLFSApplication(object):
             '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
 
         assert json.loads(response.text) == {
+            'message': 'requested file with oid `missing` not found in store'}
 
     def test_app_download_api(self, git_lfs_app):
         oid = 'existing'
@@ -264,7 +264,7 @@ class TestLFSApplication(object):
         response = git_lfs_app.put(
             '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
 
+        assert json.loads(response.text) == {'upload': 'ok'}
 
         # verify that we actually wrote that OID
         oid_path = os.path.join(git_lfs_app._store, oid)
@@ -78,7 +78,7 @@ def get_ctx(repo, ref):
         ctx = revsymbol(repo, ref)
     except (LookupError, RepoLookupError):
         # Similar case as above but only for refs that are not numeric
+        if isinstance(ref, int):
             raise
         ctx = revsymbol(repo, ref)
     return ctx
@@ -579,7 +579,7 @@ def git_post_receive(unused_repo_path, r
                 except Exception:
                     cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', '"HEAD"',
                            '"refs/heads/%s"' % push_ref['name']]
-                    print("Setting default branch to %s" % push_ref['name'])
+                    print(("Setting default branch to %s" % push_ref['name']))
                     subprocessio.run_command(cmd, env=os.environ.copy())
 
                 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
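Note: the doubled parentheses around the print argument above are the usual footprint of a mechanical 2to3-style conversion: the Python 2 statement `print("..." % name)` already carried one set of parentheses around its operand, and the converter wraps that operand in a call, producing `print(("..." % name))`. The inner parentheses are plain grouping, not a tuple, so both spellings emit the same output in Python 3:

    name = 'master'  # illustrative value
    print("Setting default branch to %s" % name)
    print(("Setting default branch to %s" % name))  # same output, just extra grouping parens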
@@ -1,4 +1,4 @@
-from __future__ import absolute_import, division, unicode_literals
 
 import logging
 
@@ -1,4 +1,4 @@
-from __future__ import absolute_import, division, unicode_literals
 
 import re
 import random
@@ -1,4 +1,4 @@
-from __future__ import absolute_import, division, unicode_literals
 
 import socket
 
@@ -1,4 +1,4 @@
-from __future__ import absolute_import, division, unicode_literals
 
 import functools
 
@@ -1,4 +1,4 @@
-from __future__ import absolute_import, division, unicode_literals
 
 import socket
 
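Note: `absolute_import`, `division`, and `unicode_literals` describe the default behaviour of Python 3, so once Python 2 support is dropped the `from __future__` line at the top of these modules has no effect and can go. For instance, the following already holds in Python 3 without any `__future__` import:

    # True division of ints yields a float, and bare string literals are text (str).
    assert 3 / 2 == 1.5
    assert isinstance("text", str)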
@@ -265,7 +265,7 @@ class BaseRedisBackend(redis_backend.Red
 
     def get_mutex(self, key):
         if self.distributed_lock:
+            lock_key = '_lock_{0}'.format(safe_unicode(key))
             return get_mutex_lock(self.client, lock_key, self._lock_timeout,
                                   auto_renewal=self._lock_auto_renewal)
         else:
@@ -105,11 +105,11 @@ class RhodeCodeCacheRegion(CacheRegion):
         def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
 
             if not condition:
+                log.debug('Calling un-cached method:%s', user_func.__name__)
                 start = time.time()
                 result = user_func(*arg, **kw)
                 total = time.time() - start
+                log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
                 return result
 
             key = key_generator(*arg, **kw)
@@ -117,7 +117,7 @@ class RhodeCodeCacheRegion(CacheRegion):
             timeout = expiration_time() if expiration_time_is_callable \
                 else expiration_time
 
+            log.debug('Calling cached method:`%s`', user_func.__name__)
             return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
 
         def cache_decorator(user_func):
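Note: Python 3 function objects expose their name only as `__name__` (plus `__qualname__`); the Python 2 alias `func_name` no longer exists, which is what the logging calls above rely on. A quick check:

    def user_func():
        pass

    assert user_func.__name__ == 'user_func'      # Python 3 spelling
    assert not hasattr(user_func, 'func_name')    # Python 2 alias is gone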
@@ -81,7 +81,7 @@ class HgWeb(mercurial.hgweb.hgweb_mod.hg
         first_chunk = None
 
         try:
+            data = next(gen)
 
             def first_chunk():
                 yield data
@@ -47,7 +47,7 @@ class StreamFeeder(Thread):
         if type(source) in (type(''), bytes, bytearray): # string-like
             self.bytes = bytes(source)
         else: # can be either file pointer or file-like
+            if type(source) in (int, int): # file pointer it is
                 # converting file descriptor (int) stdin into file-like
                 try:
                     source = os.fdopen(source, 'rb', 16384)
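Note: `(int, int)` in the hunk above is a redundant pair, the kind of leftover a mechanical port produces when a Python 2 pair of integer types collapses into the single Python 3 `int`; `isinstance(source, int)` expresses the same file-descriptor check more directly. A minimal sketch of that branch under this assumption (a simplified stand-in, not the repository's StreamFeeder):

    import os

    def ensure_fileobj(source):
        # A raw file descriptor is just an int in Python 3; wrap it in a binary file object.
        if isinstance(source, int):
            return os.fdopen(source, 'rb', 16384)
        return source  # already file-like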
@@ -189,7 +189,7 @@ class BufferedGenerator(object):
     def __iter__(self):
         return self
 
-    def next(self):
+    def __next__(self):
         while not len(self.data) and not self.worker.EOF.is_set():
             self.worker.data_added.clear()
             self.worker.data_added.wait(0.2)
@@ -440,7 +440,7 @@ class SubprocessIOChunker(object):
     def __iter__(self):
         return self
 
-    def next(self):
+    def __next__(self):
         # Note: mikhail: We need to be sure that we are checking the return
         # code after the stdout stream is closed. Some processes, e.g. git
         # are doing some magic in between closing stdout and terminating the
@@ -449,7 +449,7 @@ class SubprocessIOChunker(object):
         result = None
         stop_iteration = None
         try:
+            result = next(self.output)
         except StopIteration as e:
             stop_iteration = e
 
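Note: Python 3 renamed the iterator hook from `next()` to `__next__()`, and the `next()` builtin is the portable way to pull one item from any iterator, which is exactly the shape of the renames in the hunks above. A tiny self-contained example of the same protocol (an illustrative class, not the repository's chunker):

    class CountDown(object):
        def __init__(self, n):
            self.n = n

        def __iter__(self):
            return self

        def __next__(self):  # Python 3 hook; Python 2 called this next()
            if self.n <= 0:
                raise StopIteration
            self.n -= 1
            return self.n

    assert list(CountDown(3)) == [2, 1, 0]
    assert next(CountDown(1)) == 0  # the builtin dispatches to __next__()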
@@ -39,7 +39,7 @@ def repeat(request):
 @pytest.fixture(scope='session')
 def vcsserver_port(request):
     port = get_available_port()
-    print('Using vcsserver port %s' % (port, ))
+    print(('Using vcsserver port %s' % (port, )))
     return port
 
 
@@ -113,7 +113,7 @@ class TestReraiseSafeExceptions(object):
         methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
         for method_name, method in methods:
             if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
+                assert method.__func__.__code__ == decorator.__code__
 
     @pytest.mark.parametrize('side_effect, expected_type', [
         (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
@@ -52,7 +52,7 @@ class TestReraiseSafeExceptions(object):
         decorator = hg.reraise_safe_exceptions(None)
         for method_name, method in methods:
             if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
+                assert method.__func__.__code__ == decorator.__code__
 
     @pytest.mark.parametrize('side_effect, expected_type', [
         (hgcompat.Abort(), 'abort'),
@@ -28,7 +28,7 @@ def test_patch_largefiles_capabilities_a
         patched_capabilities):
     lfproto = hgcompat.largefiles.proto
     hgpatches.patch_largefiles_capabilities()
+    assert lfproto._capabilities.__name__ == '_dynamic_capabilities'
 
 
 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
@@ -100,7 +100,7 @@ def test_does_not_fail_on_stderr(environ
 
 @pytest.mark.parametrize('size', [1, 10 ** 5])
 def test_output_with_no_input(size, environ):
-    print(type(environ))
+    print((type(environ)))
     data = 'X'
     args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
     output = ''.join(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
@@ -118,7 +118,7 @@ def test_output_with_no_input_does_not_f
         )
     )
 
-    print("{} {}".format(len(data * size), len(output)))
+    print(("{} {}".format(len(data * size), len(output))))
    assert output == data * size
 
 