@@ -58,7 +58,7 @@ def aslist(obj, sep=None, strip=True):
     :param sep:
     :param strip:
     """
-    if isinstance(obj, …
+    if isinstance(obj, str):
         if obj in ['', ""]:
             return []
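The hunk shows only a fragment of the helper, so the following is a hedged sketch of what a Python 3 `aslist` along these lines might look like; the split/strip behaviour and the non-string branches below are assumptions for illustration, not taken from this diff.

import typing

def aslist(obj, sep=None, strip=True) -> list:
    """Return `obj` as a list; split strings on `sep` (whitespace when None)."""
    if isinstance(obj, str):
        if obj in ['', ""]:
            return []
        # Assumed behaviour: split the string and optionally strip each item.
        lst = obj.split(sep)
        if strip:
            lst = [v.strip() for v in lst]
        return lst
    elif isinstance(obj, (list, tuple)):
        return list(obj)
    elif obj is None:
        return []
    else:
        return [obj]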
@@ -54,19 +54,19 @@ class TestLFSApplication(object):
     def test_app_deprecated_endpoint(self, git_lfs_app):
         response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
         assert response.status_code == 501
-        assert json.loads(response.text) == {…
+        assert json.loads(response.text) == {'message': 'LFS: v1 api not supported'}

     def test_app_lock_verify_api_not_available(self, git_lfs_app):
         response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
         assert response.status_code == 501
         assert json.loads(response.text) == {
-            …
+            'message': 'GIT LFS locking api not supported'}

     def test_app_lock_api_not_available(self, git_lfs_app):
         response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
         assert response.status_code == 501
         assert json.loads(response.text) == {
-            …
+            'message': 'GIT LFS locking api not supported'}

     def test_app_batch_api_missing_auth(self, git_lfs_app):
         git_lfs_app.post_json(
@@ -77,14 +77,14 @@ class TestLFSApplication(object):
             '/repo/info/lfs/objects/batch', params={}, status=400,
             extra_environ=http_auth)
         assert json.loads(response.text) == {
-            …
+            'message': 'unsupported operation mode: `None`'}

     def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
         response = git_lfs_app.post_json(
             '/repo/info/lfs/objects/batch', params={'operation': 'download'},
             status=400, extra_environ=http_auth)
         assert json.loads(response.text) == {
-            …
+            'message': 'missing objects data'}

     def test_app_batch_api_unsupported_data_in_objects(
             self, git_lfs_app, http_auth):
@@ -94,7 +94,7 @@ class TestLFSApplication(object):
             '/repo/info/lfs/objects/batch', params=params, status=400,
             extra_environ=http_auth)
         assert json.loads(response.text) == {
-            …
+            'message': 'unsupported data in objects'}

     def test_app_batch_api_download_missing_object(
             self, git_lfs_app, http_auth):
@@ -105,12 +105,12 @@ class TestLFSApplication(object):
             extra_environ=http_auth)

         expected_objects = [
-            {…
+            {'authenticated': True,
+             'errors': {'error': {
+                 'code': 404,
+                 'message': 'object: 123 does not exist in store'}},
+             'oid': '123',
+             'size': '1024'}
         ]
         assert json.loads(response.text) == {
             'objects': expected_objects, 'transfer': 'basic'}
@@ -130,14 +130,14 @@ class TestLFSApplication(object):
             extra_environ=http_auth)

         expected_objects = [
-            {…
+            {'authenticated': True,
+             'actions': {
+                 'download': {
+                     'header': {'Authorization': 'Basic XXXXX'},
+                     'href': 'http://localhost/repo/info/lfs/objects/456'},
             },
-            …
+             'oid': '456',
+             'size': '1024'}
         ]
         assert json.loads(response.text) == {
             'objects': expected_objects, 'transfer': 'basic'}
@@ -149,18 +149,18 @@ class TestLFSApplication(object):
             '/repo/info/lfs/objects/batch', params=params,
             extra_environ=http_auth)
         expected_objects = [
-            {…
+            {'authenticated': True,
+             'actions': {
+                 'upload': {
+                     'header': {'Authorization': 'Basic XXXXX',
+                                'Transfer-Encoding': 'chunked'},
+                     'href': 'http://localhost/repo/info/lfs/objects/123'},
+                 'verify': {
+                     'header': {'Authorization': 'Basic XXXXX'},
+                     'href': 'http://localhost/repo/info/lfs/verify'}
             },
-            …
+             'oid': '123',
+             'size': '1024'}
         ]
         assert json.loads(response.text) == {
             'objects': expected_objects, 'transfer': 'basic'}
@@ -172,18 +172,18 @@ class TestLFSApplication(object):
             '/repo/info/lfs/objects/batch', params=params,
             extra_environ=http_auth)
         expected_objects = [
-            {…
+            {'authenticated': True,
+             'actions': {
+                 'upload': {
+                     'header': {'Authorization': 'Basic XXXXX',
+                                'Transfer-Encoding': 'chunked'},
+                     'href': 'https://localhost/repo/info/lfs/objects/123'},
+                 'verify': {
+                     'header': {'Authorization': 'Basic XXXXX'},
+                     'href': 'https://localhost/repo/info/lfs/verify'}
             },
-            …
+             'oid': '123',
+             'size': '1024'}
         ]
         assert json.loads(response.text) == {
             'objects': expected_objects, 'transfer': 'basic'}
@@ -195,7 +195,7 @@ class TestLFSApplication(object):
             status=400)

         assert json.loads(response.text) == {
-            …
+            'message': 'missing oid and size in request data'}

     def test_app_verify_api_missing_obj(self, git_lfs_app):
         params = {'oid': 'missing', 'size': '1024'}
@@ -204,7 +204,7 @@ class TestLFSApplication(object):
             status=404)

         assert json.loads(response.text) == {
-            …
+            'message': 'oid `missing` does not exists in store'}

     def test_app_verify_api_size_mismatch(self, git_lfs_app):
         oid = 'existing'
@@ -219,8 +219,8 @@ class TestLFSApplication(object):
             '/repo/info/lfs/verify', params=params, status=422)

         assert json.loads(response.text) == {
-            …
-            …
+            'message': 'requested file size mismatch '
+                       'store size:11 requested:1024'}

     def test_app_verify_api(self, git_lfs_app):
         oid = 'existing'
@@ -235,7 +235,7 @@ class TestLFSApplication(object):
             '/repo/info/lfs/verify', params=params)

         assert json.loads(response.text) == {
-            …
+            'message': {'size': 'ok', 'in_store': 'ok'}}

     def test_app_download_api_oid_not_existing(self, git_lfs_app):
         oid = 'missing'
@@ -244,7 +244,7 @@ class TestLFSApplication(object):
             '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)

         assert json.loads(response.text) == {
-            …
+            'message': 'requested file with oid `missing` not found in store'}

     def test_app_download_api(self, git_lfs_app):
         oid = 'existing'
@@ -264,7 +264,7 @@ class TestLFSApplication(object):
         response = git_lfs_app.put(
             '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')

-        assert json.loads(response.text) == {…
+        assert json.loads(response.text) == {'upload': 'ok'}

         # verify that we actually wrote that OID
         oid_path = os.path.join(git_lfs_app._store, oid)
@@ -78,7 +78,7 @@ def get_ctx(repo, ref):
         ctx = revsymbol(repo, ref)
     except (LookupError, RepoLookupError):
         # Similar case as above but only for refs that are not numeric
-        if isinstance(ref, …
+        if isinstance(ref, int):
             raise
         ctx = revsymbol(repo, ref)
     return ctx
@@ -579,7 +579,7 @@ def git_post_receive(unused_repo_path, r
         except Exception:
             cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', '"HEAD"',
                    '"refs/heads/%s"' % push_ref['name']]
-            print("Setting default branch to %s" % push_ref['name'])
+            print(("Setting default branch to %s" % push_ref['name']))
             subprocessio.run_command(cmd, env=os.environ.copy())

         cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
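The doubled parentheses in the new line are the mechanical 2to3 rewrite of a Python 2 print statement; the inner pair is just a parenthesized expression, so both calls below behave identically. `branch_name` here is a made-up value standing in for `push_ref['name']`.

branch_name = 'master'  # hypothetical value for the example
print(("Setting default branch to %s" % branch_name))  # what 2to3 emits
print("Setting default branch to %s" % branch_name)    # equivalent, idiomatic form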
@@ -1,4 +1,4 @@
-from __future__ import absolute_import, division, unicode_literals
+

 import logging
@@ -1,4 +1,4 @@
-from __future__ import absolute_import, division, unicode_literals
+

 import re
 import random
@@ -1,4 +1,4 @@
-from __future__ import absolute_import, division, unicode_literals
+

 import socket
@@ -1,4 +1,4 @@
-from __future__ import absolute_import, division, unicode_literals
+

 import functools
@@ -1,4 +1,4 @@
-from __future__ import absolute_import, division, unicode_literals
+

 import socket
@@ -265,7 +265,7 @@ class BaseRedisBackend(redis_backend.Red

     def get_mutex(self, key):
         if self.distributed_lock:
-            lock_key = …
+            lock_key = '_lock_{0}'.format(safe_unicode(key))
             return get_mutex_lock(self.client, lock_key, self._lock_timeout,
                                   auto_renewal=self._lock_auto_renewal)
         else:
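`get_mutex_lock` is this code base's own helper and its implementation is not part of the hunk. As a rough illustration only, a distributed mutex with the acquire/release interface a dogpile.cache backend expects could be built on redis-py's `lock()`; the class and variable names below are invented for the sketch and the auto-renewal behaviour is intentionally left out.

import redis

class RedisLockMutex(object):
    """Illustrative acquire/release wrapper around redis-py's Lock (not the project's helper)."""

    def __init__(self, client, lock_key, timeout=None):
        # redis-py exposes client.lock(name, timeout=...); the lock key mirrors
        # the '_lock_<key>' naming used at the call site above.
        self._lock = client.lock(lock_key, timeout=timeout)

    def acquire(self, wait=True):
        return self._lock.acquire(blocking=wait)

    def release(self):
        self._lock.release()

# Hypothetical usage (requires a reachable Redis server):
client = redis.StrictRedis()
mutex = RedisLockMutex(client, '_lock_some_key', timeout=60)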
@@ -105,11 +105,11 @@ class RhodeCodeCacheRegion(CacheRegion):
         def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):

             if not condition:
-                log.debug('Calling un-cached method:%s', user_func.…
+                log.debug('Calling un-cached method:%s', user_func.__name__)
                 start = time.time()
                 result = user_func(*arg, **kw)
                 total = time.time() - start
-                log.debug('un-cached method:%s took %.4fs', user_func.…
+                log.debug('un-cached method:%s took %.4fs', user_func.__name__, total)
                 return result

             key = key_generator(*arg, **kw)
@@ -117,7 +117,7 @@ class RhodeCodeCacheRegion(CacheRegion):
             timeout = expiration_time() if expiration_time_is_callable \
                 else expiration_time

-            log.debug('Calling cached method:`%s`', user_func.…
+            log.debug('Calling cached method:`%s`', user_func.__name__)
             return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

         def cache_decorator(user_func):
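The surrounding decorator only routes through `get_or_create` when `condition` holds and otherwise calls the function directly, logging the timing. A self-contained sketch of that conditional-caching pattern, using a plain dict in place of dogpile's region; all names here are illustrative, not the project's API.

import functools
import logging
import time

log = logging.getLogger(__name__)

def conditional_cache(condition, cache):
    """Cache results in `cache` (a dict) only when `condition` is true."""
    def decorator(user_func):
        @functools.wraps(user_func)
        def wrapper(*args, **kwargs):
            if not condition:
                log.debug('Calling un-cached method:%s', user_func.__name__)
                start = time.time()
                result = user_func(*args, **kwargs)
                log.debug('un-cached method:%s took %.4fs',
                          user_func.__name__, time.time() - start)
                return result
            # Simple key derived from the call signature.
            key = (user_func.__name__, args, tuple(sorted(kwargs.items())))
            if key not in cache:
                cache[key] = user_func(*args, **kwargs)
            return cache[key]
        return wrapper
    return decorator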
@@ -81,7 +81,7 @@ class HgWeb(mercurial.hgweb.hgweb_mod.hg
         first_chunk = None

         try:
-            data = …
+            data = next(gen)

             def first_chunk():
                 yield data
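The block above pulls the first chunk out of the response generator (so any immediate error surfaces before headers are sent) and then re-yields it in front of the rest. The same peek-and-rechain effect can be had with itertools.chain, as in this small sketch, which is not the hgweb code itself.

import itertools

def peek_first_chunk(gen):
    """Force the first item of a generator, then return an iterator that still yields everything."""
    try:
        first = next(gen)
    except StopIteration:
        return iter(())  # the generator was empty
    return itertools.chain([first], gen)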
@@ -47,7 +47,7 @@ class StreamFeeder(Thread):
         if type(source) in (type(''), bytes, bytearray):  # string-like
             self.bytes = bytes(source)
         else:  # can be either file pointer or file-like
-            if type(source) in (int, …
+            if type(source) in (int, int):  # file pointer it is
                 # converting file descriptor (int) stdin into file-like
                 try:
                     source = os.fdopen(source, 'rb', 16384)
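`(int, int)` is what 2to3 produces from a Python 2 tuple that also listed `long`; under Python 3 a plain `isinstance(source, int)` expresses the same check. A hedged sketch of the descriptor-to-file-like conversion this block performs, with an invented helper name:

import io
import os

def as_readable(source):
    """Return a binary file-like object for a file descriptor, bytes, or an existing file-like source."""
    if isinstance(source, int):  # a raw file descriptor, e.g. sys.stdin.fileno()
        return os.fdopen(source, 'rb', 16384)
    if isinstance(source, (bytes, bytearray)):
        return io.BytesIO(bytes(source))
    return source  # assume it is already file-like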
@@ -189,7 +189,7 @@ class BufferedGenerator(object):
     def __iter__(self):
         return self

-    def next(self):
+    def __next__(self):
         while not len(self.data) and not self.worker.EOF.is_set():
             self.worker.data_added.clear()
             self.worker.data_added.wait(0.2)
@@ -440,7 +440,7 @@ class SubprocessIOChunker(object):
     def __iter__(self):
         return self

-    def next(self):
+    def __next__(self):
         # Note: mikhail: We need to be sure that we are checking the return
         # code after the stdout stream is closed. Some processes, e.g. git
         # are doing some magic in between closing stdout and terminating the
@@ -449,7 +449,7 @@ class SubprocessIOChunker(object):
         result = None
         stop_iteration = None
         try:
-            result = self.output…
+            result = next(self.output)
         except StopIteration as e:
             stop_iteration = e
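These three hunks together move the streaming classes from the Python 2 iterator protocol (a `next` method) to the Python 3 one (`__next__`, consumed via the `next()` built-in and `for` loops). A minimal, self-contained illustration of that protocol, unrelated to the actual subprocessio classes:

class ChunkIterator(object):
    """Tiny stand-in showing the Python 3 iterator protocol used above."""

    def __init__(self, chunks):
        self._chunks = list(chunks)

    def __iter__(self):
        return self

    def __next__(self):
        if not self._chunks:
            raise StopIteration
        return self._chunks.pop(0)

    next = __next__  # optional alias some code bases keep for Python 2 callers

chunks = ChunkIterator([b'a', b'b'])
assert next(chunks) == b'a'      # the built-in dispatches to __next__
assert list(chunks) == [b'b']    # for-loops and list() use the same protocol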
@@ -39,7 +39,7 @@ def repeat(request):
 @pytest.fixture(scope='session')
 def vcsserver_port(request):
     port = get_available_port()
-    print('Using vcsserver port %s' % (port, ))
+    print(('Using vcsserver port %s' % (port, )))
     return port
@@ -113,7 +113,7 @@ class TestReraiseSafeExceptions(object):
         methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
         for method_name, method in methods:
             if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
-                assert method.…
+                assert method.__func__.__code__ == decorator.__code__

     @pytest.mark.parametrize('side_effect, expected_type', [
         (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
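The assertion compares the code object of each decorated bound method with the decorator's own code object; under Python 3 a bound method's underlying function lives in `__func__` (the Python 2 spelling was `im_func`). A small sketch of the same check outside the test suite, with a toy decorator standing in for the real `reraise_safe_exceptions`:

import functools

def reraise_safe_exceptions(func):
    """Toy decorator for illustration; not the project's implementation."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper

class Remote(object):
    @reraise_safe_exceptions
    def fetch(self):
        return 'ok'

method = Remote().fetch
# Every function produced by the decorator shares the wrapper's code object,
# so comparing code objects proves the method was wrapped.
assert method.__func__.__code__ == reraise_safe_exceptions(lambda: None).__code__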
@@ -52,7 +52,7 @@ class TestReraiseSafeExceptions(object):
         decorator = hg.reraise_safe_exceptions(None)
         for method_name, method in methods:
             if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
-                assert method.…
+                assert method.__func__.__code__ == decorator.__code__

     @pytest.mark.parametrize('side_effect, expected_type', [
         (hgcompat.Abort(), 'abort'),
@@ -28,7 +28,7 @@ def test_patch_largefiles_capabilities_a
         patched_capabilities):
     lfproto = hgcompat.largefiles.proto
     hgpatches.patch_largefiles_capabilities()
-    assert lfproto._capabilities.…
+    assert lfproto._capabilities.__name__ == '_dynamic_capabilities'


 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
@@ -100,7 +100,7 @@ def test_does_not_fail_on_stderr(environ

 @pytest.mark.parametrize('size', [1, 10 ** 5])
 def test_output_with_no_input(size, environ):
-    print(type(environ))
+    print((type(environ)))
     data = 'X'
     args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
     output = ''.join(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
@@ -118,7 +118,7 @@ def test_output_with_no_input_does_not_f
         )
     )

-    print("{} {}".format(len(data * size), len(output)))
+    print(("{} {}".format(len(data * size), len(output))))
     assert output == data * size