@@ -6,12 +6,29 @@ python:
     - 3.4
     - 3.3
     - 2.7
+    - pypy
 sudo: false
 before_install:
     - git clone --quiet --depth 1 https://github.com/minrk/travis-wheels travis-wheels
     - 'if [[ $GROUP != js* ]]; then COVERAGE=""; fi'
 install:
     - pip install "setuptools>=18.5"
+    # Installs PyPy (+ its Numpy). Based on @frol comment at:
+    # https://github.com/travis-ci/travis-ci/issues/5027
+    - |
+      if [ "$TRAVIS_PYTHON_VERSION" = "pypy" ]; then
+        export PYENV_ROOT="$HOME/.pyenv"
+        if [ -f "$PYENV_ROOT/bin/pyenv" ]; then
+          cd "$PYENV_ROOT" && git pull
+        else
+          rm -rf "$PYENV_ROOT" && git clone --depth 1 https://github.com/yyuu/pyenv.git "$PYENV_ROOT"
+        fi
+        export PYPY_VERSION="5.3.1"
+        "$PYENV_ROOT/bin/pyenv" install "pypy-$PYPY_VERSION"
+        virtualenv --python="$PYENV_ROOT/versions/pypy-$PYPY_VERSION/bin/python" "$HOME/virtualenvs/pypy-$PYPY_VERSION"
+        source "$HOME/virtualenvs/pypy-$PYPY_VERSION/bin/activate"
+        pip install https://bitbucket.org/pypy/numpy/get/master.zip
+      fi
     - pip install -f travis-wheels/wheelhouse -e file://$PWD#egg=ipython[test]
     - pip install codecov
 script:
@@ -23,4 +40,5 @@ after_success:
 
 matrix:
     allow_failures:
        - python: nightly
+       - python: pypy
@@ -85,7 +85,7 @@ import re
 import datetime
 from collections import deque
 
-from IPython.utils.py3compat import PY3, cast_unicode, string_types
+from IPython.utils.py3compat import PY3, PYPY, cast_unicode, string_types
 from IPython.utils.encoding import get_stream_enc
 
 from io import StringIO
@@ -605,7 +605,8 @@ def _dict_pprinter_factory(start, end, basetype=None):
 
         if cycle:
            return p.text('{...}')
-        p.begin_group(1, start)
+        step = len(start)
+        p.begin_group(step, start)
        keys = obj.keys()
        # if dict isn't large enough to be truncated, sort keys before displaying
        if not (p.max_seq_length and len(obj) >= p.max_seq_length):
@@ -621,7 +622,7 @@ def _dict_pprinter_factory(start, end, basetype=None):
             p.pretty(key)
            p.text(': ')
            p.pretty(obj[key])
-        p.end_group(1, end)
+        p.end_group(step, end)
    return inner
 
 
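The two hunks above replace the hard-coded group indent of 1 with step = len(start), so that when a dict-like object is too wide for one line its entries wrap to line up under the opening text instead of one column in. A minimal illustration, assuming Python 3 and an IPython checkout with this change applied (the expected shape mirrors the new tests added further down):

    from types import MappingProxyType
    from IPython.lib import pretty

    mp = MappingProxyType({k: k.upper() for k in "abc"})
    # A small max_width forces wrapping so the new indentation is visible.
    print(pretty.pretty(mp, max_width=25))
    # Roughly:
    # mappingproxy({'a': 'A',
    #               'b': 'B',
    #               'c': 'C'})
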
@@ -631,7 +632,11 @@ def _super_pprint(obj, p, cycle):
     p.pretty(obj.__thisclass__)
    p.text(',')
    p.breakable()
-    p.pretty(obj.__self__)
+    if PYPY: # In PyPy, super() objects don't have __self__ attributes
+        dself = obj.__repr__.__self__
+        p.pretty(None if dself is obj else dself)
+    else:
+        p.pretty(obj.__self__)
    p.end_group(8, '>')
 
 
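Some context for the __repr__.__self__ fallback above: looking __repr__ up on a super object yields a bound method, and when the super object was created without an instance that method is bound to the super object itself, which is why the branch prints None in that case. A quick check of that reasoning on CPython (the claim that PyPy's super objects lack __self__ comes only from the comment in the diff and is not verified here):

    class A(object): pass
    class B(A): pass

    unbound, bound = super(A), super(A, B())
    print(unbound.__self__)                           # None on CPython
    print(unbound.__repr__.__self__ is unbound)       # True  -> pretty-print None
    print(bound.__repr__.__self__ is bound.__self__)  # True  -> pretty-print the instance
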
@@ -665,8 +670,10 @@ def _type_pprint(obj, p, cycle):
     # Heap allocated types might not have the module attribute,
    # and others may set it to None.
 
-    # Checks for a __repr__ override in the metaclass
-    if type(obj).__repr__ is not type.__repr__:
+    # Checks for a __repr__ override in the metaclass. Can't compare the
+    # type(obj).__repr__ directly because in PyPy the representation function
+    # inherited from type isn't the same type.__repr__
+    if [m for m in _get_mro(type(obj)) if "__repr__" in vars(m)][:1] != [type]:
        _repr_pprint(obj, p, cycle)
        return
 
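The old identity test assumed that a type which inherits its repr from type exposes the very same function object as type.__repr__, which, per the new comment, does not hold on PyPy. The replacement instead walks the metaclass MRO and checks which class actually defines __repr__. A standalone sketch of that idea (using type(cls).__mro__ as a simplified stand-in for the module's _get_mro helper, and Python 3 class syntax):

    class Meta(type):
        def __repr__(cls):
            return "<custom %s>" % cls.__name__

    class Plain(object):
        pass

    class Fancy(object, metaclass=Meta):
        pass

    def metaclass_overrides_repr(cls):
        # The first class in the metaclass MRO that defines __repr__ decides
        # how the type is printed; only `type` itself means "no override".
        mro = type(cls).__mro__
        return [m for m in mro if "__repr__" in vars(m)][:1] != [type]

    print(metaclass_overrides_repr(Plain))  # False -> use the pretty type printer
    print(metaclass_overrides_repr(Fancy))  # True  -> fall back to repr()
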
@@ -753,10 +760,15 @@ _type_pprinters = {
 }
 
 try:
-    _type_pprinters[types.DictProxyType] = _dict_pprinter_factory('<dictproxy {', '}>')
+    # In PyPy, types.DictProxyType is dict, setting the dictproxy printer
+    # using dict.setdefault avoids overwritting the dict printer
+    _type_pprinters.setdefault(types.DictProxyType,
+                               _dict_pprinter_factory('dict_proxy({', '})'))
    _type_pprinters[types.ClassType] = _type_pprint
    _type_pprinters[types.SliceType] = _repr_pprint
 except AttributeError: # Python 3
+    _type_pprinters[types.MappingProxyType] = \
+        _dict_pprinter_factory('mappingproxy({', '})')
    _type_pprinters[slice] = _repr_pprint
 
 try:
@@ -7,11 +7,13 @@
 from __future__ import print_function
 
 from collections import Counter, defaultdict, deque, OrderedDict
+import types, string, ctypes
 
 import nose.tools as nt
 
 from IPython.lib import pretty
-from IPython.testing.decorators import skip_without, py2_only
+from IPython.testing.decorators import (skip_without, py2_only, py3_only,
+                                        cpython2_only)
 from IPython.utils.py3compat import PY3, unicode_to_str
 
 if PY3:
@@ -186,12 +188,14 @@ class SB(SA):
     pass
 
 def test_super_repr():
+    # "<super: module_name.SA, None>"
    output = pretty.pretty(super(SA))
-    nt.assert_in("SA", output)
+    nt.assert_regexp_matches(output, r"<super: \S+.SA, None>")
 
+    # "<super: module_name.SA, <module_name.SB at 0x...>>"
    sb = SB()
    output = pretty.pretty(super(SA, sb))
-    nt.assert_in("SA", output)
+    nt.assert_regexp_matches(output, r"<super: \S+.SA,\s+<\S+.SB at 0x\S+>>")
 
 
 def test_long_list():
@@ -436,3 +440,97 @@ def test_collections_counter():
     ]
    for obj, expected in cases:
        nt.assert_equal(pretty.pretty(obj), expected)
+
+@py3_only
+def test_mappingproxy():
+    MP = types.MappingProxyType
+    underlying_dict = {}
+    mp_recursive = MP(underlying_dict)
+    underlying_dict[2] = mp_recursive
+    underlying_dict[3] = underlying_dict
+
+    cases = [
+        (MP({}), "mappingproxy({})"),
+        (MP({None: MP({})}), "mappingproxy({None: mappingproxy({})})"),
+        (MP({k: k.upper() for k in string.ascii_lowercase}),
+         "mappingproxy({'a': 'A',\n"
+         "              'b': 'B',\n"
+         "              'c': 'C',\n"
+         "              'd': 'D',\n"
+         "              'e': 'E',\n"
+         "              'f': 'F',\n"
+         "              'g': 'G',\n"
+         "              'h': 'H',\n"
+         "              'i': 'I',\n"
+         "              'j': 'J',\n"
+         "              'k': 'K',\n"
+         "              'l': 'L',\n"
+         "              'm': 'M',\n"
+         "              'n': 'N',\n"
+         "              'o': 'O',\n"
+         "              'p': 'P',\n"
+         "              'q': 'Q',\n"
+         "              'r': 'R',\n"
+         "              's': 'S',\n"
+         "              't': 'T',\n"
+         "              'u': 'U',\n"
+         "              'v': 'V',\n"
+         "              'w': 'W',\n"
+         "              'x': 'X',\n"
+         "              'y': 'Y',\n"
+         "              'z': 'Z'})"),
+        (mp_recursive, "mappingproxy({2: {...}, 3: {2: {...}, 3: {...}}})"),
+        (underlying_dict,
+         "{2: mappingproxy({2: {...}, 3: {...}}), 3: {...}}"),
+    ]
+    for obj, expected in cases:
+        nt.assert_equal(pretty.pretty(obj), expected)
+
+@cpython2_only # In PyPy, types.DictProxyType is dict
+def test_dictproxy():
+    # This is the dictproxy constructor itself from the Python API,
+    DP = ctypes.pythonapi.PyDictProxy_New
+    DP.argtypes, DP.restype = (ctypes.py_object,), ctypes.py_object
+
+    underlying_dict = {}
+    mp_recursive = DP(underlying_dict)
+    underlying_dict[0] = mp_recursive
+    underlying_dict[-3] = underlying_dict
+
+    cases = [
+        (DP({}), "dict_proxy({})"),
+        (DP({None: DP({})}), "dict_proxy({None: dict_proxy({})})"),
+        (DP({k: k.lower() for k in string.ascii_uppercase}),
+         "dict_proxy({'A': 'a',\n"
+         "            'B': 'b',\n"
+         "            'C': 'c',\n"
+         "            'D': 'd',\n"
+         "            'E': 'e',\n"
+         "            'F': 'f',\n"
+         "            'G': 'g',\n"
+         "            'H': 'h',\n"
+         "            'I': 'i',\n"
+         "            'J': 'j',\n"
+         "            'K': 'k',\n"
+         "            'L': 'l',\n"
+         "            'M': 'm',\n"
+         "            'N': 'n',\n"
+         "            'O': 'o',\n"
+         "            'P': 'p',\n"
+         "            'Q': 'q',\n"
+         "            'R': 'r',\n"
+         "            'S': 's',\n"
+         "            'T': 't',\n"
+         "            'U': 'u',\n"
+         "            'V': 'v',\n"
+         "            'W': 'w',\n"
+         "            'X': 'x',\n"
+         "            'Y': 'y',\n"
+         "            'Z': 'z'})"),
+        (mp_recursive, "dict_proxy({-3: {-3: {...}, 0: {...}}, 0: {...}})"),
+    ]
+    for obj, expected in cases:
+        nt.assert_is_instance(obj, types.DictProxyType) # Meta-test
+        nt.assert_equal(pretty.pretty(obj), expected)
+    nt.assert_equal(pretty.pretty(underlying_dict),
+                    "{-3: {...}, 0: dict_proxy({-3: {...}, 0: {...}})}")
@@ -48,7 +48,7 @@ from .ipunittest import ipdoctest, ipdocstring
 from IPython.external.decorators import *
 
 # For onlyif_cmd_exists decorator
-from IPython.utils.py3compat import string_types, which, PY2, PY3
+from IPython.utils.py3compat import string_types, which, PY2, PY3, PYPY
 
 #-----------------------------------------------------------------------------
 # Classes and functions
@@ -336,6 +336,7 @@ skip_known_failure = knownfailureif(True,'This test is known to fail')
 known_failure_py3 = knownfailureif(sys.version_info[0] >= 3,
                                    'This test is known to fail on Python 3.')
 
+cpython2_only = skipif(PY3 or PYPY, "This test only runs on CPython 2.")
 py2_only = skipif(PY3, "This test only runs on Python 2.")
 py3_only = skipif(PY2, "This test only runs on Python 3.")
 
@@ -6,6 +6,7 @@ import sys
 import re
 import shutil
 import types
+import platform
 
 from .encoding import DEFAULT_ENCODING
 
@@ -292,6 +293,7 @@ else:
 
 
 PY2 = not PY3
+PYPY = platform.python_implementation() == "PyPy"
 
 
 def annotate(**kwargs):
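The new PYPY constant relies on platform.python_implementation(), which returns the interpreter name as a string ('CPython', 'PyPy', 'Jython' or 'IronPython'), so the flag is an ordinary boolean computed once at import time:

    import platform

    print(platform.python_implementation())  # 'CPython' here, 'PyPy' under PyPy
    PYPY = platform.python_implementation() == "PyPy"
    print(PYPY)
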
@@ -1,44 +1,28 @@
-# Tox (http://tox.testrun.org/) is a tool for running tests
-# in multiple virtualenvs. This configuration file will run the
-# test suite on all supported python versions. To use it, "pip install tox"
-# and then run "tox" from this directory.
-
-# Building the source distribution requires `invoke` and `lessc` to be on your PATH.
-# "pip install invoke" will install invoke. Less can be installed by
-# node.js' (http://nodejs.org/) package manager npm:
-# "npm install -g less".
-
-# Javascript tests need additional dependencies that can be installed
-# using node.js' package manager npm:
-# [*] casperjs: "npm install -g casperjs"
-# [*] slimerjs: "npm install -g slimerjs"
-# [*] phantomjs: "npm install -g phantomjs"
-
-# Note: qt4 versions break some tests with tornado versions >=4.0.
-
+; Tox (http://tox.testrun.org/) is a virtualenv manager for running tests in
+; multiple environments. This configuration file gets the requirements from
+; setup.py like a "pip install ipython[test]". To create the environments, it
+; requires every interpreter available/installed.
+; -- Commands --
+; pip install tox  # Installs tox
+; tox              # Runs the tests (call from the directory with tox.ini)
+; tox -r           # Ditto, but forcing the virtual environments to be rebuilt
+; tox -e py35,pypy # Runs only in the selected environments
+; tox -- --all -j  # Runs "iptest --all -j" in every environment
 
 [tox]
-envlist = py27, py33, py34
+envlist = py{36,35,34,33,27,py}
+skip_missing_interpreters = True
+toxworkdir = /tmp/tox_ipython
 
 [testenv]
+; PyPy requires its Numpy fork instead of "pip install numpy"
+; Other IPython/testing dependencies should be in setup.py, not here
 deps =
-    pyzmq
-    nose
-    tornado<4.0
-    jinja2
-    sphinx
-    pygments
-    jsonpointer
-    jsonschema
-    mistune
+    pypy: https://bitbucket.org/pypy/numpy/get/master.zip
+    py{36,35,34,33,27}: matplotlib
+    .[test]
 
-# To avoid loading IPython module in the current directory, change
-# current directory to ".tox/py*/tmp" before running test.
+; It's just to avoid loading the IPython package in the current directory
 changedir = {envtmpdir}
 
-commands =
-    iptest --all
-
-[testenv:py27]
-deps=
-    mock
-    {[testenv]deps}
+commands = iptest {posargs}