diff --git a/IPython/utils/openpy.py b/IPython/utils/openpy.py
index c90d2b5..d57f877 100644
--- a/IPython/utils/openpy.py
+++ b/IPython/utils/openpy.py
@@ -7,6 +7,7 @@ Much of the code is taken from the tokenize module in Python 3.2.
 
 import io
 from io import TextIOWrapper, BytesIO
+from pathlib import Path
 import re
 from tokenize import open, detect_encoding
 
@@ -72,11 +73,12 @@ def read_py_file(filename, skip_encoding_cookie=True):
     -------
     A unicode string containing the contents of the file.
     """
-    with open(filename) as f:  # the open function defined in this module.
+    filepath = Path(filename)
+    with filepath.open() as f:  # the open function defined in this module.
         if skip_encoding_cookie:
             return "".join(strip_encoding_cookie(f))
         else:
-            return f.read()
+            return filepath.read_text()
 
 def read_py_url(url, errors='replace', skip_encoding_cookie=True):
     """Read a Python file from a URL, using the encoding declared inside the file.
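
For context, a minimal usage sketch of read_py_file as patched above. It is not part of the diff; the file path is a placeholder, and the call signature shown is the one visible in the hunk header.

    # Usage sketch (assumed context, not part of the patch).
    from IPython.utils.openpy import read_py_file

    # With skip_encoding_cookie=True (the default), a PEP 263 coding cookie
    # in the first two lines is stripped from the returned text.
    source = read_py_file("example_module.py", skip_encoding_cookie=True)
    print(source[:200])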