##// END OF EJS Templates
Merge pull request #8240 from minrk/split-io...
Thomas Kluyver -
r21119:a229520a merge
parent child Browse files
Show More
@@ -0,0 +1,131 b''
1 # encoding: utf-8
2 """Tests for file IO"""
3
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
6
7 import io as stdlib_io
8 import os.path
9 import stat
10
11 import nose.tools as nt
12
13 from IPython.testing.decorators import skip_win32
14 from ..fileio import atomic_writing
15
16 from IPython.utils.tempdir import TemporaryDirectory
17
18 umask = 0
19
def test_atomic_writing():
    """atomic_writing() commits on success, rolls back on error, and
    preserves permissions and symlinks."""
    class CustomExc(Exception): pass

    with TemporaryDirectory() as td:
        target = os.path.join(td, 'penguin')
        with stdlib_io.open(target, 'w') as f:
            f.write(u'Before')

        if os.name != 'nt':
            os.chmod(target, 0o701)
            orig_mode = stat.S_IMODE(os.stat(target).st_mode)

        link = os.path.join(td, 'flamingo')
        try:
            os.symlink(target, link)
        except (AttributeError, NotImplementedError, OSError):
            # AttributeError: this Python build doesn't support symlinks
            # NotImplementedError: the system doesn't support them
            # OSError: the user lacks the privilege (Windows)
            have_symlink = False
        else:
            have_symlink = True

        # An exception inside the context must leave the file untouched.
        with nt.assert_raises(CustomExc):
            with atomic_writing(target) as f:
                f.write(u'Failing write')
                raise CustomExc

        with stdlib_io.open(target, 'r') as f:
            nt.assert_equal(f.read(), u'Before')

        # A clean exit replaces the contents.
        with atomic_writing(target) as f:
            f.write(u'Overwritten')
        with stdlib_io.open(target, 'r') as f:
            nt.assert_equal(f.read(), u'Overwritten')

        # Permission bits survive the replacement.
        if os.name != 'nt':
            nt.assert_equal(stat.S_IMODE(os.stat(target).st_mode), orig_mode)

        # Writing through a symlink updates the link's target.
        if have_symlink:
            with atomic_writing(link) as f:
                f.write(u'written from symlink')
            with stdlib_io.open(target, 'r') as f:
                nt.assert_equal(f.read(), u'written from symlink')
69 def _save_umask():
70 global umask
71 umask = os.umask(0)
72 os.umask(umask)
73
74 def _restore_umask():
75 os.umask(umask)
76
@skip_win32
@nt.with_setup(_save_umask, _restore_umask)
def test_atomic_writing_umask():
    """Files created by atomic_writing must honour the process umask."""
    with TemporaryDirectory() as td:
        for name, mask, expected in (('1', 0o022, 0o644), ('2', 0o057, 0o620)):
            os.umask(mask)
            path = os.path.join(td, name)
            with atomic_writing(path) as f:
                f.write(u'' + name)
            mode = stat.S_IMODE(os.stat(path).st_mode)
            nt.assert_equal(mode, expected, '{:o} != {:o}'.format(mode, expected))
94
95
def test_atomic_writing_newlines():
    """atomic_writing supports the same ``newline`` translation as io.open."""
    with TemporaryDirectory() as td:
        path = os.path.join(td, 'testfile')

        lf = u'a\nb\nc\n'
        plat = lf.replace(u'\n', os.linesep)
        crlf = lf.replace(u'\n', u'\r\n')

        def read_back():
            # newline='' disables translation, so we see what's on disk.
            with stdlib_io.open(path, 'r', newline='') as f:
                return f.read()

        # default: platform-native line endings
        with stdlib_io.open(path, 'w') as f:
            f.write(lf)
        nt.assert_equal(read_back(), plat)

        # newline='\n' forces LF
        with stdlib_io.open(path, 'w', newline='\n') as f:
            f.write(lf)
        nt.assert_equal(read_back(), lf)

        # newline='\r\n' forces CRLF
        with atomic_writing(path, newline='\r\n') as f:
            f.write(lf)
        nt.assert_equal(read_back(), crlf)

        # newline='' writes exactly what was given, no conversion
        text = u'crlf\r\ncr\rlf\n'
        with atomic_writing(path, newline='') as f:
            f.write(text)
        nt.assert_equal(read_back(), text)
@@ -0,0 +1,33 b''
1 # coding: utf-8
2 """io-related utilities"""
3
4 # Copyright (c) Jupyter Development Team.
5 # Distributed under the terms of the Modified BSD License.
6
7 import codecs
8 import sys
9 from IPython.utils.py3compat import PY3
10
11
def unicode_std_stream(stream='stdout'):
    u"""Get a wrapper to write unicode to stdout/stderr as UTF-8.

    This ignores environment variables and default encodings, to reliably
    write unicode to stdout or stderr.

    ::

        unicode_std_stream().write(u'ł@e¶ŧ←')
    """
    assert stream in ('stdout', 'stderr')
    target = getattr(sys, stream)

    if not PY3:
        # Python 2 file objects accept encoded bytes directly.
        return codecs.getwriter('utf-8')(target)

    if hasattr(target, 'buffer'):
        # Wrap the underlying binary buffer of the text stream.
        return codecs.getwriter('utf-8')(target.buffer)
    # sys.stdout has been replaced by something with no .buffer
    # (e.g. a StringIO) - use it directly.
    return target
@@ -0,0 +1,50 b''
1 # encoding: utf-8
2 """Tests for utils.io"""
3
4 # Copyright (c) Jupyter Development Team.
5 # Distributed under the terms of the Modified BSD License.
6
7 import io as stdlib_io
8 import sys
9
10 import nose.tools as nt
11
12 from IPython.testing.decorators import skipif
13 from ..io import unicode_std_stream
14 from IPython.utils.py3compat import PY3
15
16 if PY3:
17 from io import StringIO
18 else:
19 from StringIO import StringIO
20
def test_UnicodeStdStream():
    """unicode_std_stream() writes unicode through to a bytes-level stdout."""
    if PY3:
        raw = stdlib_io.BytesIO()
        wrapper = stdlib_io.TextIOWrapper(raw, encoding='ascii')
    else:
        raw = wrapper = stdlib_io.BytesIO()

    saved = sys.stdout
    sys.stdout = wrapper
    try:
        sample = u"@łe¶ŧ←"
        unicode_std_stream().write(sample)

        # The bytes that reached the raw stream must be the UTF-8 encoding.
        nt.assert_equal(raw.getvalue().decode('utf-8'), sample)
        assert not wrapper.closed
    finally:
        sys.stdout = saved
40
@skipif(not PY3, "Not applicable on Python 2")
def test_UnicodeStdStream_nowrap():
    """A replaced sys.stdout with no .buffer must be returned unwrapped."""
    saved = sys.stdout
    sys.stdout = StringIO()
    try:
        nt.assert_is(unicode_std_stream(), sys.stdout)
        assert not sys.stdout.closed
    finally:
        sys.stdout = saved
@@ -1,174 +1,256 b''
1 1 """
2 2 Utilities for file-based Contents/Checkpoints managers.
3 3 """
4 4
5 5 # Copyright (c) IPython Development Team.
6 6 # Distributed under the terms of the Modified BSD License.
7 7
8 8 import base64
9 9 from contextlib import contextmanager
10 10 import errno
11 11 import io
12 12 import os
13 13 import shutil
14 import tempfile
14 15
15 16 from tornado.web import HTTPError
16 17
17 18 from IPython.html.utils import (
18 19 to_api_path,
19 20 to_os_path,
20 21 )
21 22 from IPython import nbformat
22 from IPython.utils.io import atomic_writing
23 23 from IPython.utils.py3compat import str_to_unicode
24 24
25 25
26 def _copy_metadata(src, dst):
27 """Copy the set of metadata we want for atomic_writing.
28
29 Permission bits and flags. We'd like to copy file ownership as well, but we
30 can't do that.
31 """
32 shutil.copymode(src, dst)
33 st = os.stat(src)
34 if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
35 os.chflags(dst, st.st_flags)
36
@contextmanager
def atomic_writing(path, text=True, encoding='utf-8', **kwargs):
    """Context manager to write to a file only if the entire write is successful.

    This works by creating a temporary file in the same directory, and renaming
    it over the old file if the context is exited without an error. If other
    file names are hard linked to the target file, this relationship will not be
    preserved.

    On Windows, there is a small chink in the atomicity: the target file is
    deleted before renaming the temporary file over it. This appears to be
    unavoidable.

    Parameters
    ----------
    path : str
      The target file to write to.

    text : bool, optional
      Whether to open the file in text mode (i.e. to write unicode). Default is
      True.

    encoding : str, optional
      The encoding to use for files opened in text mode. Default is UTF-8.

    **kwargs
      Passed to :func:`io.open`.
    """
    # realpath doesn't work on Windows: http://bugs.python.org/issue9949
    # Luckily, we only need to resolve the file itself being a symlink, not
    # any of its directories, so this will suffice:
    if os.path.islink(path):
        path = os.path.join(os.path.dirname(path), os.readlink(path))

    # The temp file lives in the same directory so the final rename cannot
    # cross a filesystem boundary (cross-device renames are not atomic).
    dirname, basename = os.path.split(path)
    tmp_dir = tempfile.mkdtemp(prefix=basename, dir=dirname)
    tmp_path = os.path.join(tmp_dir, basename)
    if text:
        fileobj = io.open(tmp_path, 'w', encoding=encoding, **kwargs)
    else:
        fileobj = io.open(tmp_path, 'wb', **kwargs)

    try:
        yield fileobj
    except BaseException:
        # Was a bare ``except:``; keep the identical semantics (clean up and
        # re-raise on *any* exception, including KeyboardInterrupt and
        # SystemExit) while satisfying PEP 8 / flake8 E722.
        fileobj.close()
        shutil.rmtree(tmp_dir)
        raise

    # Flush to disk
    fileobj.flush()
    os.fsync(fileobj.fileno())

    # Written successfully, now rename it
    fileobj.close()

    # Copy permission bits, access time, etc.
    try:
        _copy_metadata(path, tmp_path)
    except OSError:
        # e.g. the file didn't already exist. Ignore any failure to copy metadata
        pass

    if os.name == 'nt' and os.path.exists(path):
        # Rename over existing file doesn't work on Windows
        os.remove(path)

    os.rename(tmp_path, path)
    shutil.rmtree(tmp_dir)
106
107
class FileManagerMixin(object):
    """
    Mixin for ContentsAPI classes that interact with the filesystem.

    Provides facilities for reading, writing, and copying both notebooks and
    generic files.

    Shared by FileContentsManager and FileCheckpoints.

    Note
    ----
    Classes using this mixin must provide the following attributes:

    root_dir : unicode
        A directory against which API-style paths are to be resolved.

    log : logging.Logger
    """

    @contextmanager
    def open(self, os_path, *args, **kwargs):
        """wrapper around io.open that turns permission errors into 403"""
        with self.perm_to_403(os_path):
            with io.open(os_path, *args, **kwargs) as f:
                yield f

    @contextmanager
    def atomic_writing(self, os_path, *args, **kwargs):
        """wrapper around atomic_writing that turns permission errors to 403"""
        with self.perm_to_403(os_path):
            with atomic_writing(os_path, *args, **kwargs) as f:
                yield f

    @contextmanager
    def perm_to_403(self, os_path=''):
        """context manager for turning permission errors into 403."""
        try:
            yield
        except (OSError, IOError) as e:
            if e.errno not in {errno.EPERM, errno.EACCES}:
                raise
            # make 403 error message without root prefix
            # this may not work perfectly on unicode paths on Python 2,
            # but nobody should be doing that anyway.
            blocked = os_path or str_to_unicode(e.filename or 'unknown file')
            api_path = to_api_path(blocked, root=self.root_dir)
            raise HTTPError(403, u'Permission denied: %s' % api_path)

    def _copy(self, src, dest):
        """copy src to dest

        like shutil.copy2, but log errors in copystat
        """
        shutil.copyfile(src, dest)
        try:
            shutil.copystat(src, dest)
        except OSError:
            # Metadata copy is best-effort; the content copy already succeeded.
            self.log.debug("copystat on %s failed", dest, exc_info=True)

    def _get_os_path(self, path):
        """Given an API path, return its file system path.

        Parameters
        ----------
        path : string
            The relative API path to the named file.

        Returns
        -------
        path : string
            Native, absolute OS path to for a file.

        Raises
        ------
        404: if path is outside root
        """
        root = os.path.abspath(self.root_dir)
        os_path = to_os_path(path, root)
        # Reject any path that resolves outside the configured root.
        if not (os.path.abspath(os_path) + os.path.sep).startswith(root):
            raise HTTPError(404, "%s is outside root contents directory" % path)
        return os_path

    def _read_notebook(self, os_path, as_version=4):
        """Read a notebook from an os path."""
        with self.open(os_path, 'r', encoding='utf-8') as f:
            try:
                return nbformat.read(f, as_version=as_version)
            except Exception as e:
                raise HTTPError(
                    400,
                    u"Unreadable Notebook: %s %r" % (os_path, e),
                )

    def _save_notebook(self, os_path, nb):
        """Save a notebook to an os_path."""
        with self.atomic_writing(os_path, encoding='utf-8') as f:
            nbformat.write(nb, f, version=nbformat.NO_CONVERT)

    def _read_file(self, os_path, format):
        """Read a non-notebook file.

        os_path: The path to be read.
        format:
          If 'text', the contents will be decoded as UTF-8.
          If 'base64', the raw bytes contents will be encoded as base64.
          If not specified, try to decode as UTF-8, and fall back to base64
        """
        if not os.path.isfile(os_path):
            raise HTTPError(400, "Cannot read non-file %s" % os_path)

        with self.open(os_path, 'rb') as f:
            bcontent = f.read()

        if format is None or format == 'text':
            # Try to interpret as unicode if format is unknown or if unicode
            # was explicitly requested.
            try:
                return bcontent.decode('utf8'), 'text'
            except UnicodeError:
                if format == 'text':
                    raise HTTPError(
                        400,
                        "%s is not UTF-8 encoded" % os_path,
                        reason='bad format',
                    )
        # Fall through: deliver the raw bytes as base64.
        return base64.encodestring(bcontent).decode('ascii'), 'base64'

    def _save_file(self, os_path, content, format):
        """Save content of a generic file."""
        if format not in {'text', 'base64'}:
            raise HTTPError(
                400,
                "Must specify format of file contents as 'text' or 'base64'",
            )
        try:
            if format == 'text':
                bcontent = content.encode('utf8')
            else:
                bcontent = base64.decodestring(content.encode('ascii'))
        except Exception as e:
            raise HTTPError(
                400, u'Encoding error saving %s: %s' % (os_path, e)
            )

        with self.atomic_writing(os_path, text=False) as f:
            f.write(bcontent)
@@ -1,347 +1,246 b''
1 1 # encoding: utf-8
2 2 """
3 3 IO related utilities.
4 4 """
5 5
6 #-----------------------------------------------------------------------------
7 # Copyright (C) 2008-2011 The IPython Development Team
8 #
9 # Distributed under the terms of the BSD License. The full license is in
10 # the file COPYING, distributed as part of this software.
11 #-----------------------------------------------------------------------------
6 # Copyright (c) IPython Development Team.
7 # Distributed under the terms of the Modified BSD License.
8
12 9 from __future__ import print_function
13 10 from __future__ import absolute_import
14 11
15 #-----------------------------------------------------------------------------
16 # Imports
17 #-----------------------------------------------------------------------------
12
18 13 import codecs
19 14 from contextlib import contextmanager
20 15 import io
21 16 import os
22 17 import shutil
23 import stat
24 18 import sys
25 19 import tempfile
20 import warnings
26 21 from .capture import CapturedIO, capture_output
27 22 from .py3compat import string_types, input, PY3
28 23
29 #-----------------------------------------------------------------------------
30 # Code
31 #-----------------------------------------------------------------------------
32
33 24
class IOStream:
    """Wrap a writable stream, hardening write() against encoding trouble.

    All stream methods not defined here are cloned from the wrapped object.
    """

    def __init__(self, stream, fallback=None):
        # Require a minimally file-like object; otherwise use the fallback.
        if not hasattr(stream, 'write') or not hasattr(stream, 'flush'):
            if fallback is None:
                raise ValueError("fallback required, but not specified")
            stream = fallback
        self.stream = stream
        self._swrite = stream.write

        # clone all methods not overridden:
        def clone(meth):
            return not hasattr(self, meth) and not meth.startswith('_')
        for meth in filter(clone, dir(stream)):
            setattr(self, meth, getattr(stream, meth))

    def __repr__(self):
        cls = self.__class__
        return '{mod}.{cls}({args})'.format(
            mod=cls.__module__, cls=cls.__name__, args=self.stream)

    def write(self, data):
        """Write data, falling back to print() if plain write() fails."""
        try:
            self._swrite(data)
        except:
            try:
                # print handles some unicode issues which may trip a plain
                # write() call. Emulate write() by using an empty end
                # argument.
                print(data, end='', file=self.stream)
            except:
                # if we get here, something is seriously broken.
                print('ERROR - failed to write data to stream:', self.stream,
                      file=sys.stderr)

    def writelines(self, lines):
        """Write each line in turn; accept a bare string as one line."""
        if isinstance(lines, string_types):
            lines = [lines]
        for line in lines:
            self.write(line)

    # This class used to have a writeln method, but regular files and streams
    # in Python don't have this method. We need to keep this completely
    # compatible so we removed it.

    @property
    def closed(self):
        return self.stream.closed

    def close(self):
        # Deliberately a no-op: the wrapped stream is left open.
        pass
86 77
# setup stdin/stdout/stderr to sys.stdin/sys.stdout/sys.stderr
# devnull serves as the fallback sink when a real stream is missing or
# not sufficiently file-like.
devnull = open(os.devnull, 'w')
stdin = IOStream(sys.stdin, fallback=devnull)
stdout = IOStream(sys.stdout, fallback=devnull)
stderr = IOStream(sys.stderr, fallback=devnull)
92 83
class IOTerm:
    """Bundle of stdin/stdout/stderr file-like objects for I/O operations.

    These are normally just sys.stdin, sys.stdout and sys.stderr, but for
    Windows they can be replaced to allow editing the strings before they
    are displayed."""

    # In the future, having IPython channel all its I/O operations through
    # this class will make it easier to embed it into other environments
    # which are not a normal terminal (such as a GUI-based shell)
    def __init__(self, stdin=None, stdout=None, stderr=None):
        # Fall back to this module's wrapped standard streams.
        this_module = sys.modules[__name__]
        self.stdin = IOStream(stdin, this_module.stdin)
        self.stdout = IOStream(stdout, this_module.stdout)
        self.stderr = IOStream(stderr, this_module.stderr)
108 99
109 100
class Tee(object):
    """A class to duplicate an output stream to stdout/err.

    This works in a manner very similar to the Unix 'tee' command.

    When the object is closed or deleted, it closes the original file given to
    it for duplication.
    """
    # Inspired by:
    # http://mail.python.org/pipermail/python-list/2007-May/442737.html

    def __init__(self, file_or_name, mode="w", channel='stdout'):
        """Construct a new Tee object.

        Parameters
        ----------
        file_or_name : filename or open filehandle (writable)
            File that will be duplicated

        mode : optional, valid mode for open().
            If a filename was give, open with this mode.

        channel : str, one of ['stdout', 'stderr']
        """
        if channel not in ['stdout', 'stderr']:
            raise ValueError('Invalid channel spec %s' % channel)

        # Anything sufficiently file-like is used as-is; otherwise it is
        # treated as a filename to open.
        if hasattr(file_or_name, 'write') and hasattr(file_or_name, 'seek'):
            self.file = file_or_name
        else:
            self.file = open(file_or_name, mode)
        self.channel = channel
        # Divert the chosen sys stream through this object, remembering the
        # original so close() can restore it.
        self.ostream = getattr(sys, channel)
        setattr(sys, channel, self)
        self._closed = False

    def close(self):
        """Close the file and restore the channel."""
        self.flush()
        setattr(sys, self.channel, self.ostream)
        self.file.close()
        self._closed = True

    def write(self, data):
        """Write data to both channels."""
        self.file.write(data)
        self.ostream.write(data)
        self.ostream.flush()

    def flush(self):
        """Flush both channels."""
        self.file.flush()
        self.ostream.flush()

    def __del__(self):
        # Restore the channel even if the caller forgot to close().
        if not self._closed:
            self.close()
167 158
168 159
def ask_yes_no(prompt, default=None, interrupt=None):
    """Asks a question and returns a boolean (y/n) answer.

    If default is given (one of 'y','n'), it is used if the user input is
    empty. If interrupt is given (one of 'y','n'), it is used if the user
    presses Ctrl-C. Otherwise the question is repeated until an answer is
    given.

    An EOF is treated as the default answer. If there is no default, an
    exception is raised to prevent infinite loops.

    Valid answers are: y/yes/n/no (match is not case sensitive)."""

    answers = {'y': True, 'n': False, 'yes': True, 'no': False}
    ans = None
    while ans not in answers:
        try:
            ans = input(prompt + ' ').lower()
            if not ans:
                # empty response falls back to the default (which may be
                # None, keeping the loop going)
                ans = default
        except KeyboardInterrupt:
            if interrupt:
                ans = interrupt
        except EOFError:
            if default in answers:
                ans = default
                print()
            else:
                # No default to fall back on: re-raise rather than loop
                # forever on a closed input stream.
                raise

    return answers[ans]
200 191
201 192
def temp_pyfile(src, ext='.py'):
    """Make a temporary python file, return filename and filehandle.

    Parameters
    ----------
    src : string or list of strings (no need for ending newlines if list)
      Source code to be written to the file.
      NOTE(review): write() below requires a single string; confirm whether
      list input was ever supported.

    ext : optional, string
      Extension for the generated file.

    Returns
    -------
    (filename, open filehandle)
      It is the caller's responsibility to close the open file and unlink it.
    """
    # mkstemp returns an already-open OS-level file descriptor; the previous
    # code discarded it (tempfile.mkstemp(ext)[1]), leaking one fd per call.
    # Close it and reopen the path with a regular Python file object.
    fd, fname = tempfile.mkstemp(ext)
    os.close(fd)
    f = open(fname, 'w')
    f.write(src)
    f.flush()
    return fname, f
223 214
224 def _copy_metadata(src, dst):
225 """Copy the set of metadata we want for atomic_writing.
226
227 Permission bits and flags. We'd like to copy file ownership as well, but we
228 can't do that.
229 """
230 shutil.copymode(src, dst)
231 st = os.stat(src)
232 if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
233 os.chflags(dst, st.st_flags)
234
235 @contextmanager
236 def atomic_writing(path, text=True, encoding='utf-8', **kwargs):
237 """Context manager to write to a file only if the entire write is successful.
238
239 This works by creating a temporary file in the same directory, and renaming
240 it over the old file if the context is exited without an error. If other
241 file names are hard linked to the target file, this relationship will not be
242 preserved.
243
244 On Windows, there is a small chink in the atomicity: the target file is
245 deleted before renaming the temporary file over it. This appears to be
246 unavoidable.
247
248 Parameters
249 ----------
250 path : str
251 The target file to write to.
252
253 text : bool, optional
254 Whether to open the file in text mode (i.e. to write unicode). Default is
255 True.
256
257 encoding : str, optional
258 The encoding to use for files opened in text mode. Default is UTF-8.
259
260 **kwargs
261 Passed to :func:`io.open`.
262 """
263 # realpath doesn't work on Windows: http://bugs.python.org/issue9949
264 # Luckily, we only need to resolve the file itself being a symlink, not
265 # any of its directories, so this will suffice:
266 if os.path.islink(path):
267 path = os.path.join(os.path.dirname(path), os.readlink(path))
268
269 dirname, basename = os.path.split(path)
270 tmp_dir = tempfile.mkdtemp(prefix=basename, dir=dirname)
271 tmp_path = os.path.join(tmp_dir, basename)
272 if text:
273 fileobj = io.open(tmp_path, 'w', encoding=encoding, **kwargs)
274 else:
275 fileobj = io.open(tmp_path, 'wb', **kwargs)
276
277 try:
278 yield fileobj
279 except:
280 fileobj.close()
281 shutil.rmtree(tmp_dir)
282 raise
283
284 # Flush to disk
285 fileobj.flush()
286 os.fsync(fileobj.fileno())
287
288 # Written successfully, now rename it
289 fileobj.close()
290
291 # Copy permission bits, access time, etc.
292 try:
293 _copy_metadata(path, tmp_path)
294 except OSError:
295 # e.g. the file didn't already exist. Ignore any failure to copy metadata
296 pass
297
298 if os.name == 'nt' and os.path.exists(path):
299 # Rename over existing file doesn't work on Windows
300 os.remove(path)
301
302 os.rename(tmp_path, path)
303 shutil.rmtree(tmp_dir)
304
def atomic_writing(*args, **kwargs):
    """DEPRECATED: moved to IPython.html.services.contents.fileio"""
    # Was a bare ``warn(...)`` call, but this module only does
    # ``import warnings`` - the undefined name raised NameError before the
    # deprecation message could ever be shown.
    warnings.warn("IPython.utils.io.atomic_writing has moved to IPython.html.services.contents.fileio")
    from IPython.html.services.contents.fileio import atomic_writing
    return atomic_writing(*args, **kwargs)
305 220
def raw_print(*args, **kw):
    """Raw print to sys.__stdout__, otherwise identical interface to print()."""
    # Write to the *original* interpreter stdout, bypassing any replacement
    # of sys.stdout, and flush immediately.
    sep = kw.get('sep', ' ')
    end = kw.get('end', '\n')
    print(*args, sep=sep, end=end, file=sys.__stdout__)
    sys.__stdout__.flush()
312 227
313 228
def raw_print_err(*args, **kw):
    """Raw print to sys.__stderr__, otherwise identical interface to print()."""
    # Write to the *original* interpreter stderr, bypassing any replacement
    # of sys.stderr, and flush immediately.
    sep = kw.get('sep', ' ')
    end = kw.get('end', '\n')
    print(*args, sep=sep, end=end, file=sys.__stderr__)
    sys.__stderr__.flush()
320 235
321 236
# Short aliases for quick debugging, do NOT use these in production code.
rprint = raw_print
rprinte = raw_print_err
325 240
326 def unicode_std_stream(stream='stdout'):
327 u"""Get a wrapper to write unicode to stdout/stderr as UTF-8.
328
329 This ignores environment variables and default encodings, to reliably write
330 unicode to stdout or stderr.
331 241
332 ::
333
334 unicode_std_stream().write(u'ł@e¶ŧ←')
335 """
336 assert stream in ('stdout', 'stderr')
337 stream = getattr(sys, stream)
338 if PY3:
339 try:
340 stream_b = stream.buffer
341 except AttributeError:
342 # sys.stdout has been replaced - use it directly
343 return stream
344 else:
345 stream_b = stream
346
347 return codecs.getwriter('utf-8')(stream_b)
def unicode_std_stream(stream='stdout'):
    """DEPRECATED, moved to jupyter_nbconvert.utils.io"""
    # Was a bare ``warn(...)`` call, but this module only does
    # ``import warnings`` - the undefined name raised NameError before the
    # deprecation message could ever be shown.
    warnings.warn("IPython.utils.io.unicode_std_stream has moved to jupyter_nbconvert.utils.io")
    from jupyter_nbconvert.utils.io import unicode_std_stream
    return unicode_std_stream(stream)
@@ -1,231 +1,87 b''
1 1 # encoding: utf-8
2 2 """Tests for io.py"""
3 3
4 4 # Copyright (c) IPython Development Team.
5 5 # Distributed under the terms of the Modified BSD License.
6 6
7 7 from __future__ import print_function
8 8 from __future__ import absolute_import
9 9
10 10 import io as stdlib_io
11 11 import os.path
12 12 import stat
13 13 import sys
14 14
15 15 from subprocess import Popen, PIPE
16 16 import unittest
17 17
18 18 import nose.tools as nt
19 19
20 20 from IPython.testing.decorators import skipif, skip_win32
21 from IPython.utils.io import (Tee, capture_output, unicode_std_stream,
22 atomic_writing,
23 )
21 from IPython.utils.io import Tee, capture_output
24 22 from IPython.utils.py3compat import doctest_refactor_print, PY3
25 23 from IPython.utils.tempdir import TemporaryDirectory
26 24
27 25 if PY3:
28 26 from io import StringIO
29 27 else:
30 28 from StringIO import StringIO
31 29
32 30
def test_tee_simple():
    "Very simple check with stdout only"
    chan = StringIO()
    text = 'Hello'
    tee = Tee(chan, channel='stdout')
    print(text, file=chan)
    nt.assert_equal(chan.getvalue(), text + "\n")
    # Tee hijacks sys.stdout on construction. The original test never
    # closed it, leaving sys.stdout replaced until garbage collection
    # happened to run __del__ - restore it deterministically.
    tee.close()
40 38
41 39
class TeeTestCase(unittest.TestCase):
    """Exercise Tee on both channels, tearing down via close() and __del__."""

    def tchan(self, channel, check='close'):
        trap = StringIO()
        chan = StringIO()
        text = 'Hello'

        # Swap a trap stream into sys so the Tee's passthrough is captured.
        std_ori = getattr(sys, channel)
        setattr(sys, channel, trap)

        tee = Tee(chan, channel=channel)
        print(text, end='', file=chan)
        setattr(sys, channel, std_ori)
        trap_val = trap.getvalue()
        nt.assert_equal(chan.getvalue(), text)
        # Tear down the tee the way the caller requested.
        if check == 'close':
            tee.close()
        else:
            del tee

    def test(self):
        for chan in ('stdout', 'stderr'):
            self.tchan(chan, 'close')
            self.tchan(chan, 'del')
66 64
def test_io_init():
    """Test that io.stdin/out/err exist at startup"""
    for name in ('stdin', 'stdout', 'stderr'):
        # Spawn a fresh interpreter so module import order matches startup.
        cmd = doctest_refactor_print(
            "from IPython.utils import io;print io.%s.__class__" % name)
        p = Popen([sys.executable, '-c', cmd], stdout=PIPE)
        p.wait()
        classname = p.stdout.read().strip().decode('ascii')
        # __class__ is a reference to the class object in Python 3, so we can't
        # just test for string equality.
        assert 'IPython.utils.io.IOStream' in classname, classname
78 76
def test_capture_output():
    """capture_output() context works"""
    with capture_output() as captured:
        print('hi, stdout')
        print('hi, stderr', file=sys.stderr)

    # Both channels must be recorded on the captured object.
    nt.assert_equal(captured.stdout, 'hi, stdout\n')
    nt.assert_equal(captured.stderr, 'hi, stderr\n')
88 86
89 def test_UnicodeStdStream():
90 # Test wrapping a bytes-level stdout
91 if PY3:
92 stdoutb = stdlib_io.BytesIO()
93 stdout = stdlib_io.TextIOWrapper(stdoutb, encoding='ascii')
94 else:
95 stdout = stdoutb = stdlib_io.BytesIO()
96
97 orig_stdout = sys.stdout
98 sys.stdout = stdout
99 try:
100 sample = u"@łe¶ŧ←"
101 unicode_std_stream().write(sample)
102
103 output = stdoutb.getvalue().decode('utf-8')
104 nt.assert_equal(output, sample)
105 assert not stdout.closed
106 finally:
107 sys.stdout = orig_stdout
108
109 @skipif(not PY3, "Not applicable on Python 2")
110 def test_UnicodeStdStream_nowrap():
111 # If we replace stdout with a StringIO, it shouldn't get wrapped.
112 orig_stdout = sys.stdout
113 sys.stdout = StringIO()
114 try:
115 nt.assert_is(unicode_std_stream(), sys.stdout)
116 assert not sys.stdout.closed
117 finally:
118 sys.stdout = orig_stdout
119
120 def test_atomic_writing():
121 class CustomExc(Exception): pass
122
123 with TemporaryDirectory() as td:
124 f1 = os.path.join(td, 'penguin')
125 with stdlib_io.open(f1, 'w') as f:
126 f.write(u'Before')
127
128 if os.name != 'nt':
129 os.chmod(f1, 0o701)
130 orig_mode = stat.S_IMODE(os.stat(f1).st_mode)
131
132 f2 = os.path.join(td, 'flamingo')
133 try:
134 os.symlink(f1, f2)
135 have_symlink = True
136 except (AttributeError, NotImplementedError, OSError):
137 # AttributeError: Python doesn't support it
138 # NotImplementedError: The system doesn't support it
139 # OSError: The user lacks the privilege (Windows)
140 have_symlink = False
141
142 with nt.assert_raises(CustomExc):
143 with atomic_writing(f1) as f:
144 f.write(u'Failing write')
145 raise CustomExc
146
147 # Because of the exception, the file should not have been modified
148 with stdlib_io.open(f1, 'r') as f:
149 nt.assert_equal(f.read(), u'Before')
150
151 with atomic_writing(f1) as f:
152 f.write(u'Overwritten')
153
154 with stdlib_io.open(f1, 'r') as f:
155 nt.assert_equal(f.read(), u'Overwritten')
156
157 if os.name != 'nt':
158 mode = stat.S_IMODE(os.stat(f1).st_mode)
159 nt.assert_equal(mode, orig_mode)
160
161 if have_symlink:
162 # Check that writing over a file preserves a symlink
163 with atomic_writing(f2) as f:
164 f.write(u'written from symlink')
165
166 with stdlib_io.open(f1, 'r') as f:
167 nt.assert_equal(f.read(), u'written from symlink')
168
169 def _save_umask():
170 global umask
171 umask = os.umask(0)
172 os.umask(umask)
173
174 def _restore_umask():
175 os.umask(umask)
176
177 @skip_win32
178 @nt.with_setup(_save_umask, _restore_umask)
179 def test_atomic_writing_umask():
180 with TemporaryDirectory() as td:
181 os.umask(0o022)
182 f1 = os.path.join(td, '1')
183 with atomic_writing(f1) as f:
184 f.write(u'1')
185 mode = stat.S_IMODE(os.stat(f1).st_mode)
186 nt.assert_equal(mode, 0o644, '{:o} != 644'.format(mode))
187
188 os.umask(0o057)
189 f2 = os.path.join(td, '2')
190 with atomic_writing(f2) as f:
191 f.write(u'2')
192 mode = stat.S_IMODE(os.stat(f2).st_mode)
193 nt.assert_equal(mode, 0o620, '{:o} != 620'.format(mode))
194
195
196 def test_atomic_writing_newlines():
197 with TemporaryDirectory() as td:
198 path = os.path.join(td, 'testfile')
199
200 lf = u'a\nb\nc\n'
201 plat = lf.replace(u'\n', os.linesep)
202 crlf = lf.replace(u'\n', u'\r\n')
203
204 # test default
205 with stdlib_io.open(path, 'w') as f:
206 f.write(lf)
207 with stdlib_io.open(path, 'r', newline='') as f:
208 read = f.read()
209 nt.assert_equal(read, plat)
210
211 # test newline=LF
212 with stdlib_io.open(path, 'w', newline='\n') as f:
213 f.write(lf)
214 with stdlib_io.open(path, 'r', newline='') as f:
215 read = f.read()
216 nt.assert_equal(read, lf)
217
218 # test newline=CRLF
219 with atomic_writing(path, newline='\r\n') as f:
220 f.write(lf)
221 with stdlib_io.open(path, 'r', newline='') as f:
222 read = f.read()
223 nt.assert_equal(read, crlf)
224
225 # test newline=no convert
226 text = u'crlf\r\ncr\rlf\n'
227 with atomic_writing(path, newline='') as f:
228 f.write(text)
229 with stdlib_io.open(path, 'r', newline='') as f:
230 read = f.read()
231 nt.assert_equal(read, text)
87
@@ -1,889 +1,891 b''
1 1 """Session object for building, serializing, sending, and receiving messages in
2 2 IPython. The Session object supports serialization, HMAC signatures, and
3 3 metadata on messages.
4 4
5 5 Also defined here are utilities for working with Sessions:
6 6 * A SessionFactory to be used as a base class for configurables that work with
7 7 Sessions.
8 8 * A Message object for convenience that allows attribute-access to the msg dict.
9 9 """
10 10
11 11 # Copyright (c) IPython Development Team.
12 12 # Distributed under the terms of the Modified BSD License.
13 13
14 14 import hashlib
15 15 import hmac
16 16 import logging
17 17 import os
18 18 import pprint
19 19 import random
20 20 import uuid
21 21 import warnings
22 22 from datetime import datetime
23 23
# Prefer the C-accelerated cPickle on Python 2; fall back to the plain module.
try:
    import cPickle
    pickle = cPickle
except:
    cPickle = None
    import pickle

# Pick the best pickle protocol available on this interpreter.
try:
    # py3
    PICKLE_PROTOCOL = pickle.DEFAULT_PROTOCOL
except AttributeError:
    PICKLE_PROTOCOL = pickle.HIGHEST_PROTOCOL

try:
    # We are using compare_digest to limit the surface of timing attacks
    from hmac import compare_digest
except ImportError:
    # Python < 2.7.7: When digests don't match no feedback is provided,
    # limiting the surface of attack
    def compare_digest(a,b): return a == b
44 44
45 45 import zmq
46 46 from zmq.utils import jsonapi
47 47 from zmq.eventloop.ioloop import IOLoop
48 48 from zmq.eventloop.zmqstream import ZMQStream
49 49
50 50 from IPython.core.release import kernel_protocol_version
51 51 from IPython.config.configurable import Configurable, LoggingConfigurable
52 from IPython.utils import io
53 52 from IPython.utils.importstring import import_item
54 53 from jupyter_client.jsonutil import extract_dates, squash_dates, date_default
55 54 from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type,
56 55 iteritems)
57 56 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
58 57 DottedObjectName, CUnicode, Dict, Integer,
59 58 TraitError,
60 59 )
61 60 from jupyter_client.adapter import adapt
61 from traitlets.log import get_logger
62
62 63
63 64 #-----------------------------------------------------------------------------
64 65 # utility functions
65 66 #-----------------------------------------------------------------------------
66 67
def squash_unicode(obj):
    """Recursively coerce unicode back to utf8-encoded bytestrings.

    Dicts and lists are modified in place (keys as well as values for
    dicts); a bare unicode string is returned encoded.  Other objects
    pass through unchanged.
    """
    if isinstance(obj, dict):
        # Snapshot the keys: we pop/insert entries while walking, which
        # would break iteration over a live dict view on Python 3.
        for key in list(obj.keys()):
            obj[key] = squash_unicode(obj[key])
            if isinstance(key, unicode_type):
                obj[squash_unicode(key)] = obj.pop(key)
    elif isinstance(obj, list):
        for i, v in enumerate(obj):
            obj[i] = squash_unicode(v)
    elif isinstance(obj, unicode_type):
        obj = obj.encode('utf8')
    return obj
80 81
#-----------------------------------------------------------------------------
# globals and defaults
#-----------------------------------------------------------------------------

# default values for the thresholds:
MAX_ITEMS = 64
MAX_BYTES = 1024

# ISO8601-ify datetime objects
# allow unicode
# disallow nan, because it's not actually valid JSON
json_packer = lambda obj: jsonapi.dumps(obj, default=date_default,
    ensure_ascii=False, allow_nan=False,
)
json_unpacker = lambda s: jsonapi.loads(s)

pickle_packer = lambda o: pickle.dumps(squash_dates(o), PICKLE_PROTOCOL)
pickle_unpacker = pickle.loads

# JSON is the default wire serialization.
default_packer = json_packer
default_unpacker = json_unpacker

# Wire-format delimiter separating ZMQ routing idents from message frames.
DELIM = b"<IDS|MSG>"
# singleton dummy tracker, which will always report as done
DONE = zmq.MessageTracker()

#-----------------------------------------------------------------------------
# Mixin tools for apps that use Sessions
#-----------------------------------------------------------------------------

# Command-line aliases for apps that configure a Session.
session_aliases = dict(
    ident = 'Session.session',
    user = 'Session.username',
    keyfile = 'Session.keyfile',
)

# Command-line flags toggling HMAC authentication of messages.
session_flags  = {
    'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
                            'keyfile' : '' }},
        """Use HMAC digests for authentication of messages.
        Setting this flag will generate a new UUID to use as the HMAC key.
        """),
    'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
        """Don't authenticate messages."""),
}
126 127
def default_secure(cfg):
    """Set the default behavior for a config environment to be secure.

    If Session.key/keyfile have not been set, set Session.key to
    a new random UUID.
    """
    warnings.warn("default_secure is deprecated", DeprecationWarning)
    already_keyed = ('Session' in cfg and
                     ('key' in cfg.Session or 'keyfile' in cfg.Session))
    if already_keyed:
        return
    # key/keyfile not specified, generate new UUID:
    cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
139 140
140 141
141 142 #-----------------------------------------------------------------------------
142 143 # Classes
143 144 #-----------------------------------------------------------------------------
144 145
class SessionFactory(LoggingConfigurable):
    """The Base class for configurables that have a Session, Context, logger,
    and IOLoop.
    """

    # Name under which this object's logger is registered.
    logname = Unicode('')
    def _logname_changed(self, name, old, new):
        # Rebind the logger whenever the configured name changes.
        self.log = logging.getLogger(new)

    # not configurable:
    context = Instance('zmq.Context')
    def _context_default(self):
        # Share the process-wide zmq context by default.
        return zmq.Context.instance()

    session = Instance('jupyter_client.session.Session',
                       allow_none=True)

    loop = Instance('zmq.eventloop.ioloop.IOLoop')
    def _loop_default(self):
        # Default to the singleton IOLoop for this process.
        return IOLoop.instance()

    def __init__(self, **kwargs):
        super(SessionFactory, self).__init__(**kwargs)

        if self.session is None:
            # construct the session
            self.session = Session(**kwargs)
172 173
173 174
class Message(object):
    """A simple message object that maps dict keys to attributes.

    A Message can be created from a dict and a dict from a Message instance
    simply by calling dict(msg_obj)."""

    def __init__(self, msg_dict):
        # Recursively promote nested dicts to Message objects so that
        # attribute access works at any depth.
        for key, value in iteritems(dict(msg_dict)):
            if isinstance(value, dict):
                value = Message(value)
            self.__dict__[key] = value

    # Having this iterator lets dict(msg_obj) work out of the box.
    def __iter__(self):
        return iter(iteritems(self.__dict__))

    def __repr__(self):
        return repr(self.__dict__)

    def __str__(self):
        return pprint.pformat(self.__dict__)

    def __contains__(self, k):
        return k in self.__dict__

    def __getitem__(self, k):
        return self.__dict__[k]
203 204
def msg_header(msg_id, msg_type, username, session):
    """Assemble a message header dict, stamping it with the current time
    and the kernel protocol version."""
    return {
        'msg_id': msg_id,
        'msg_type': msg_type,
        'username': username,
        'session': session,
        'date': datetime.now(),
        'version': kernel_protocol_version,
    }
208 209
def extract_header(msg_or_header):
    """Given a message or header, return the header.

    Accepts either a full message dict (its 'header' entry is used) or a
    bare header mapping (which must contain 'msg_id').  Falsy input yields
    an empty dict; anything else raises KeyError.
    """
    if not msg_or_header:
        return {}
    try:
        header = msg_or_header['header']
    except KeyError:
        # Not a full message; it must at least look like a header itself.
        msg_or_header['msg_id']  # KeyError here means it is neither
        header = msg_or_header
    if not isinstance(header, dict):
        header = dict(header)
    return header
227 228
class Session(Configurable):
    """Object for handling serialization and sending of messages.

    The Session object handles building messages and sending them
    with ZMQ sockets or ZMQStream objects. Objects can communicate with each
    other over the network via Session objects, and only need to work with the
    dict-based IPython message spec. The Session will handle
    serialization/deserialization, security, and metadata.

    Sessions support configurable serialization via packer/unpacker traits,
    and signing with HMAC digests via the key/keyfile traits.

    Parameters
    ----------

    debug : bool
        whether to trigger extra debugging statements
    packer/unpacker : str : 'json', 'pickle' or import_string
        importstrings for methods to serialize message parts. If just
        'json' or 'pickle', predefined JSON and pickle packers will be used.
        Otherwise, the entire importstring must be used.

        The functions must accept at least valid JSON input, and output *bytes*.

        For example, to use msgpack:
        packer = 'msgpack.packb', unpacker='msgpack.unpackb'
    pack/unpack : callables
        You can also set the pack/unpack callables for serialization directly.
    session : bytes
        the ID of this Session object. The default is to generate a new UUID.
    username : unicode
        username added to message headers. The default is to ask the OS.
    key : bytes
        The key used to initialize an HMAC signature. If unset, messages
        will not be signed or checked.
    keyfile : filepath
        The file containing a key. If this is set, `key` will be initialized
        to the contents of the file.

    """

    debug=Bool(False, config=True, help="""Debug output in the Session""")

    packer = DottedObjectName('json',config=True,
            help="""The name of the packer for serializing messages.
            Should be one of 'json', 'pickle', or an import name
            for a custom callable serializer.""")
    def _packer_changed(self, name, old, new):
        # Selecting a known packer also installs the matching unpacker;
        # a custom importstring only replaces the pack side.
        if new.lower() == 'json':
            self.pack = json_packer
            self.unpack = json_unpacker
            self.unpacker = new
        elif new.lower() == 'pickle':
            self.pack = pickle_packer
            self.unpack = pickle_unpacker
            self.unpacker = new
        else:
            self.pack = import_item(str(new))

    unpacker = DottedObjectName('json', config=True,
        help="""The name of the unpacker for unserializing messages.
        Only used with custom functions for `packer`.""")
    def _unpacker_changed(self, name, old, new):
        # Mirror image of _packer_changed.
        if new.lower() == 'json':
            self.pack = json_packer
            self.unpack = json_unpacker
            self.packer = new
        elif new.lower() == 'pickle':
            self.pack = pickle_packer
            self.unpack = pickle_unpacker
            self.packer = new
        else:
            self.unpack = import_item(str(new))

    session = CUnicode(u'', config=True,
        help="""The UUID identifying this session.""")
    def _session_default(self):
        u = unicode_type(uuid.uuid4())
        # Keep the bytes form in sync; it is what goes on the wire.
        self.bsession = u.encode('ascii')
        return u

    def _session_changed(self, name, old, new):
        self.bsession = self.session.encode('ascii')

    # bsession is the session as bytes
    bsession = CBytes(b'')

    username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
        help="""Username for the Session. Default is your system username.""",
        config=True)

    metadata = Dict({}, config=True,
        help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")

    # if 0, no adapting to do.
    adapt_version = Integer(0)

    # message signature related traits:

    key = CBytes(config=True,
        help="""execution key, for signing messages.""")
    def _key_default(self):
        return str_to_bytes(str(uuid.uuid4()))

    def _key_changed(self):
        self._new_auth()

    signature_scheme = Unicode('hmac-sha256', config=True,
        help="""The digest scheme used to construct the message signatures.
        Must have the form 'hmac-HASH'.""")
    def _signature_scheme_changed(self, name, old, new):
        if not new.startswith('hmac-'):
            raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)
        hash_name = new.split('-', 1)[1]
        try:
            self.digest_mod = getattr(hashlib, hash_name)
        except AttributeError:
            raise TraitError("hashlib has no such attribute: %s" % hash_name)
        self._new_auth()

    digest_mod = Any()
    def _digest_mod_default(self):
        return hashlib.sha256

    auth = Instance(hmac.HMAC, allow_none=True)

    def _new_auth(self):
        # Rebuild the base HMAC object; None disables signing entirely.
        if self.key:
            self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod)
        else:
            self.auth = None

    # Previously-seen signatures, kept to reject replayed messages.
    digest_history = Set()
    digest_history_size = Integer(2**16, config=True,
        help="""The maximum number of digests to remember.

        The digest history will be culled when it exceeds this value.
        """
    )

    keyfile = Unicode('', config=True,
        help="""path to file containing execution key.""")
    def _keyfile_changed(self, name, old, new):
        with open(new, 'rb') as f:
            self.key = f.read().strip()

    # for protecting against sends from forks
    pid = Integer()

    # serialization traits:

    pack = Any(default_packer) # the actual packer function
    def _pack_changed(self, name, old, new):
        if not callable(new):
            raise TypeError("packer must be callable, not %s"%type(new))

    unpack = Any(default_unpacker) # the actual packer function
    def _unpack_changed(self, name, old, new):
        # unpacker is not checked - it is assumed to be
        if not callable(new):
            raise TypeError("unpacker must be callable, not %s"%type(new))

    # thresholds:
    copy_threshold = Integer(2**16, config=True,
        help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
    buffer_threshold = Integer(MAX_BYTES, config=True,
        help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
    item_threshold = Integer(MAX_ITEMS, config=True,
        help="""The maximum number of items for a container to be introspected for custom serialization.
        Containers larger than this are pickled outright.
        """
    )

    def __init__(self, **kwargs):
        """create a Session object

        Parameters
        ----------

        debug : bool
            whether to trigger extra debugging statements
        packer/unpacker : str : 'json', 'pickle' or import_string
            importstrings for methods to serialize message parts. If just
            'json' or 'pickle', predefined JSON and pickle packers will be used.
            Otherwise, the entire importstring must be used.

            The functions must accept at least valid JSON input, and output
            *bytes*.

            For example, to use msgpack:
            packer = 'msgpack.packb', unpacker='msgpack.unpackb'
        pack/unpack : callables
            You can also set the pack/unpack callables for serialization
            directly.
        session : unicode (must be ascii)
            the ID of this Session object. The default is to generate a new
            UUID.
        bsession : bytes
            The session as bytes
        username : unicode
            username added to message headers. The default is to ask the OS.
        key : bytes
            The key used to initialize an HMAC signature. If unset, messages
            will not be signed or checked.
        signature_scheme : str
            The message digest scheme. Currently must be of the form 'hmac-HASH',
            where 'HASH' is a hashing function available in Python's hashlib.
            The default is 'hmac-sha256'.
            This is ignored if 'key' is empty.
        keyfile : filepath
            The file containing a key. If this is set, `key` will be
            initialized to the contents of the file.
        """
        super(Session, self).__init__(**kwargs)
        self._check_packers()
        # cached serialization of an empty dict, reused for empty content
        self.none = self.pack({})
        # ensure self._session_default() if necessary, so bsession is defined:
        self.session
        # remember which process created us, to guard sends after fork()
        self.pid = os.getpid()
        self._new_auth()
449 450
450 451 @property
451 452 def msg_id(self):
452 453 """always return new uuid"""
453 454 return str(uuid.uuid4())
454 455
    def _check_packers(self):
        """check packers for datetime support.

        Verifies that pack/unpack can round-trip a simple message, that the
        packed form is bytes, and that datetimes survive (otherwise wraps
        pack to squash dates to ISO8601 strings first).
        """
        pack = self.pack
        unpack = self.unpack

        # check simple serialization
        msg = dict(a=[1,'hi'])
        try:
            packed = pack(msg)
        except Exception as e:
            msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
            if self.packer == 'json':
                jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
            else:
                jsonmsg = ""
            raise ValueError(
                msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg)
            )

        # ensure packed message is bytes
        if not isinstance(packed, bytes):
            raise ValueError("message packed to %r, but bytes are required"%type(packed))

        # check that unpack is pack's inverse
        try:
            unpacked = unpack(packed)
            assert unpacked == msg
        except Exception as e:
            msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
            if self.packer == 'json':
                jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
            else:
                jsonmsg = ""
            raise ValueError(
                msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg)
            )

        # check datetime support
        msg = dict(t=datetime.now())
        try:
            unpacked = unpack(pack(msg))
            if isinstance(unpacked['t'], datetime):
                raise ValueError("Shouldn't deserialize to datetime")
        except Exception:
            # packer can't handle datetimes natively: wrap pack so dates are
            # squashed to ISO8601 strings before packing.
            self.pack = lambda o: pack(squash_dates(o))
            self.unpack = lambda s: unpack(s)
501 502
    def msg_header(self, msg_type):
        """Create a fresh header dict for a message of type *msg_type*."""
        return msg_header(self.msg_id, msg_type, self.username, self.session)
504 505
505 506 def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
506 507 """Return the nested message dict.
507 508
508 509 This format is different from what is sent over the wire. The
509 510 serialize/deserialize methods converts this nested message dict to the wire
510 511 format, which is a list of message parts.
511 512 """
512 513 msg = {}
513 514 header = self.msg_header(msg_type) if header is None else header
514 515 msg['header'] = header
515 516 msg['msg_id'] = header['msg_id']
516 517 msg['msg_type'] = header['msg_type']
517 518 msg['parent_header'] = {} if parent is None else extract_header(parent)
518 519 msg['content'] = {} if content is None else content
519 520 msg['metadata'] = self.metadata.copy()
520 521 if metadata is not None:
521 522 msg['metadata'].update(metadata)
522 523 return msg
523 524
524 525 def sign(self, msg_list):
525 526 """Sign a message with HMAC digest. If no auth, return b''.
526 527
527 528 Parameters
528 529 ----------
529 530 msg_list : list
530 531 The [p_header,p_parent,p_content] part of the message list.
531 532 """
532 533 if self.auth is None:
533 534 return b''
534 535 h = self.auth.copy()
535 536 for m in msg_list:
536 537 h.update(m)
537 538 return str_to_bytes(h.hexdigest())
538 539
    def serialize(self, msg, ident=None):
        """Serialize the message components to bytes.

        This is roughly the inverse of deserialize. The serialize/deserialize
        methods work with full message lists, whereas pack/unpack work with
        the individual message parts in the message list.

        Parameters
        ----------
        msg : dict or Message
            The next message dict as returned by the self.msg method.

        Returns
        -------
        msg_list : list
            The list of bytes objects to be sent with the format::

                [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,
                 p_metadata, p_content, buffer1, buffer2, ...]

            In this list, the ``p_*`` entities are the packed or serialized
            versions, so if JSON is used, these are utf8 encoded JSON strings.
        """
        content = msg.get('content', {})
        if content is None:
            # use the cached pre-packed empty dict
            content = self.none
        elif isinstance(content, dict):
            content = self.pack(content)
        elif isinstance(content, bytes):
            # content is already packed, as in a relayed message
            pass
        elif isinstance(content, unicode_type):
            # should be bytes, but JSON often spits out unicode
            content = content.encode('utf8')
        else:
            raise TypeError("Content incorrect type: %s"%type(content))

        real_message = [self.pack(msg['header']),
                        self.pack(msg['parent_header']),
                        self.pack(msg['metadata']),
                        content,
        ]

        to_send = []

        if isinstance(ident, list):
            # accept list of idents
            to_send.extend(ident)
        elif ident is not None:
            to_send.append(ident)
        to_send.append(DELIM)

        # signature covers exactly the four real message frames
        signature = self.sign(real_message)
        to_send.append(signature)

        to_send.extend(real_message)

        return to_send
597 598
    def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
             buffers=None, track=False, header=None, metadata=None):
        """Build and send a message via stream or socket.

        The message format used by this function internally is as follows:

        [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content,
         buffer1,buffer2,...]

        The serialize/deserialize methods convert the nested message dict into this
        format.

        Parameters
        ----------

        stream : zmq.Socket or ZMQStream
            The socket-like object used to send the data.
        msg_or_type : str or Message/dict
            Normally, msg_or_type will be a msg_type unless a message is being
            sent more than once. If a header is supplied, this can be set to
            None and the msg_type will be pulled from the header.

        content : dict or None
            The content of the message (ignored if msg_or_type is a message).
        header : dict or None
            The header dict for the message (ignored if msg_to_type is a message).
        parent : Message or dict or None
            The parent or parent header describing the parent of this message
            (ignored if msg_or_type is a message).
        ident : bytes or list of bytes
            The zmq.IDENTITY routing path.
        metadata : dict or None
            The metadata describing the message
        buffers : list or None
            The already-serialized buffers to be appended to the message.
        track : bool
            Whether to track. Only for use with Sockets, because ZMQStream
            objects cannot track messages.


        Returns
        -------
        msg : dict
            The constructed message.
        """
        if not isinstance(stream, zmq.Socket):
            # ZMQStreams and dummy sockets do not support tracking.
            track = False

        if isinstance(msg_or_type, (Message, dict)):
            # We got a Message or message dict, not a msg_type so don't
            # build a new Message.
            msg = msg_or_type
            buffers = buffers or msg.get('buffers', [])
        else:
            msg = self.msg(msg_or_type, content=content, parent=parent,
                           header=header, metadata=metadata)
        if not os.getpid() == self.pid:
            # refuse to send from a forked child: the socket belongs to
            # the parent process
            get_logger().warn("WARNING: attempted to send message from fork\n%s",
                msg
            )
            return
        buffers = [] if buffers is None else buffers
        if self.adapt_version:
            msg = adapt(msg, self.adapt_version)
        to_send = self.serialize(msg, ident)
        to_send.extend(buffers)
        longest = max([ len(s) for s in to_send ])
        copy = (longest < self.copy_threshold)

        if buffers and track and not copy:
            # only really track when we are doing zero-copy buffers
            tracker = stream.send_multipart(to_send, copy=False, track=True)
        else:
            # use dummy tracker, which will be done immediately
            tracker = DONE
            stream.send_multipart(to_send, copy=copy)

        if self.debug:
            pprint.pprint(msg)
            pprint.pprint(to_send)
            pprint.pprint(buffers)

        msg['tracker'] = tracker

        return msg
683 685
684 686 def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
685 687 """Send a raw message via ident path.
686 688
687 689 This method is used to send a already serialized message.
688 690
689 691 Parameters
690 692 ----------
691 693 stream : ZMQStream or Socket
692 694 The ZMQ stream or socket to use for sending the message.
693 695 msg_list : list
694 696 The serialized list of messages to send. This only includes the
695 697 [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
696 698 the message.
697 699 ident : ident or list
698 700 A single ident or a list of idents to use in sending.
699 701 """
700 702 to_send = []
701 703 if isinstance(ident, bytes):
702 704 ident = [ident]
703 705 if ident is not None:
704 706 to_send.extend(ident)
705 707
706 708 to_send.append(DELIM)
707 709 to_send.append(self.sign(msg_list))
708 710 to_send.extend(msg_list)
709 711 stream.send_multipart(to_send, flags, copy=copy)
710 712
    def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
        """Receive and unpack a message.

        Parameters
        ----------
        socket : ZMQStream or Socket
            The socket or stream to use in receiving.

        Returns
        -------
        [idents], msg
            [idents] is a list of idents and msg is a nested message dict of
            same format as self.msg returns.
        """
        if isinstance(socket, ZMQStream):
            socket = socket.socket
        try:
            msg_list = socket.recv_multipart(mode, copy=copy)
        except zmq.ZMQError as e:
            if e.errno == zmq.EAGAIN:
                # We can convert EAGAIN to None as we know in this case
                # recv_multipart won't return None.
                return None,None
            else:
                raise
        # split multipart message into identity list and message dict
        # invalid large messages can cause very expensive string comparisons
        idents, msg_list = self.feed_identities(msg_list, copy)
        try:
            return idents, self.deserialize(msg_list, content=content, copy=copy)
        except Exception as e:
            # TODO: handle it
            raise e
744 746
745 747 def feed_identities(self, msg_list, copy=True):
746 748 """Split the identities from the rest of the message.
747 749
748 750 Feed until DELIM is reached, then return the prefix as idents and
749 751 remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
750 752 but that would be silly.
751 753
752 754 Parameters
753 755 ----------
754 756 msg_list : a list of Message or bytes objects
755 757 The message to be split.
756 758 copy : bool
757 759 flag determining whether the arguments are bytes or Messages
758 760
759 761 Returns
760 762 -------
761 763 (idents, msg_list) : two lists
762 764 idents will always be a list of bytes, each of which is a ZMQ
763 765 identity. msg_list will be a list of bytes or zmq.Messages of the
764 766 form [HMAC,p_header,p_parent,p_content,buffer1,buffer2,...] and
765 767 should be unpackable/unserializable via self.deserialize at this
766 768 point.
767 769 """
768 770 if copy:
769 771 idx = msg_list.index(DELIM)
770 772 return msg_list[:idx], msg_list[idx+1:]
771 773 else:
772 774 failed = True
773 775 for idx,m in enumerate(msg_list):
774 776 if m.bytes == DELIM:
775 777 failed = False
776 778 break
777 779 if failed:
778 780 raise ValueError("DELIM not in msg_list")
779 781 idents, msg_list = msg_list[:idx], msg_list[idx+1:]
780 782 return [m.bytes for m in idents], msg_list
781 783
    def _add_digest(self, signature):
        """add a digest to history to protect against replay attacks"""
        if self.digest_history_size == 0:
            # no history, never add digests
            return

        self.digest_history.add(signature)
        if len(self.digest_history) > self.digest_history_size:
            # threshold reached, cull 10%
            self._cull_digest_history()
792 794
793 795 def _cull_digest_history(self):
794 796 """cull the digest history
795 797
796 798 Removes a randomly selected 10% of the digest history
797 799 """
798 800 current = len(self.digest_history)
799 801 n_to_cull = max(int(current // 10), current - self.digest_history_size)
800 802 if n_to_cull >= current:
801 803 self.digest_history = set()
802 804 return
803 805 to_cull = random.sample(self.digest_history, n_to_cull)
804 806 self.digest_history.difference_update(to_cull)
805 807
    def deserialize(self, msg_list, content=True, copy=True):
        """Unserialize a msg_list to a nested message dict.

        This is roughly the inverse of serialize. The serialize/deserialize
        methods work with full message lists, whereas pack/unpack work with
        the individual message parts in the message list.

        Parameters
        ----------
        msg_list : list of bytes or Message objects
            The list of message parts of the form [HMAC,p_header,p_parent,
            p_metadata,p_content,buffer1,buffer2,...].
        content : bool (True)
            Whether to unpack the content dict (True), or leave it packed
            (False).
        copy : bool (True)
            Whether msg_list contains bytes (True) or the non-copying Message
            objects in each place (False).

        Returns
        -------
        msg : dict
            The nested message dict with top-level keys [header, parent_header,
            content, buffers]. The buffers are returned as memoryviews.
        """
        minlen = 5
        message = {}
        if not copy:
            # pyzmq didn't copy the first parts of the message, so we'll do it
            for i in range(minlen):
                msg_list[i] = msg_list[i].bytes
        if self.auth is not None:
            signature = msg_list[0]
            if not signature:
                raise ValueError("Unsigned Message")
            # reject replayed messages before doing any unpacking
            if signature in self.digest_history:
                raise ValueError("Duplicate Signature: %r" % signature)
            self._add_digest(signature)
            check = self.sign(msg_list[1:5])
            # constant-time comparison to limit timing attacks
            if not compare_digest(signature, check):
                raise ValueError("Invalid Signature: %r" % signature)
        if not len(msg_list) >= minlen:
            raise TypeError("malformed message, must have at least %i elements"%minlen)
        header = self.unpack(msg_list[1])
        message['header'] = extract_dates(header)
        message['msg_id'] = header['msg_id']
        message['msg_type'] = header['msg_type']
        message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
        message['metadata'] = self.unpack(msg_list[3])
        if content:
            message['content'] = self.unpack(msg_list[4])
        else:
            message['content'] = msg_list[4]
        buffers = [memoryview(b) for b in msg_list[5:]]
        if buffers and buffers[0].shape is None:
            # force copy to workaround pyzmq #646
            buffers = [memoryview(b.bytes) for b in msg_list[5:]]
        message['buffers'] = buffers
        # adapt to the current version
        return adapt(message)
866 868
    def unserialize(self, *args, **kwargs):
        """Deprecated alias for :meth:`deserialize`, kept for backward compatibility."""
        warnings.warn(
            "Session.unserialize is deprecated. Use Session.deserialize.",
            DeprecationWarning,
        )
        return self.deserialize(*args, **kwargs)
873 875
874 876
def test_msg2obj():
    """Round-trip dict -> Message -> dict, including nested attribute access."""
    source = dict(x=1)
    msg = Message(source)
    assert msg.x == source['x']

    source['y'] = dict(z=1)
    msg = Message(source)
    assert msg.y.z == source['y']['z']

    k1, k2 = 'y', 'z'
    assert msg[k1][k2] == source[k1][k2]

    roundtrip = dict(msg)
    assert source['x'] == roundtrip['x']
    assert source['y']['z'] == roundtrip['y']['z']
@@ -1,141 +1,128 b''
1 1 """Utility for calling pandoc"""
2 #-----------------------------------------------------------------------------
3 # Copyright (c) 2014 the IPython Development Team.
4 #
2 # Copyright (c) IPython Development Team.
5 3 # Distributed under the terms of the Modified BSD License.
6 #
7 # The full license is in the file COPYING.txt, distributed with this software.
8 #-----------------------------------------------------------------------------
9 4
10 #-----------------------------------------------------------------------------
11 # Imports
12 #-----------------------------------------------------------------------------
13 from __future__ import print_function
5 from __future__ import print_function, absolute_import
14 6
15 # Stdlib imports
16 7 import subprocess
17 8 import warnings
18 9 import re
19 10 from io import TextIOWrapper, BytesIO
20 11
21 # IPython imports
22 12 from IPython.utils.py3compat import cast_bytes
23 13 from IPython.utils.version import check_version
24 14 from IPython.utils.process import is_cmd_found, FindCmdError
25 15
26 16 from .exceptions import ConversionException
27 17
28 #-----------------------------------------------------------------------------
29 # Classes and functions
30 #-----------------------------------------------------------------------------
_minimal_version = "1.12.1"


def pandoc(source, fmt, to, extra_args=None, encoding='utf-8'):
    """Convert an input string in format `fmt` to format `to` via pandoc.

    Parameters
    ----------
    source : string
        Input string, assumed to be valid format `fmt`.
    fmt : string
        The name of the input format (markdown, etc.)
    to : string
        The name of the output format (html, etc.)
    extra_args : list, optional
        Extra command-line arguments appended to the pandoc invocation.
    encoding : string, optional
        Encoding used both to pass `source` to pandoc and to decode its
        output (default: utf-8).

    Returns
    -------
    out : unicode
        Output as returned by pandoc.

    Raises
    ------
    PandocMissing
        If pandoc is not installed.

    Any error messages generated by pandoc are printed to stderr.

    """
    cmd = ['pandoc', '-f', fmt, '-t', to]
    if extra_args:
        cmd.extend(extra_args)

    # this will raise an exception that will pop us out of here
    check_pandoc_version()

    # we can safely continue
    p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, _ = p.communicate(cast_bytes(source, encoding))
    # errors='replace' keeps us robust against stray bytes in pandoc's output
    out = TextIOWrapper(BytesIO(out), encoding, 'replace').read()
    return out.rstrip('\n')
70 57
71 58
def get_pandoc_version():
    """Return the installed Pandoc's version string, caching the result.

    The version is probed once by running ``pandoc -v`` and cached in the
    module-global ``__version``; subsequent calls return the cached value
    until :func:`clean_cache` is called.

    Raises
    ------
    PandocMissing
        If pandoc is unavailable.
    """
    global __version

    if __version is not None:
        return __version

    if not is_cmd_found('pandoc'):
        raise PandocMissing()

    probe = subprocess.check_output(['pandoc', '-v'],
                                    universal_newlines=True)
    first_line = probe.splitlines()[0]
    # A version token looks like "1.12.1": digits separated by dots.
    version_pattern = re.compile(r"^\d+(\.\d+){1,}$")
    for token in first_line.split():
        if version_pattern.match(token):
            __version = token
            break
    return __version
99 86
100 87
def check_pandoc_version():
    """Check whether the installed pandoc meets ``_minimal_version``.

    Returns
    -------
    bool
        True if the minimal pandoc version is met; False otherwise.
        A RuntimeWarning is emitted when the version is too old or
        cannot be determined.

    Raises
    ------
    PandocMissing
        If pandoc is unavailable.
    """
    v = get_pandoc_version()
    if v is None:
        # NOTE: the trailing space before "output" is required -- these
        # fragments are concatenated into one message.
        warnings.warn("Sorry, we cannot determine the version of pandoc.\n"
                      "Please consider reporting this issue and include the "
                      "output of pandoc --version.\nContinuing...",
                      RuntimeWarning, stacklevel=2)
        return False
    ok = check_version(v, _minimal_version)
    if not ok:
        # Same here: explicit space after "Try updating." so the URL does
        # not fuse with the preceding sentence.
        warnings.warn("You are using an old version of pandoc (%s)\n" % v +
                      "Recommended version is %s.\nTry updating. " % _minimal_version +
                      "http://johnmacfarlane.net/pandoc/installing.html.\nContinuing with doubts...",
                      RuntimeWarning, stacklevel=2)
    return ok
123 110
124 111 #-----------------------------------------------------------------------------
125 112 # Exception handling
126 113 #-----------------------------------------------------------------------------
class PandocMissing(ConversionException):
    """Exception raised when Pandoc is missing. """

    def __init__(self, *args, **kwargs):
        # Ignore caller-supplied args and always present the install hint.
        message = ("Pandoc wasn't found.\n" +
                   "Please check that pandoc is installed:\n" +
                   "http://johnmacfarlane.net/pandoc/installing.html")
        super(PandocMissing, self).__init__(message)
133 120
134 121 #-----------------------------------------------------------------------------
135 122 # Internal state management
136 123 #-----------------------------------------------------------------------------
def clean_cache():
    """Forget the cached pandoc version so the next query re-probes pandoc."""
    global __version
    __version = None


# Module-level cache for the pandoc version string;
# populated lazily by get_pandoc_version().
__version = None
@@ -1,34 +1,23 b''
1 1 """
2 2 Contains Stdout writer
3 3 """
4 #-----------------------------------------------------------------------------
5 #Copyright (c) 2013, the IPython Development Team.
6 #
7 #Distributed under the terms of the Modified BSD License.
8 #
9 #The full license is in the file COPYING.txt, distributed with this software.
10 #-----------------------------------------------------------------------------
11 4
12 #-----------------------------------------------------------------------------
13 # Imports
14 #-----------------------------------------------------------------------------
5 # Copyright (c) Jupyter Development Team.
6 # Distributed under the terms of the Modified BSD License.
15 7
16 from IPython.utils import io
8 from jupyter_nbconvert.utils import io
17 9 from .base import WriterBase
18 10
19 #-----------------------------------------------------------------------------
20 # Classes
21 #-----------------------------------------------------------------------------
22 11
class StdoutWriter(WriterBase):
    """Consumes output from nbconvert export...() methods and writes to the
    stdout stream."""

    def write(self, output, resources, **kw):
        """
        Consume and write Jinja output.

        See base for more...
        """
        # unicode_std_stream() gives a stdout wrapper that accepts unicode
        # safely on both Python 2 and 3.
        io.unicode_std_stream().write(output)
General Comments 0
You need to be logged in to leave comments. Login now