Uploaded Test files

Batuhan Berk Başoğlu 2020-11-12 11:05:57 -05:00
parent f584ad9d97
commit 2e81cb7d99
16627 changed files with 2065359 additions and 102444 deletions

View file

@@ -0,0 +1,49 @@
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import shutil
import sys
import tempfile
try:
from unittest.mock import patch
except ImportError:
from mock import patch
from jupyter_core import paths as jpaths
from IPython import paths as ipaths
from ipykernel.kernelspec import install
pjoin = os.path.join
tmp = None
patchers = []
def setup():
"""setup temporary env for tests"""
global tmp
tmp = tempfile.mkdtemp()
patchers[:] = [
patch.dict(os.environ, {
'HOME': tmp,
# Let tests work with --user install when HOME is changed:
'PYTHONPATH': os.pathsep.join(sys.path),
}),
]
for p in patchers:
p.start()
# install IPython in the temp home:
install(user=True)
def teardown():
for p in patchers:
p.stop()
try:
shutil.rmtree(tmp)
except (OSError, IOError):
# no such file
pass
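
For readers unfamiliar with the pattern above, here is a minimal standalone sketch of the same idea, sandboxing HOME inside a temporary directory using only the standard library; the ipykernel install step is omitted and the function name is illustrative.

import os
import shutil
import tempfile
from unittest.mock import patch

def demo_temp_home():
    """Run test code against a throwaway HOME directory."""
    tmp = tempfile.mkdtemp()
    patcher = patch.dict(os.environ, {'HOME': tmp})
    patcher.start()
    try:
        # anything executed here sees the sandboxed HOME
        assert os.environ['HOME'] == tmp
    finally:
        patcher.stop()
        shutil.rmtree(tmp, ignore_errors=True)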

View file

@@ -0,0 +1,17 @@
"""test utilities that use async/await syntax
a separate file to avoid syntax errors on Python 2
"""
import asyncio
def async_func():
"""Simple async function to schedule a task on the current eventloop"""
loop = asyncio.get_event_loop()
assert loop.is_running()
async def task():
await asyncio.sleep(1)
loop.create_task(task())
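
A hedged aside: async_func above only works when it is called from code that is already running inside an event loop, which is what the assert checks. A self-contained sketch of that situation (requires Python 3.7+ for asyncio.run; the names here are illustrative):

import asyncio

def schedule_on_running_loop():
    # mirrors async_func: valid only while a loop is running
    loop = asyncio.get_event_loop()
    assert loop.is_running()
    async def task():
        await asyncio.sleep(0.01)
    loop.create_task(task())

async def main():
    schedule_on_running_loop()
    await asyncio.sleep(0.05)  # give the scheduled task time to finish

asyncio.run(main())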

View file

@@ -0,0 +1,74 @@
"""Test async/await integration"""
from distutils.version import LooseVersion as V
import sys
import pytest
import IPython
from .utils import execute, flush_channels, start_new_kernel, TIMEOUT
from .test_message_spec import validate_message
KC = KM = None
def setup_function():
"""start the global kernel (if it isn't running) and return its client"""
global KM, KC
KM, KC = start_new_kernel()
flush_channels(KC)
def teardown_function():
KC.stop_channels()
KM.shutdown_kernel(now=True)
skip_without_async = pytest.mark.skipif(
sys.version_info < (3, 5) or V(IPython.__version__) < V("7.0"),
reason="IPython >=7 with async/await required",
)
@skip_without_async
def test_async_await():
flush_channels(KC)
msg_id, content = execute("import asyncio; await asyncio.sleep(0.1)", KC)
assert content["status"] == "ok", content
@pytest.mark.parametrize("asynclib", ["asyncio", "trio", "curio"])
@skip_without_async
def test_async_interrupt(asynclib, request):
try:
__import__(asynclib)
except ImportError:
pytest.skip("Requires %s" % asynclib)
request.addfinalizer(lambda: execute("%autoawait asyncio", KC))
flush_channels(KC)
msg_id, content = execute("%autoawait " + asynclib, KC)
assert content["status"] == "ok", content
flush_channels(KC)
msg_id = KC.execute(
"print('begin'); import {0}; await {0}.sleep(5)".format(asynclib)
)
busy = KC.get_iopub_msg(timeout=TIMEOUT)
validate_message(busy, "status", msg_id)
assert busy["content"]["execution_state"] == "busy"
echo = KC.get_iopub_msg(timeout=TIMEOUT)
validate_message(echo, "execute_input")
stream = KC.get_iopub_msg(timeout=TIMEOUT)
# wait for the stream output to be sure kernel is in the async block
validate_message(stream, "stream")
assert stream["content"]["text"] == "begin\n"
KM.interrupt_kernel()
reply = KC.get_shell_msg()["content"]
assert reply["status"] == "error", reply
assert reply["ename"] in {"CancelledError", "KeyboardInterrupt"}
flush_channels(KC)

View file

@@ -0,0 +1,123 @@
"""Tests for kernel connection utilities"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import errno
import json
import os
from unittest.mock import patch
import pytest
import zmq
from traitlets.config import Config
from ipython_genutils.tempdir import TemporaryDirectory, TemporaryWorkingDirectory
from ipython_genutils.py3compat import str_to_bytes
from ipykernel import connect
from ipykernel.kernelapp import IPKernelApp
sample_info = dict(ip='1.2.3.4', transport='ipc',
shell_port=1, hb_port=2, iopub_port=3, stdin_port=4, control_port=5,
key=b'abc123', signature_scheme='hmac-md5',
)
class DummyKernelApp(IPKernelApp):
def _default_shell_port(self):
return 0
def initialize(self, argv=[]):
self.init_profile_dir()
self.init_connection_file()
def test_get_connection_file():
cfg = Config()
with TemporaryWorkingDirectory() as d:
cfg.ProfileDir.location = d
cf = 'kernel.json'
app = DummyKernelApp(config=cfg, connection_file=cf)
app.initialize()
profile_cf = os.path.join(app.connection_dir, cf)
assert profile_cf == app.abs_connection_file
with open(profile_cf, 'w') as f:
f.write("{}")
assert os.path.exists(profile_cf)
assert connect.get_connection_file(app) == profile_cf
app.connection_file = cf
assert connect.get_connection_file(app) == profile_cf
def test_get_connection_info():
with TemporaryDirectory() as d:
cf = os.path.join(d, 'kernel.json')
connect.write_connection_file(cf, **sample_info)
json_info = connect.get_connection_info(cf)
info = connect.get_connection_info(cf, unpack=True)
assert isinstance(json_info, str)
sub_info = {k:v for k,v in info.items() if k in sample_info}
assert sub_info == sample_info
info2 = json.loads(json_info)
info2['key'] = str_to_bytes(info2['key'])
        sub_info2 = {k:v for k,v in info2.items() if k in sample_info}
assert sub_info2 == sample_info
def test_port_bind_failure_raises(request):
cfg = Config()
with TemporaryWorkingDirectory() as d:
cfg.ProfileDir.location = d
cf = 'kernel.json'
app = DummyKernelApp(config=cfg, connection_file=cf)
request.addfinalizer(app.close)
app.initialize()
with patch.object(app, '_try_bind_socket') as mock_try_bind:
mock_try_bind.side_effect = zmq.ZMQError(-100, "fails for unknown error types")
with pytest.raises(zmq.ZMQError):
app.init_sockets()
assert mock_try_bind.call_count == 1
def test_port_bind_failure_recovery(request):
try:
errno.WSAEADDRINUSE
except AttributeError:
# Fake windows address in-use code
p = patch.object(errno, 'WSAEADDRINUSE', 12345, create=True)
p.start()
request.addfinalizer(p.stop)
cfg = Config()
with TemporaryWorkingDirectory() as d:
cfg.ProfileDir.location = d
cf = 'kernel.json'
app = DummyKernelApp(config=cfg, connection_file=cf)
request.addfinalizer(app.close)
app.initialize()
with patch.object(app, '_try_bind_socket') as mock_try_bind:
mock_try_bind.side_effect = [
zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind unix"),
zmq.ZMQError(errno.WSAEADDRINUSE, "fails for non-bind windows")
] + [0] * 100
# Shouldn't raise anything as retries will kick in
app.init_sockets()
def test_port_bind_failure_gives_up_retries(request):
cfg = Config()
with TemporaryWorkingDirectory() as d:
cfg.ProfileDir.location = d
cf = 'kernel.json'
app = DummyKernelApp(config=cfg, connection_file=cf)
request.addfinalizer(app.close)
app.initialize()
with patch.object(app, '_try_bind_socket') as mock_try_bind:
mock_try_bind.side_effect = zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind")
with pytest.raises(zmq.ZMQError):
app.init_sockets()
assert mock_try_bind.call_count == 100
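
The retry tests above rely on a unittest.mock detail that is easy to miss: when side_effect is a list, each call to the mock consumes the next element, and elements that are exceptions are raised instead of returned. A tiny sketch, independent of ipykernel:

from unittest.mock import Mock

bind = Mock(side_effect=[OSError("in use"), OSError("in use")] + [0] * 3)
for attempt in range(5):
    try:
        result = bind()  # the first two calls raise, later calls return 0
        break
    except OSError:
        continue
assert result == 0
assert bind.call_count == 3  # two failures plus one success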

View file

@@ -0,0 +1,181 @@
"""test IPython.embed_kernel()"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import sys
import time
import json
from contextlib import contextmanager
from subprocess import Popen, PIPE
from flaky import flaky
from jupyter_client import BlockingKernelClient
from jupyter_core import paths
from ipython_genutils import py3compat
from ipython_genutils.py3compat import unicode_type
SETUP_TIMEOUT = 60
TIMEOUT = 15
@contextmanager
def setup_kernel(cmd):
"""start an embedded kernel in a subprocess, and wait for it to be ready
Returns
-------
kernel_manager: connected KernelManager instance
"""
def connection_file_ready(connection_file):
"""Check if connection_file is a readable json file."""
if not os.path.exists(connection_file):
return False
try:
with open(connection_file) as f:
json.load(f)
return True
except ValueError:
return False
kernel = Popen([sys.executable, '-c', cmd], stdout=PIPE, stderr=PIPE)
try:
connection_file = os.path.join(
paths.jupyter_runtime_dir(),
'kernel-%i.json' % kernel.pid,
)
        # wait for connection file to exist, timing out after SETUP_TIMEOUT (60s)
tic = time.time()
while not connection_file_ready(connection_file) \
and kernel.poll() is None \
and time.time() < tic + SETUP_TIMEOUT:
time.sleep(0.1)
# Wait 100ms for the writing to finish
time.sleep(0.1)
if kernel.poll() is not None:
o,e = kernel.communicate()
e = py3compat.cast_unicode(e)
raise IOError("Kernel failed to start:\n%s" % e)
if not os.path.exists(connection_file):
if kernel.poll() is None:
kernel.terminate()
raise IOError("Connection file %r never arrived" % connection_file)
client = BlockingKernelClient(connection_file=connection_file)
client.load_connection_file()
client.start_channels()
client.wait_for_ready()
try:
yield client
finally:
client.stop_channels()
finally:
kernel.terminate()
@flaky(max_runs=3)
def test_embed_kernel_basic():
"""IPython.embed_kernel() is basically functional"""
cmd = '\n'.join([
'from IPython import embed_kernel',
'def go():',
' a=5',
' b="hi there"',
' embed_kernel()',
'go()',
'',
])
with setup_kernel(cmd) as client:
# oinfo a (int)
msg_id = client.inspect('a')
msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
content = msg['content']
assert content['found']
msg_id = client.execute("c=a*2")
msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
content = msg['content']
assert content['status'] == u'ok'
# oinfo c (should be 10)
msg_id = client.inspect('c')
msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
content = msg['content']
assert content['found']
text = content['data']['text/plain']
assert '10' in text
@flaky(max_runs=3)
def test_embed_kernel_namespace():
"""IPython.embed_kernel() inherits calling namespace"""
cmd = '\n'.join([
'from IPython import embed_kernel',
'def go():',
' a=5',
' b="hi there"',
' embed_kernel()',
'go()',
'',
])
with setup_kernel(cmd) as client:
# oinfo a (int)
msg_id = client.inspect('a')
msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
content = msg['content']
assert content['found']
text = content['data']['text/plain']
assert u'5' in text
# oinfo b (str)
msg_id = client.inspect('b')
msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
content = msg['content']
assert content['found']
text = content['data']['text/plain']
assert u'hi there' in text
# oinfo c (undefined)
msg_id = client.inspect('c')
msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
content = msg['content']
assert not content['found']
@flaky(max_runs=3)
def test_embed_kernel_reentrant():
"""IPython.embed_kernel() can be called multiple times"""
cmd = '\n'.join([
'from IPython import embed_kernel',
'count = 0',
'def go():',
' global count',
' embed_kernel()',
' count = count + 1',
'',
        'while True:',
' go()',
'',
])
with setup_kernel(cmd) as client:
for i in range(5):
msg_id = client.inspect('count')
msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
content = msg['content']
assert content['found']
text = content['data']['text/plain']
assert unicode_type(i) in text
# exit from embed_kernel
client.execute("get_ipython().exit_now = True")
msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
time.sleep(0.2)
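
setup_kernel above combines two generic patterns worth isolating: polling for a file that must both exist and parse as JSON (the kernel may still be writing it), and bounding that wait with a deadline. A minimal sketch using only the standard library; the helper name is made up:

import json
import os
import time

def wait_for_json_file(path, timeout=60, poll=0.1):
    """Return once *path* exists and parses as JSON; raise IOError on timeout."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        if os.path.exists(path):
            try:
                with open(path) as f:
                    json.load(f)
                return
            except ValueError:
                pass  # partially written file; keep polling
        time.sleep(poll)
    raise IOError("timed out waiting for %r" % path)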

View file

@@ -0,0 +1,46 @@
"""Test eventloop integration"""
import sys
import pytest
import tornado
from .utils import flush_channels, start_new_kernel, execute
KC = KM = None
def setup():
"""start the global kernel (if it isn't running) and return its client"""
global KM, KC
KM, KC = start_new_kernel()
flush_channels(KC)
def teardown():
KC.stop_channels()
KM.shutdown_kernel(now=True)
async_code = """
from ipykernel.tests._asyncio_utils import async_func
async_func()
"""
@pytest.mark.skipif(sys.version_info < (3, 5), reason="async/await syntax required")
@pytest.mark.skipif(tornado.version_info < (5,), reason="only relevant on tornado 5")
def test_asyncio_interrupt():
flush_channels(KC)
msg_id, content = execute('%gui asyncio', KC)
assert content['status'] == 'ok', content
flush_channels(KC)
msg_id, content = execute(async_code, KC)
assert content['status'] == 'ok', content
KM.interrupt_kernel()
flush_channels(KC)
msg_id, content = execute(async_code, KC)
assert content['status'] == 'ok'

View file

@@ -0,0 +1,59 @@
"""Tests for heartbeat thread"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import errno
from unittest.mock import patch
import pytest
import zmq
from ipykernel.heartbeat import Heartbeat
def test_port_bind_failure_raises():
heart = Heartbeat(None)
with patch.object(heart, '_try_bind_socket') as mock_try_bind:
mock_try_bind.side_effect = zmq.ZMQError(-100, "fails for unknown error types")
with pytest.raises(zmq.ZMQError):
heart._bind_socket()
assert mock_try_bind.call_count == 1
def test_port_bind_success():
heart = Heartbeat(None)
with patch.object(heart, '_try_bind_socket') as mock_try_bind:
heart._bind_socket()
assert mock_try_bind.call_count == 1
def test_port_bind_failure_recovery():
try:
errno.WSAEADDRINUSE
except AttributeError:
# Fake windows address in-use code
errno.WSAEADDRINUSE = 12345
try:
heart = Heartbeat(None)
with patch.object(heart, '_try_bind_socket') as mock_try_bind:
mock_try_bind.side_effect = [
zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind unix"),
zmq.ZMQError(errno.WSAEADDRINUSE, "fails for non-bind windows")
] + [0] * 100
# Shouldn't raise anything as retries will kick in
heart._bind_socket()
finally:
# Cleanup fake assignment
if errno.WSAEADDRINUSE == 12345:
del errno.WSAEADDRINUSE
def test_port_bind_failure_gives_up_retries():
heart = Heartbeat(None)
with patch.object(heart, '_try_bind_socket') as mock_try_bind:
mock_try_bind.side_effect = zmq.ZMQError(errno.EADDRINUSE, "fails for non-bind")
with pytest.raises(zmq.ZMQError):
heart._bind_socket()
assert mock_try_bind.call_count == 100

View file

@@ -0,0 +1,40 @@
"""Test IO capturing functionality"""
import io
import zmq
from jupyter_client.session import Session
from ipykernel.iostream import IOPubThread, OutStream
import nose.tools as nt
def test_io_api():
"""Test that wrapped stdout has the same API as a normal TextIO object"""
session = Session()
ctx = zmq.Context()
pub = ctx.socket(zmq.PUB)
thread = IOPubThread(pub)
thread.start()
stream = OutStream(session, thread, 'stdout')
# cleanup unused zmq objects before we start testing
thread.stop()
thread.close()
ctx.term()
assert stream.errors is None
assert not stream.isatty()
with nt.assert_raises(io.UnsupportedOperation):
stream.detach()
with nt.assert_raises(io.UnsupportedOperation):
next(stream)
with nt.assert_raises(io.UnsupportedOperation):
stream.read()
with nt.assert_raises(io.UnsupportedOperation):
stream.readline()
with nt.assert_raises(io.UnsupportedOperation):
stream.seek(0)
with nt.assert_raises(io.UnsupportedOperation):
stream.tell()

View file

@@ -0,0 +1,110 @@
# coding: utf-8
"""Test suite for our JSON utilities."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from binascii import a2b_base64
import json
from datetime import datetime
import numbers
import nose.tools as nt
from .. import jsonutil
from ..jsonutil import json_clean, encode_images
from ipython_genutils.py3compat import unicode_to_str
class MyInt(object):
def __int__(self):
return 389
numbers.Integral.register(MyInt)
class MyFloat(object):
def __float__(self):
return 3.14
numbers.Real.register(MyFloat)
def test():
# list of input/expected output. Use None for the expected output if it
# can be the same as the input.
pairs = [(1, None), # start with scalars
(1.0, None),
('a', None),
(True, None),
(False, None),
(None, None),
# Containers
([1, 2], None),
((1, 2), [1, 2]),
(set([1, 2]), [1, 2]),
(dict(x=1), None),
({'x': 1, 'y':[1,2,3], '1':'int'}, None),
# More exotic objects
((x for x in range(3)), [0, 1, 2]),
(iter([1, 2]), [1, 2]),
(datetime(1991, 7, 3, 12, 00), "1991-07-03T12:00:00.000000"),
(MyFloat(), 3.14),
(MyInt(), 389)
]
for val, jval in pairs:
if jval is None:
jval = val
out = json_clean(val)
# validate our cleanup
assert out == jval
# and ensure that what we return, indeed encodes cleanly
json.loads(json.dumps(out))
def test_encode_images():
# invalid data, but the header and footer are from real files
pngdata = b'\x89PNG\r\n\x1a\nblahblahnotactuallyvalidIEND\xaeB`\x82'
jpegdata = b'\xff\xd8\xff\xe0\x00\x10JFIFblahblahjpeg(\xa0\x0f\xff\xd9'
pdfdata = b'%PDF-1.\ntrailer<</Root<</Pages<</Kids[<</MediaBox[0 0 3 3]>>]>>>>>>'
bindata = b'\xff\xff\xff\xff'
fmt = {
'image/png' : pngdata,
'image/jpeg' : jpegdata,
'application/pdf' : pdfdata,
'application/unrecognized': bindata,
}
encoded = json_clean(encode_images(fmt))
for key, value in fmt.items():
# encoded has unicode, want bytes
decoded = a2b_base64(encoded[key])
assert decoded == value
encoded2 = json_clean(encode_images(encoded))
assert encoded == encoded2
# test that we don't double-encode base64 str
b64_str = {}
for key, encoded in encoded.items():
b64_str[key] = unicode_to_str(encoded)
encoded3 = json_clean(encode_images(b64_str))
assert encoded3 == b64_str
for key, value in fmt.items():
decoded = a2b_base64(encoded3[key])
assert decoded == value
def test_lambda():
with nt.assert_raises(ValueError):
json_clean(lambda : 1)
def test_exception():
bad_dicts = [{1:'number', '1':'string'},
{True:'bool', 'True':'string'},
]
for d in bad_dicts:
nt.assert_raises(ValueError, json_clean, d)
def test_unicode_dict():
data = {u'üniço∂e': u'üniço∂e'}
clean = jsonutil.json_clean(data)
assert data == clean
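
A quick reminder of why the pairs above map tuples and sets to lists: the json module either silently converts or outright refuses such containers, so json_clean has to normalize them first. For example:

import json

assert json.dumps((1, 2)) == '[1, 2]'  # tuples silently become JSON arrays
try:
    json.dumps({1, 2})
except TypeError:
    pass  # sets are not JSON-serializable at all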

View file

@@ -0,0 +1,411 @@
# coding: utf-8
"""test the IPython Kernel"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import ast
import io
import os.path
import sys
import time
import nose.tools as nt
from flaky import flaky
import pytest
from packaging import version
from IPython.testing import decorators as dec, tools as tt
import IPython
from ipython_genutils import py3compat
from IPython.paths import locate_profile
from ipython_genutils.tempdir import TemporaryDirectory
from .utils import (
new_kernel, kernel, TIMEOUT, assemble_output, execute,
flush_channels, wait_for_idle,
)
def _check_master(kc, expected=True, stream="stdout"):
execute(kc=kc, code="import sys")
flush_channels(kc)
msg_id, content = execute(kc=kc, code="print (sys.%s._is_master_process())" % stream)
stdout, stderr = assemble_output(kc.iopub_channel)
assert stdout.strip() == repr(expected)
def _check_status(content):
"""If status=error, show the traceback"""
if content['status'] == 'error':
assert False, ''.join(['\n'] + content['traceback'])
# printing tests
def test_simple_print():
"""simple print statement in kernel"""
with kernel() as kc:
iopub = kc.iopub_channel
msg_id, content = execute(kc=kc, code="print ('hi')")
stdout, stderr = assemble_output(iopub)
assert stdout == 'hi\n'
assert stderr == ''
_check_master(kc, expected=True)
def test_sys_path():
"""test that sys.path doesn't get messed up by default"""
with kernel() as kc:
msg_id, content = execute(kc=kc, code="import sys; print(repr(sys.path))")
stdout, stderr = assemble_output(kc.iopub_channel)
# for error-output on failure
sys.stderr.write(stderr)
sys_path = ast.literal_eval(stdout.strip())
assert '' in sys_path
def test_sys_path_profile_dir():
"""test that sys.path doesn't get messed up when `--profile-dir` is specified"""
with new_kernel(['--profile-dir', locate_profile('default')]) as kc:
msg_id, content = execute(kc=kc, code="import sys; print(repr(sys.path))")
stdout, stderr = assemble_output(kc.iopub_channel)
# for error-output on failure
sys.stderr.write(stderr)
sys_path = ast.literal_eval(stdout.strip())
assert '' in sys_path
@flaky(max_runs=3)
@dec.skipif(sys.platform == 'win32', "subprocess prints fail on Windows")
def test_subprocess_print():
"""printing from forked mp.Process"""
with new_kernel() as kc:
iopub = kc.iopub_channel
_check_master(kc, expected=True)
flush_channels(kc)
np = 5
code = '\n'.join([
"from __future__ import print_function",
"import time",
"import multiprocessing as mp",
"pool = [mp.Process(target=print, args=('hello', i,)) for i in range(%i)]" % np,
"for p in pool: p.start()",
"for p in pool: p.join()",
"time.sleep(0.5),"
])
msg_id, content = execute(kc=kc, code=code)
stdout, stderr = assemble_output(iopub)
nt.assert_equal(stdout.count("hello"), np, stdout)
for n in range(np):
nt.assert_equal(stdout.count(str(n)), 1, stdout)
assert stderr == ''
_check_master(kc, expected=True)
_check_master(kc, expected=True, stream="stderr")
@flaky(max_runs=3)
def test_subprocess_noprint():
"""mp.Process without print doesn't trigger iostream mp_mode"""
with kernel() as kc:
iopub = kc.iopub_channel
np = 5
code = '\n'.join([
"import multiprocessing as mp",
"pool = [mp.Process(target=range, args=(i,)) for i in range(%i)]" % np,
"for p in pool: p.start()",
"for p in pool: p.join()"
])
msg_id, content = execute(kc=kc, code=code)
stdout, stderr = assemble_output(iopub)
assert stdout == ''
assert stderr == ''
_check_master(kc, expected=True)
_check_master(kc, expected=True, stream="stderr")
@flaky(max_runs=3)
@dec.skipif(sys.platform == 'win32', "subprocess prints fail on Windows")
def test_subprocess_error():
"""error in mp.Process doesn't crash"""
with new_kernel() as kc:
iopub = kc.iopub_channel
code = '\n'.join([
"import multiprocessing as mp",
"p = mp.Process(target=int, args=('hi',))",
"p.start()",
"p.join()",
])
msg_id, content = execute(kc=kc, code=code)
stdout, stderr = assemble_output(iopub)
assert stdout == ''
assert "ValueError" in stderr
_check_master(kc, expected=True)
_check_master(kc, expected=True, stream="stderr")
# raw_input tests
def test_raw_input():
"""test [raw_]input"""
with kernel() as kc:
iopub = kc.iopub_channel
input_f = "input" if py3compat.PY3 else "raw_input"
theprompt = "prompt> "
code = 'print({input_f}("{theprompt}"))'.format(**locals())
msg_id = kc.execute(code, allow_stdin=True)
msg = kc.get_stdin_msg(block=True, timeout=TIMEOUT)
assert msg['header']['msg_type'] == u'input_request'
content = msg['content']
assert content['prompt'] == theprompt
text = "some text"
kc.input(text)
reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
assert reply['content']['status'] == 'ok'
stdout, stderr = assemble_output(iopub)
assert stdout == text + "\n"
@dec.skipif(py3compat.PY3)
def test_eval_input():
"""test input() on Python 2"""
with kernel() as kc:
iopub = kc.iopub_channel
input_f = "input" if py3compat.PY3 else "raw_input"
theprompt = "prompt> "
code = 'print(input("{theprompt}"))'.format(**locals())
msg_id = kc.execute(code, allow_stdin=True)
msg = kc.get_stdin_msg(block=True, timeout=TIMEOUT)
assert msg['header']['msg_type'] == u'input_request'
content = msg['content']
assert content['prompt'] == theprompt
kc.input("1+1")
reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
assert reply['content']['status'] == 'ok'
stdout, stderr = assemble_output(iopub)
assert stdout == "2\n"
def test_save_history():
# Saving history from the kernel with %hist -f was failing because of
# unicode problems on Python 2.
with kernel() as kc, TemporaryDirectory() as td:
file = os.path.join(td, 'hist.out')
execute(u'a=1', kc=kc)
wait_for_idle(kc)
execute(u'b=u"abcþ"', kc=kc)
wait_for_idle(kc)
_, reply = execute("%hist -f " + file, kc=kc)
assert reply['status'] == 'ok'
with io.open(file, encoding='utf-8') as f:
content = f.read()
assert u'a=1' in content
assert u'b=u"abcþ"' in content
@dec.skip_without('faulthandler')
def test_smoke_faulthandler():
with kernel() as kc:
# Note: faulthandler.register is not available on windows.
code = u'\n'.join([
'import sys',
'import faulthandler',
'import signal',
'faulthandler.enable()',
'if not sys.platform.startswith("win32"):',
' faulthandler.register(signal.SIGTERM)'])
_, reply = execute(code, kc=kc)
nt.assert_equal(reply['status'], 'ok', reply.get('traceback', ''))
def test_help_output():
"""ipython kernel --help-all works"""
tt.help_all_output_test('kernel')
def test_is_complete():
with kernel() as kc:
# There are more test cases for this in core - here we just check
# that the kernel exposes the interface correctly.
kc.is_complete('2+2')
reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
assert reply['content']['status'] == 'complete'
# SyntaxError
kc.is_complete('raise = 2')
reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
assert reply['content']['status'] == 'invalid'
kc.is_complete('a = [1,\n2,')
reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
assert reply['content']['status'] == 'incomplete'
assert reply['content']['indent'] == ''
# Cell magic ends on two blank lines for console UIs
kc.is_complete('%%timeit\na\n\n')
reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
assert reply['content']['status'] == 'complete'
def test_complete():
with kernel() as kc:
execute(u'a = 1', kc=kc)
wait_for_idle(kc)
cell = 'import IPython\nb = a.'
kc.complete(cell)
reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
c = reply['content']
assert c['status'] == 'ok'
start = cell.find('a.')
end = start + 2
assert c['cursor_end'] == cell.find('a.') + 2
assert c['cursor_start'] <= end
# there are many right answers for cursor_start,
# so verify application of the completion
# rather than the value of cursor_start
matches = c['matches']
assert matches
for m in matches:
completed = cell[:c['cursor_start']] + m
assert completed.startswith(cell)
@dec.skip_without('matplotlib')
def test_matplotlib_inline_on_import():
with kernel() as kc:
cell = '\n'.join([
'import matplotlib, matplotlib.pyplot as plt',
'backend = matplotlib.get_backend()'
])
_, reply = execute(cell,
user_expressions={'backend': 'backend'},
kc=kc)
_check_status(reply)
backend_bundle = reply['user_expressions']['backend']
_check_status(backend_bundle)
assert 'backend_inline' in backend_bundle['data']['text/plain']
def test_message_order():
N = 100 # number of messages to test
with kernel() as kc:
_, reply = execute("a = 1", kc=kc)
_check_status(reply)
offset = reply['execution_count'] + 1
cell = "a += 1\na"
msg_ids = []
# submit N executions as fast as we can
for i in range(N):
msg_ids.append(kc.execute(cell))
# check message-handling order
for i, msg_id in enumerate(msg_ids, offset):
reply = kc.get_shell_msg(timeout=TIMEOUT)
_check_status(reply['content'])
assert reply['content']['execution_count'] == i
assert reply['parent_header']['msg_id'] == msg_id
@dec.skipif(sys.platform != 'win32', "only run on Windows")
def test_unc_paths():
with kernel() as kc, TemporaryDirectory() as td:
drive_file_path = os.path.join(td, 'unc.txt')
with open(drive_file_path, 'w+') as f:
f.write('# UNC test')
unc_root = '\\\\localhost\\C$'
file_path = os.path.splitdrive(os.path.dirname(drive_file_path))[1]
unc_file_path = os.path.join(unc_root, file_path[1:])
iopub = kc.iopub_channel
kc.execute("cd {0:s}".format(unc_file_path))
reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
assert reply['content']['status'] == 'ok'
out, err = assemble_output(iopub)
assert unc_file_path in out
flush_channels(kc)
kc.execute(code="ls")
reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
assert reply['content']['status'] == 'ok'
out, err = assemble_output(iopub)
assert 'unc.txt' in out
kc.execute(code="cd")
reply = kc.get_shell_msg(block=True, timeout=TIMEOUT)
assert reply['content']['status'] == 'ok'
def test_shutdown():
"""Kernel exits after polite shutdown_request"""
with new_kernel() as kc:
km = kc.parent
execute(u'a = 1', kc=kc)
wait_for_idle(kc)
kc.shutdown()
for i in range(300): # 30s timeout
if km.is_alive():
time.sleep(.1)
else:
break
assert not km.is_alive()
def test_interrupt_during_input():
"""
The kernel exits after being interrupted while waiting in input().
input() appears to have issues other functions don't, and it needs to be
interruptible in order for pdb to be interruptible.
"""
with new_kernel() as kc:
km = kc.parent
msg_id = kc.execute("input()")
time.sleep(1) # Make sure it's actually waiting for input.
km.interrupt_kernel()
        # If we failed to interrupt, this will time out:
reply = kc.get_shell_msg(timeout=TIMEOUT)
from .test_message_spec import validate_message
validate_message(reply, 'execute_reply', msg_id)
@pytest.mark.skipif(
version.parse(IPython.__version__) < version.parse("7.14.0"),
reason="Need new IPython"
)
def test_interrupt_during_pdb_set_trace():
"""
The kernel exits after being interrupted while waiting in pdb.set_trace().
Merely testing input() isn't enough, pdb has its own issues that need
to be handled in addition.
This test will fail with versions of IPython < 7.14.0.
"""
with new_kernel() as kc:
km = kc.parent
msg_id = kc.execute("import pdb; pdb.set_trace()")
msg_id2 = kc.execute("3 + 4")
time.sleep(1) # Make sure it's actually waiting for input.
km.interrupt_kernel()
        # If we failed to interrupt, this will time out:
from .test_message_spec import validate_message
reply = kc.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'execute_reply', msg_id)
reply = kc.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'execute_reply', msg_id2)

View file

@@ -0,0 +1,146 @@
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import json
import io
import os
import shutil
import sys
import tempfile
try:
from unittest import mock
except ImportError:
import mock # py2
from jupyter_core.paths import jupyter_data_dir
from ipykernel.kernelspec import (
make_ipkernel_cmd,
get_kernel_dict,
write_kernel_spec,
install,
InstallIPythonKernelSpecApp,
KERNEL_NAME,
RESOURCES,
)
import nose.tools as nt
pjoin = os.path.join
def test_make_ipkernel_cmd():
cmd = make_ipkernel_cmd()
nt.assert_equal(cmd, [
sys.executable,
'-m',
'ipykernel_launcher',
'-f',
'{connection_file}'
])
def assert_kernel_dict(d):
assert d['argv'] == make_ipkernel_cmd()
assert d['display_name'] == 'Python %i' % sys.version_info[0]
assert d['language'] == 'python'
def test_get_kernel_dict():
d = get_kernel_dict()
assert_kernel_dict(d)
def assert_kernel_dict_with_profile(d):
nt.assert_equal(d['argv'], make_ipkernel_cmd(
extra_arguments=["--profile", "test"]))
assert d['display_name'] == 'Python %i' % sys.version_info[0]
assert d['language'] == 'python'
def test_get_kernel_dict_with_profile():
d = get_kernel_dict(["--profile", "test"])
assert_kernel_dict_with_profile(d)
def assert_is_spec(path):
for fname in os.listdir(RESOURCES):
dst = pjoin(path, fname)
assert os.path.exists(dst)
kernel_json = pjoin(path, 'kernel.json')
assert os.path.exists(kernel_json)
with io.open(kernel_json, encoding='utf8') as f:
json.load(f)
def test_write_kernel_spec():
path = write_kernel_spec()
assert_is_spec(path)
shutil.rmtree(path)
def test_write_kernel_spec_path():
path = os.path.join(tempfile.mkdtemp(), KERNEL_NAME)
path2 = write_kernel_spec(path)
assert path == path2
assert_is_spec(path)
shutil.rmtree(path)
def test_install_kernelspec():
path = tempfile.mkdtemp()
try:
test = InstallIPythonKernelSpecApp.launch_instance(argv=['--prefix', path])
assert_is_spec(os.path.join(
path, 'share', 'jupyter', 'kernels', KERNEL_NAME))
finally:
shutil.rmtree(path)
def test_install_user():
tmp = tempfile.mkdtemp()
with mock.patch.dict(os.environ, {'HOME': tmp}):
install(user=True)
data_dir = jupyter_data_dir()
assert_is_spec(os.path.join(data_dir, 'kernels', KERNEL_NAME))
def test_install():
system_jupyter_dir = tempfile.mkdtemp()
with mock.patch('jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH',
[system_jupyter_dir]):
install()
assert_is_spec(os.path.join(system_jupyter_dir, 'kernels', KERNEL_NAME))
def test_install_profile():
system_jupyter_dir = tempfile.mkdtemp()
with mock.patch('jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH',
[system_jupyter_dir]):
install(profile="Test")
spec = os.path.join(system_jupyter_dir, 'kernels', KERNEL_NAME, "kernel.json")
with open(spec) as f:
spec = json.load(f)
assert spec["display_name"].endswith(" [profile=Test]")
nt.assert_equal(spec["argv"][-2:], ["--profile", "Test"])
def test_install_display_name_overrides_profile():
system_jupyter_dir = tempfile.mkdtemp()
with mock.patch('jupyter_client.kernelspec.SYSTEM_JUPYTER_PATH',
[system_jupyter_dir]):
install(display_name="Display", profile="Test")
spec = os.path.join(system_jupyter_dir, 'kernels', KERNEL_NAME, "kernel.json")
with open(spec) as f:
spec = json.load(f)
assert spec["display_name"] == "Display"

View file

@@ -0,0 +1,549 @@
"""Test suite for our zeromq-based message specification."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import re
import sys
from distutils.version import LooseVersion as V
try:
from queue import Empty # Py 3
except ImportError:
from Queue import Empty # Py 2
import nose.tools as nt
from nose.plugins.skip import SkipTest
from traitlets import (
HasTraits, TraitError, Bool, Unicode, Dict, Integer, List, Enum
)
from ipython_genutils.py3compat import string_types, iteritems
from .utils import TIMEOUT, start_global_kernel, flush_channels, execute
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
KC = None
def setup():
global KC
KC = start_global_kernel()
#-----------------------------------------------------------------------------
# Message Spec References
#-----------------------------------------------------------------------------
class Reference(HasTraits):
"""
    Base class for message specification testing.

    This class is the core of the message specification test. The
    idea is that child classes implement trait attributes for each
    message key, so that message keys can be tested against these
    traits using the :meth:`check` method.
"""
def check(self, d):
"""validate a dict against our traits"""
for key in self.trait_names():
assert key in d
# FIXME: always allow None, probably not a good idea
if d[key] is None:
continue
try:
setattr(self, key, d[key])
except TraitError as e:
assert False, str(e)
class Version(Unicode):
def __init__(self, *args, **kwargs):
self.min = kwargs.pop('min', None)
self.max = kwargs.pop('max', None)
kwargs['default_value'] = self.min
super(Version, self).__init__(*args, **kwargs)
def validate(self, obj, value):
if self.min and V(value) < V(self.min):
raise TraitError("bad version: %s < %s" % (value, self.min))
if self.max and (V(value) > V(self.max)):
raise TraitError("bad version: %s > %s" % (value, self.max))
class RMessage(Reference):
msg_id = Unicode()
msg_type = Unicode()
header = Dict()
parent_header = Dict()
content = Dict()
def check(self, d):
super(RMessage, self).check(d)
RHeader().check(self.header)
if self.parent_header:
RHeader().check(self.parent_header)
class RHeader(Reference):
msg_id = Unicode()
msg_type = Unicode()
session = Unicode()
username = Unicode()
version = Version(min='5.0')
mime_pat = re.compile(r'^[\w\-\+\.]+/[\w\-\+\.]+$')
class MimeBundle(Reference):
metadata = Dict()
data = Dict()
def _data_changed(self, name, old, new):
for k,v in iteritems(new):
assert mime_pat.match(k)
assert isinstance(v, string_types)
# shell replies
class Reply(Reference):
status = Enum((u'ok', u'error'), default_value=u'ok')
class ExecuteReply(Reply):
execution_count = Integer()
def check(self, d):
Reference.check(self, d)
if d['status'] == 'ok':
ExecuteReplyOkay().check(d)
elif d['status'] == 'error':
ExecuteReplyError().check(d)
class ExecuteReplyOkay(Reply):
status = Enum(('ok',))
user_expressions = Dict()
class ExecuteReplyError(Reply):
ename = Unicode()
evalue = Unicode()
traceback = List(Unicode())
class InspectReply(Reply, MimeBundle):
found = Bool()
class ArgSpec(Reference):
args = List(Unicode())
varargs = Unicode()
varkw = Unicode()
defaults = List()
class Status(Reference):
execution_state = Enum((u'busy', u'idle', u'starting'), default_value=u'busy')
class CompleteReply(Reply):
matches = List(Unicode())
cursor_start = Integer()
cursor_end = Integer()
status = Unicode()
class LanguageInfo(Reference):
name = Unicode('python')
version = Unicode(sys.version.split()[0])
class KernelInfoReply(Reply):
protocol_version = Version(min='5.0')
implementation = Unicode('ipython')
implementation_version = Version(min='2.1')
language_info = Dict()
banner = Unicode()
def check(self, d):
Reference.check(self, d)
LanguageInfo().check(d['language_info'])
class ConnectReply(Reference):
shell_port = Integer()
control_port = Integer()
stdin_port = Integer()
iopub_port = Integer()
hb_port = Integer()
class CommInfoReply(Reply):
comms = Dict()
class IsCompleteReply(Reference):
status = Enum((u'complete', u'incomplete', u'invalid', u'unknown'), default_value=u'complete')
def check(self, d):
Reference.check(self, d)
if d['status'] == 'incomplete':
IsCompleteReplyIncomplete().check(d)
class IsCompleteReplyIncomplete(Reference):
indent = Unicode()
# IOPub messages
class ExecuteInput(Reference):
code = Unicode()
execution_count = Integer()
class Error(ExecuteReplyError):
"""Errors are the same as ExecuteReply, but without status"""
status = None # no status field
class Stream(Reference):
name = Enum((u'stdout', u'stderr'), default_value=u'stdout')
text = Unicode()
class DisplayData(MimeBundle):
pass
class ExecuteResult(MimeBundle):
execution_count = Integer()
class HistoryReply(Reply):
history = List(List())
references = {
'execute_reply' : ExecuteReply(),
'inspect_reply' : InspectReply(),
'status' : Status(),
'complete_reply' : CompleteReply(),
'kernel_info_reply': KernelInfoReply(),
'connect_reply': ConnectReply(),
'comm_info_reply': CommInfoReply(),
'is_complete_reply': IsCompleteReply(),
'execute_input' : ExecuteInput(),
'execute_result' : ExecuteResult(),
'history_reply' : HistoryReply(),
'error' : Error(),
'stream' : Stream(),
'display_data' : DisplayData(),
'header' : RHeader(),
}
"""
Specifications of `content` part of the reply messages.
"""
def validate_message(msg, msg_type=None, parent=None):
"""validate a message
This is a generator, and must be iterated through to actually
trigger each test.
If msg_type and/or parent are given, the msg_type and/or parent msg_id
are compared with the given values.
"""
RMessage().check(msg)
if msg_type:
assert msg['msg_type'] == msg_type
if parent:
assert msg['parent_header']['msg_id'] == parent
content = msg['content']
ref = references[msg['msg_type']]
ref.check(content)
#-----------------------------------------------------------------------------
# Tests
#-----------------------------------------------------------------------------
# Shell channel
def test_execute():
flush_channels()
msg_id = KC.execute(code='x=1')
reply = KC.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'execute_reply', msg_id)
def test_execute_silent():
flush_channels()
msg_id, reply = execute(code='x=1', silent=True)
# flush status=idle
status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
validate_message(status, 'status', msg_id)
assert status['content']['execution_state'] == 'idle'
nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
count = reply['execution_count']
msg_id, reply = execute(code='x=2', silent=True)
# flush status=idle
status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
validate_message(status, 'status', msg_id)
assert status['content']['execution_state'] == 'idle'
nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
count_2 = reply['execution_count']
assert count_2 == count
def test_execute_error():
flush_channels()
msg_id, reply = execute(code='1/0')
assert reply['status'] == 'error'
assert reply['ename'] == 'ZeroDivisionError'
error = KC.iopub_channel.get_msg(timeout=TIMEOUT)
validate_message(error, 'error', msg_id)
def test_execute_inc():
"""execute request should increment execution_count"""
flush_channels()
msg_id, reply = execute(code='x=1')
count = reply['execution_count']
flush_channels()
msg_id, reply = execute(code='x=2')
count_2 = reply['execution_count']
assert count_2 == count+1
def test_execute_stop_on_error():
"""execute request should not abort execution queue with stop_on_error False"""
flush_channels()
fail = '\n'.join([
# sleep to ensure subsequent message is waiting in the queue to be aborted
'import time',
'time.sleep(0.5)',
'raise ValueError',
])
KC.execute(code=fail)
msg_id = KC.execute(code='print("Hello")')
KC.get_shell_msg(timeout=TIMEOUT)
reply = KC.get_shell_msg(timeout=TIMEOUT)
assert reply['content']['status'] == 'aborted'
flush_channels()
KC.execute(code=fail, stop_on_error=False)
msg_id = KC.execute(code='print("Hello")')
KC.get_shell_msg(timeout=TIMEOUT)
reply = KC.get_shell_msg(timeout=TIMEOUT)
assert reply['content']['status'] == 'ok'
def test_user_expressions():
flush_channels()
msg_id, reply = execute(code='x=1', user_expressions=dict(foo='x+1'))
user_expressions = reply['user_expressions']
nt.assert_equal(user_expressions, {u'foo': {
u'status': u'ok',
u'data': {u'text/plain': u'2'},
u'metadata': {},
}})
def test_user_expressions_fail():
flush_channels()
msg_id, reply = execute(code='x=0', user_expressions=dict(foo='nosuchname'))
user_expressions = reply['user_expressions']
foo = user_expressions['foo']
assert foo['status'] == 'error'
assert foo['ename'] == 'NameError'
def test_oinfo():
flush_channels()
msg_id = KC.inspect('a')
reply = KC.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'inspect_reply', msg_id)
def test_oinfo_found():
flush_channels()
msg_id, reply = execute(code='a=5')
msg_id = KC.inspect('a')
reply = KC.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'inspect_reply', msg_id)
content = reply['content']
assert content['found']
text = content['data']['text/plain']
assert 'Type:' in text
assert 'Docstring:' in text
def test_oinfo_detail():
flush_channels()
msg_id, reply = execute(code='ip=get_ipython()')
msg_id = KC.inspect('ip.object_inspect', cursor_pos=10, detail_level=1)
reply = KC.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'inspect_reply', msg_id)
content = reply['content']
assert content['found']
text = content['data']['text/plain']
assert 'Signature:' in text
assert 'Source:' in text
def test_oinfo_not_found():
flush_channels()
msg_id = KC.inspect('dne')
reply = KC.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'inspect_reply', msg_id)
content = reply['content']
assert not content['found']
def test_complete():
flush_channels()
msg_id, reply = execute(code="alpha = albert = 5")
msg_id = KC.complete('al', 2)
reply = KC.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'complete_reply', msg_id)
matches = reply['content']['matches']
for name in ('alpha', 'albert'):
assert name in matches
def test_kernel_info_request():
flush_channels()
msg_id = KC.kernel_info()
reply = KC.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'kernel_info_reply', msg_id)
def test_connect_request():
flush_channels()
msg = KC.session.msg('connect_request')
KC.shell_channel.send(msg)
    msg_id = msg['header']['msg_id']
reply = KC.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'connect_reply', msg_id)
def test_comm_info_request():
flush_channels()
if not hasattr(KC, 'comm_info'):
raise SkipTest()
msg_id = KC.comm_info()
reply = KC.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'comm_info_reply', msg_id)
def test_single_payload():
"""
    We want to test that set_next_input is not triggered several times per cell.
    This is (or was?) mostly due to the fact that `?` in a loop would trigger
    several set_next_input calls.

    I'm tempted to think that we actually want to _allow_ multiple
    set_next_input calls (that's the user's choice), but `?` itself (and ?'s
    transform) should avoid setting multiple set_next_input.
"""
flush_channels()
msg_id, reply = execute(code="ip = get_ipython()\n"
"for i in range(3):\n"
" ip.set_next_input('Hello There')\n")
payload = reply['payload']
next_input_pls = [pl for pl in payload if pl["source"] == "set_next_input"]
assert len(next_input_pls) == 1
def test_is_complete():
flush_channels()
msg_id = KC.is_complete("a = 1")
reply = KC.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'is_complete_reply', msg_id)
def test_history_range():
flush_channels()
msg_id_exec = KC.execute(code='x=1', store_history = True)
reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
msg_id = KC.history(hist_access_type = 'range', raw = True, output = True, start = 1, stop = 2, session = 0)
reply = KC.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'history_reply', msg_id)
content = reply['content']
assert len(content['history']) == 1
def test_history_tail():
flush_channels()
msg_id_exec = KC.execute(code='x=1', store_history = True)
reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
msg_id = KC.history(hist_access_type = 'tail', raw = True, output = True, n = 1, session = 0)
reply = KC.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'history_reply', msg_id)
content = reply['content']
assert len(content['history']) == 1
def test_history_search():
flush_channels()
msg_id_exec = KC.execute(code='x=1', store_history = True)
reply_exec = KC.get_shell_msg(timeout=TIMEOUT)
msg_id = KC.history(hist_access_type = 'search', raw = True, output = True, n = 1, pattern = '*', session = 0)
reply = KC.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'history_reply', msg_id)
content = reply['content']
assert len(content['history']) == 1
# IOPub channel
def test_stream():
flush_channels()
msg_id, reply = execute("print('hi')")
stdout = KC.iopub_channel.get_msg(timeout=TIMEOUT)
validate_message(stdout, 'stream', msg_id)
content = stdout['content']
assert content['text'] == u'hi\n'
def test_display_data():
flush_channels()
msg_id, reply = execute("from IPython.display import display; display(1)")
display = KC.iopub_channel.get_msg(timeout=TIMEOUT)
validate_message(display, 'display_data', parent=msg_id)
data = display['content']['data']
assert data['text/plain'] == u'1'
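
The Reference/check pattern defined at the top of this file can be hard to see among its many subclasses; here is a condensed, self-contained sketch of the same idea (the class and field names are hypothetical):

from traitlets import HasTraits, Integer, TraitError, Unicode

class MiniReply(HasTraits):
    status = Unicode()
    execution_count = Integer()

    def check(self, d):
        # every declared trait must be present and of the right type
        for key in self.trait_names():
            assert key in d
            try:
                setattr(self, key, d[key])
            except TraitError as e:
                assert False, str(e)

MiniReply().check({'status': 'ok', 'execution_count': 2})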

View file

@@ -0,0 +1,65 @@
import pickle
from ipykernel.pickleutil import can, uncan, codeutil
def interactive(f):
f.__module__ = '__main__'
return f
def dumps(obj):
return pickle.dumps(can(obj))
def loads(obj):
return uncan(pickle.loads(obj))
def test_no_closure():
@interactive
def foo():
a = 5
return a
pfoo = dumps(foo)
bar = loads(pfoo)
assert foo() == bar()
def test_generator_closure():
# this only creates a closure on Python 3
@interactive
def foo():
i = 'i'
r = [ i for j in (1,2) ]
return r
pfoo = dumps(foo)
bar = loads(pfoo)
assert foo() == bar()
def test_nested_closure():
@interactive
def foo():
i = 'i'
def g():
return i
return g()
pfoo = dumps(foo)
bar = loads(pfoo)
assert foo() == bar()
def test_closure():
i = 'i'
@interactive
def foo():
return i
pfoo = dumps(foo)
bar = loads(pfoo)
assert foo() == bar()
def test_uncan_bytes_buffer():
data = b'data'
canned = can(data)
canned.buffers = [memoryview(buf) for buf in canned.buffers]
out = uncan(canned)
assert out == data
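
The @interactive trick above matters because pickle serializes functions by reference, so a function it cannot look up by name, such as a closure defined inside another function, fails to pickle; that is the kind of limitation can()/uncan() appear designed to work around. A standard-library-only illustration:

import pickle

def make_local():
    x = 'captured'
    def inner():
        return x
    return inner

try:
    pickle.dumps(make_local())
except (pickle.PicklingError, AttributeError):
    pass  # locally defined (closure) functions are not picklable as-is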

View file

@@ -0,0 +1,201 @@
"""test serialization tools"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import pickle
from collections import namedtuple
from ipykernel.serialize import serialize_object, deserialize_object
from IPython.testing import decorators as dec
from ipykernel.pickleutil import CannedArray, CannedClass, interactive
def roundtrip(obj):
"""roundtrip an object through serialization"""
bufs = serialize_object(obj)
obj2, remainder = deserialize_object(bufs)
assert remainder == []
return obj2
SHAPES = ((100,), (1024,10), (10,8,6,5), (), (0,))
DTYPES = ('uint8', 'float64', 'int32', [('g', 'float32')], '|S10')
def new_array(shape, dtype):
import numpy
return numpy.random.random(shape).astype(dtype)
def test_roundtrip_simple():
for obj in [
'hello',
dict(a='b', b=10),
[1,2,'hi'],
(b'123', 'hello'),
]:
obj2 = roundtrip(obj)
assert obj == obj2
def test_roundtrip_nested():
for obj in [
dict(a=range(5), b={1:b'hello'}),
[range(5),[range(3),(1,[b'whoda'])]],
]:
obj2 = roundtrip(obj)
assert obj == obj2
def test_roundtrip_buffered():
for obj in [
dict(a=b"x"*1025),
b"hello"*500,
[b"hello"*501, 1,2,3]
]:
bufs = serialize_object(obj)
assert len(bufs) == 2
obj2, remainder = deserialize_object(bufs)
assert remainder == []
assert obj == obj2
def test_roundtrip_memoryview():
b = b'asdf' * 1025
view = memoryview(b)
bufs = serialize_object(view)
assert len(bufs) == 2
v2, remainder = deserialize_object(bufs)
assert remainder == []
assert v2.tobytes() == b
@dec.skip_without('numpy')
def test_numpy():
import numpy
from numpy.testing.utils import assert_array_equal
for shape in SHAPES:
for dtype in DTYPES:
A = new_array(shape, dtype=dtype)
bufs = serialize_object(A)
bufs = [memoryview(b) for b in bufs]
B, r = deserialize_object(bufs)
assert r == []
assert A.shape == B.shape
assert A.dtype == B.dtype
assert_array_equal(A,B)
@dec.skip_without('numpy')
def test_recarray():
import numpy
from numpy.testing.utils import assert_array_equal
for shape in SHAPES:
for dtype in [
[('f', float), ('s', '|S10')],
[('n', int), ('s', '|S1'), ('u', 'uint32')],
]:
A = new_array(shape, dtype=dtype)
bufs = serialize_object(A)
B, r = deserialize_object(bufs)
assert r == []
assert A.shape == B.shape
assert A.dtype == B.dtype
assert_array_equal(A,B)
@dec.skip_without('numpy')
def test_numpy_in_seq():
import numpy
from numpy.testing.utils import assert_array_equal
for shape in SHAPES:
for dtype in DTYPES:
A = new_array(shape, dtype=dtype)
bufs = serialize_object((A,1,2,b'hello'))
canned = pickle.loads(bufs[0])
assert isinstance(canned[0], CannedArray)
tup, r = deserialize_object(bufs)
B = tup[0]
assert r == []
assert A.shape == B.shape
assert A.dtype == B.dtype
assert_array_equal(A,B)
@dec.skip_without('numpy')
def test_numpy_in_dict():
import numpy
from numpy.testing.utils import assert_array_equal
for shape in SHAPES:
for dtype in DTYPES:
A = new_array(shape, dtype=dtype)
bufs = serialize_object(dict(a=A,b=1,c=range(20)))
canned = pickle.loads(bufs[0])
assert isinstance(canned['a'], CannedArray)
d, r = deserialize_object(bufs)
B = d['a']
assert r == []
assert A.shape == B.shape
assert A.dtype == B.dtype
assert_array_equal(A,B)
def test_class():
@interactive
class C(object):
a=5
bufs = serialize_object(dict(C=C))
canned = pickle.loads(bufs[0])
assert isinstance(canned['C'], CannedClass)
d, r = deserialize_object(bufs)
C2 = d['C']
assert C2.a == C.a
def test_class_oldstyle():
@interactive
class C:
a=5
bufs = serialize_object(dict(C=C))
canned = pickle.loads(bufs[0])
assert isinstance(canned['C'], CannedClass)
d, r = deserialize_object(bufs)
C2 = d['C']
assert C2.a == C.a
def test_tuple():
tup = (lambda x:x, 1)
bufs = serialize_object(tup)
canned = pickle.loads(bufs[0])
assert isinstance(canned, tuple)
t2, r = deserialize_object(bufs)
assert t2[0](t2[1]) == tup[0](tup[1])
point = namedtuple('point', 'x y')
def test_namedtuple():
p = point(1,2)
bufs = serialize_object(p)
canned = pickle.loads(bufs[0])
assert isinstance(canned, point)
p2, r = deserialize_object(bufs, globals())
assert p2.x == p.x
assert p2.y == p.y
def test_list():
lis = [lambda x:x, 1]
bufs = serialize_object(lis)
canned = pickle.loads(bufs[0])
assert isinstance(canned, list)
l2, r = deserialize_object(bufs)
assert l2[0](l2[1]) == lis[0](lis[1])
def test_class_inheritance():
@interactive
class C(object):
a=5
@interactive
class D(C):
b=10
bufs = serialize_object(dict(D=D))
canned = pickle.loads(bufs[0])
assert isinstance(canned['D'], CannedClass)
d, r = deserialize_object(bufs)
D2 = d['D']
assert D2.a == D.a
assert D2.b == D.b

View file

@@ -0,0 +1,51 @@
from .test_embed_kernel import setup_kernel
from flaky import flaky
TIMEOUT = 15
@flaky(max_runs=3)
def test_ipython_start_kernel_userns():
cmd = ('from IPython import start_kernel\n'
'ns = {"tre": 123}\n'
'start_kernel(user_ns=ns)')
with setup_kernel(cmd) as client:
msg_id = client.inspect('tre')
msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
content = msg['content']
assert content['found']
text = content['data']['text/plain']
assert u'123' in text
# user_module should be an instance of DummyMod
msg_id = client.execute("usermod = get_ipython().user_module")
msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
content = msg['content']
assert content['status'] == u'ok'
msg_id = client.inspect('usermod')
msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
content = msg['content']
assert content['found']
text = content['data']['text/plain']
assert u'DummyMod' in text
@flaky(max_runs=3)
def test_ipython_start_kernel_no_userns():
# Issue #4188 - user_ns should be passed to shell as None, not {}
cmd = ('from IPython import start_kernel\n'
'start_kernel()')
with setup_kernel(cmd) as client:
# user_module should not be an instance of DummyMod
msg_id = client.execute("usermod = get_ipython().user_module")
msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
content = msg['content']
assert content['status'] == u'ok'
msg_id = client.inspect('usermod')
msg = client.get_shell_msg(block=True, timeout=TIMEOUT)
content = msg['content']
assert content['found']
text = content['data']['text/plain']
assert u'DummyMod' not in text

View file

@@ -0,0 +1,207 @@
# -*- coding: utf-8 -*-
""" Tests for zmq shell / display publisher. """
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
try:
from queue import Queue
except ImportError:
# py2
from Queue import Queue
from threading import Thread
import unittest
from traitlets import Int
import zmq
from ipykernel.zmqshell import ZMQDisplayPublisher
from jupyter_client.session import Session
class NoReturnDisplayHook(object):
"""
A dummy DisplayHook which allows us to monitor
the number of times an object is called, but which
does *not* return a message when it is called.
"""
call_count = 0
def __call__(self, obj):
self.call_count += 1
class ReturnDisplayHook(NoReturnDisplayHook):
"""
A dummy DisplayHook with the same counting ability
as its base class, but which also returns the same
message when it is called.
"""
def __call__(self, obj):
super(ReturnDisplayHook, self).__call__(obj)
return obj
class CounterSession(Session):
"""
This is a simple subclass to allow us to count
the calls made to the session object by the display
publisher.
"""
send_count = Int(0)
def send(self, *args, **kwargs):
"""
A trivial override to just augment the existing call
with an increment to the send counter.
"""
self.send_count += 1
super(CounterSession, self).send(*args, **kwargs)
class ZMQDisplayPublisherTests(unittest.TestCase):
"""
Tests the ZMQDisplayPublisher in zmqshell.py
"""
def setUp(self):
self.context = zmq.Context()
self.socket = self.context.socket(zmq.PUB)
self.session = CounterSession()
self.disp_pub = ZMQDisplayPublisher(
session = self.session,
pub_socket = self.socket
)
def tearDown(self):
"""
We need to close the socket in order to proceed with the
tests.
        TODO - There is still an open file handle to '/dev/null',
presumably created by zmq.
"""
self.disp_pub.clear_output()
self.socket.close()
self.context.term()
def test_display_publisher_creation(self):
"""
Since there's no explicit constructor, here we confirm
that keyword args get assigned correctly, and override
the defaults.
"""
assert self.disp_pub.session == self.session
assert self.disp_pub.pub_socket == self.socket
def test_thread_local_hooks(self):
"""
Confirms that the thread_local attribute is correctly
initialised with an empty list for the display hooks
"""
assert self.disp_pub._hooks == []
def hook(msg):
return msg
self.disp_pub.register_hook(hook)
assert self.disp_pub._hooks == [hook]
q = Queue()
def set_thread_hooks():
q.put(self.disp_pub._hooks)
t = Thread(target=set_thread_hooks)
t.start()
thread_hooks = q.get(timeout=10)
assert thread_hooks == []
def test_publish(self):
"""
Publish should prepare the message and eventually call
`send` by default.
"""
data = dict(a = 1)
assert self.session.send_count == 0
self.disp_pub.publish(data)
assert self.session.send_count == 1
def test_display_hook_halts_send(self):
"""
If a hook is installed, and on calling the object
it does *not* return a message, then we assume that
the message has been consumed, and should not be
processed (`sent`) in the normal manner.
"""
data = dict(a = 1)
hook = NoReturnDisplayHook()
self.disp_pub.register_hook(hook)
assert hook.call_count == 0
assert self.session.send_count == 0
self.disp_pub.publish(data)
assert hook.call_count == 1
assert self.session.send_count == 0
def test_display_hook_return_calls_send(self):
"""
If a hook is installed and on calling the object
it returns a new message, then we assume that this
is just a message transformation, and the message
should be sent in the usual manner.
"""
data = dict(a=1)
hook = ReturnDisplayHook()
self.disp_pub.register_hook(hook)
assert hook.call_count == 0
assert self.session.send_count == 0
self.disp_pub.publish(data)
assert hook.call_count == 1
assert self.session.send_count == 1
def test_unregister_hook(self):
"""
Once a hook is unregistered, it should not be called
during `publish`.
"""
data = dict(a = 1)
hook = NoReturnDisplayHook()
self.disp_pub.register_hook(hook)
assert hook.call_count == 0
assert self.session.send_count == 0
self.disp_pub.publish(data)
assert hook.call_count == 1
assert self.session.send_count == 0
#
# After unregistering the `NoReturn` hook, any calls
        # to publish should *not* go through the DisplayHook,
# but should instead hit the usual `session.send` call
# at the end.
#
# As a result, the hook call count should *not* increase,
# but the session send count *should* increase.
#
first = self.disp_pub.unregister_hook(hook)
self.disp_pub.publish(data)
self.assertTrue(first)
assert hook.call_count == 1
assert self.session.send_count == 1
#
# If a hook is not installed, `unregister_hook`
# should return false.
#
second = self.disp_pub.unregister_hook(hook)
self.assertFalse(second)
if __name__ == '__main__':
unittest.main()

View file

@@ -0,0 +1,172 @@
"""utilities for testing IPython kernels"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
import atexit
import os
import sys
from contextlib import contextmanager
from subprocess import PIPE, STDOUT
try:
from queue import Empty # Py 3
except ImportError:
from Queue import Empty # Py 2
import nose
from jupyter_client import manager
STARTUP_TIMEOUT = 60
TIMEOUT = 15
KM = None
KC = None
def start_new_kernel(**kwargs):
"""start a new kernel, and return its Manager and Client
Integrates with our output capturing for tests.
"""
kwargs['stderr'] = STDOUT
try:
kwargs['stdout'] = nose.iptest_stdstreams_fileno()
except AttributeError:
pass
return manager.start_new_kernel(startup_timeout=STARTUP_TIMEOUT, **kwargs)
def flush_channels(kc=None):
"""flush any messages waiting on the queue"""
from .test_message_spec import validate_message
if kc is None:
kc = KC
for channel in (kc.shell_channel, kc.iopub_channel):
while True:
try:
msg = channel.get_msg(block=True, timeout=0.1)
except Empty:
break
else:
validate_message(msg)
def execute(code='', kc=None, **kwargs):
"""wrapper for doing common steps for validating an execution request"""
from .test_message_spec import validate_message
if kc is None:
kc = KC
msg_id = kc.execute(code=code, **kwargs)
reply = kc.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'execute_reply', msg_id)
busy = kc.get_iopub_msg(timeout=TIMEOUT)
validate_message(busy, 'status', msg_id)
assert busy['content']['execution_state'] == 'busy'
if not kwargs.get('silent'):
execute_input = kc.get_iopub_msg(timeout=TIMEOUT)
validate_message(execute_input, 'execute_input', msg_id)
assert execute_input['content']['code'] == code
# show tracebacks if present for debugging
if reply['content'].get('traceback'):
print('\n'.join(reply['content']['traceback']), file=sys.stderr)
return msg_id, reply['content']
def start_global_kernel():
"""start the global kernel (if it isn't running) and return its client"""
global KM, KC
if KM is None:
KM, KC = start_new_kernel()
atexit.register(stop_global_kernel)
else:
flush_channels(KC)
return KC
@contextmanager
def kernel():
"""Context manager for the global kernel instance
Should be used for most kernel tests
Returns
-------
kernel_client: connected KernelClient instance
"""
yield start_global_kernel()
def uses_kernel(test_f):
"""Decorator for tests that use the global kernel"""
def wrapped_test():
with kernel() as kc:
test_f(kc)
wrapped_test.__doc__ = test_f.__doc__
wrapped_test.__name__ = test_f.__name__
return wrapped_test
def stop_global_kernel():
"""Stop the global shared kernel instance, if it exists"""
global KM, KC
KC.stop_channels()
KC = None
if KM is None:
return
KM.shutdown_kernel(now=True)
KM = None
def new_kernel(argv=None):
"""Context manager for a new kernel in a subprocess
Should only be used for tests where the kernel must not be re-used.
Returns
-------
kernel_client: connected KernelClient instance
"""
kwargs = {'stderr': STDOUT}
try:
kwargs['stdout'] = nose.iptest_stdstreams_fileno()
except AttributeError:
pass
if argv is not None:
kwargs['extra_arguments'] = argv
return manager.run_kernel(**kwargs)
def assemble_output(iopub):
"""assemble stdout/err from an execution"""
stdout = ''
stderr = ''
while True:
msg = iopub.get_msg(block=True, timeout=1)
msg_type = msg['msg_type']
content = msg['content']
if msg_type == 'status' and content['execution_state'] == 'idle':
# idle message signals end of output
break
elif msg['msg_type'] == 'stream':
if content['name'] == 'stdout':
stdout += content['text']
elif content['name'] == 'stderr':
stderr += content['text']
else:
raise KeyError("bad stream: %r" % content['name'])
else:
# other output, ignored
pass
return stdout, stderr
def wait_for_idle(kc):
while True:
msg = kc.iopub_channel.get_msg(block=True, timeout=1)
msg_type = msg['msg_type']
content = msg['content']
if msg_type == 'status' and content['execution_state'] == 'idle':
break
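
Finally, a hedged sketch of how the helpers in this module are typically combined by the test files above (assuming the module is importable as ipykernel.tests.utils):

from ipykernel.tests.utils import execute, kernel, wait_for_idle

def test_addition_smoke():
    with kernel() as kc:
        msg_id, reply = execute("a = 1 + 1", kc=kc)
        assert reply['status'] == 'ok'
        wait_for_idle(kc)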