Uploaded Test files

This commit is contained in:
Batuhan Berk Başoğlu 2020-11-12 11:05:57 -05:00
parent f584ad9d97
commit 2e81cb7d99
16627 changed files with 2065359 additions and 102444 deletions

View file

@@ -0,0 +1,77 @@
"""Test kernel for signalling subprocesses"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import os
from subprocess import Popen, PIPE
import sys
import time
from ipykernel.displayhook import ZMQDisplayHook
from ipykernel.kernelbase import Kernel
from ipykernel.kernelapp import IPKernelApp
class SignalTestKernel(Kernel):
"""Kernel for testing subprocess signaling"""
implementation = 'signaltest'
implementation_version = '0.0'
banner = ''
def __init__(self, **kwargs):
kwargs.pop('user_ns', None)
super().__init__(**kwargs)
self.children = []
def do_execute(self, code, silent, store_history=True, user_expressions=None,
allow_stdin=False):
code = code.strip()
reply = {
'status': 'ok',
'user_expressions': {},
}
if code == 'start':
child = Popen(['bash', '-i', '-c', 'sleep 30'], stderr=PIPE)
self.children.append(child)
reply['user_expressions']['pid'] = self.children[-1].pid
elif code == 'check':
reply['user_expressions']['poll'] = [ child.poll() for child in self.children ]
elif code == 'env':
reply['user_expressions']['env'] = os.getenv("TEST_VARS", "")
elif code == 'sleep':
try:
time.sleep(10)
except KeyboardInterrupt:
reply['user_expressions']['interrupted'] = True
else:
reply['user_expressions']['interrupted'] = False
else:
reply['status'] = 'error'
reply['ename'] = 'Error'
reply['evalue'] = code
reply['traceback'] = ['no such command: %s' % code]
return reply
def kernel_info_request(self, *args, **kwargs):
"""Add delay to kernel_info_request
triggers slow-response code in KernelClient.wait_for_ready
"""
return super().kernel_info_request(*args, **kwargs)
class SignalTestApp(IPKernelApp):
kernel_class = SignalTestKernel
def init_io(self):
# Overridden to disable stdout/stderr capture
self.displayhook = ZMQDisplayHook(self.session, self.iopub_socket)
if __name__ == '__main__':
# make startup artificially slow,
# so that we exercise client logic for slow-starting kernels
time.sleep(2)
SignalTestApp.launch_instance()
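
The kernel above only understands the literal commands start, check, env and sleep, reporting results through user_expressions. A minimal sketch of driving it from a blocking client, assuming a 'signaltest' kernelspec pointing at this module is installed (the install_kernel fixture further below does exactly that):

# Sketch: exercise the signal-test kernel from a blocking client.
# Assumes a 'signaltest' kernelspec for this module is installed.
from jupyter_client.manager import start_new_kernel

km, kc = start_new_kernel(kernel_name='signaltest')
try:
    kc.execute('start')                        # kernel spawns a sleeping child
    reply = kc.get_shell_msg(timeout=30)
    print(reply['content']['user_expressions']['pid'])
    kc.execute('check')                        # poll() result for each child
    reply = kc.get_shell_msg(timeout=30)
    print(reply['content']['user_expressions']['poll'])
finally:
    kc.stop_channels()
    km.shutdown_kernel()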

View file

@@ -0,0 +1,404 @@
"""Tests for adapting Jupyter msg spec versions"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import copy
import json
from unittest import TestCase
from jupyter_client.adapter import adapt, V4toV5, V5toV4, code_to_line
from jupyter_client.session import Session
def test_default_version():
s = Session()
msg = s.msg("msg_type")
msg['header'].pop('version')
original = copy.deepcopy(msg)
adapted = adapt(original)
assert adapted['header']['version'] == V4toV5.version
def test_code_to_line_no_code():
line, pos = code_to_line("", 0)
assert line == ""
assert pos == 0
class AdapterTest(TestCase):
def setUp(self):
self.session = Session()
def adapt(self, msg, version=None):
original = copy.deepcopy(msg)
adapted = adapt(msg, version or self.to_version)
return original, adapted
def check_header(self, msg):
pass
class V4toV5TestCase(AdapterTest):
from_version = 4
to_version = 5
def msg(self, msg_type, content):
"""Create a v4 msg (same as v5, minus version header)"""
msg = self.session.msg(msg_type, content)
msg['header'].pop('version')
return msg
def test_same_version(self):
msg = self.msg("execute_result",
content={'status' : 'ok'}
)
original, adapted = self.adapt(msg, self.from_version)
self.assertEqual(original, adapted)
def test_no_adapt(self):
msg = self.msg("input_reply", {'value' : 'some text'})
v4, v5 = self.adapt(msg)
self.assertEqual(v5['header']['version'], V4toV5.version)
v5['header'].pop('version')
self.assertEqual(v4, v5)
def test_rename_type(self):
for v5_type, v4_type in [
('execute_result', 'pyout'),
('execute_input', 'pyin'),
('error', 'pyerr'),
]:
msg = self.msg(v4_type, {'key' : 'value'})
v4, v5 = self.adapt(msg)
self.assertEqual(v5['header']['version'], V4toV5.version)
self.assertEqual(v5['header']['msg_type'], v5_type)
self.assertEqual(v4['content'], v5['content'])
def test_execute_request(self):
msg = self.msg("execute_request", {
'code' : 'a=5',
'silent' : False,
'user_expressions' : {'a' : 'apple'},
'user_variables' : ['b'],
})
v4, v5 = self.adapt(msg)
self.assertEqual(v4['header']['msg_type'], v5['header']['msg_type'])
v4c = v4['content']
v5c = v5['content']
self.assertEqual(v5c['user_expressions'], {'a' : 'apple', 'b': 'b'})
self.assertNotIn('user_variables', v5c)
self.assertEqual(v5c['code'], v4c['code'])
def test_execute_reply(self):
msg = self.msg("execute_reply", {
'status': 'ok',
'execution_count': 7,
'user_variables': {'a': 1},
'user_expressions': {'a+a': 2},
'payload': [{'source':'page', 'text':'blah'}]
})
v4, v5 = self.adapt(msg)
v5c = v5['content']
self.assertNotIn('user_variables', v5c)
self.assertEqual(v5c['user_expressions'], {'a': 1, 'a+a': 2})
self.assertEqual(v5c['payload'], [{'source': 'page',
'data': {'text/plain': 'blah'}}
])
def test_complete_request(self):
msg = self.msg("complete_request", {
'text' : 'a.is',
'line' : 'foo = a.is',
'block' : None,
'cursor_pos' : 10,
})
v4, v5 = self.adapt(msg)
v4c = v4['content']
v5c = v5['content']
for key in ('text', 'line', 'block'):
self.assertNotIn(key, v5c)
self.assertEqual(v5c['cursor_pos'], v4c['cursor_pos'])
self.assertEqual(v5c['code'], v4c['line'])
def test_complete_reply(self):
msg = self.msg("complete_reply", {
'matched_text' : 'a.is',
'matches' : ['a.isalnum',
'a.isalpha',
'a.isdigit',
'a.islower',
],
})
v4, v5 = self.adapt(msg)
v4c = v4['content']
v5c = v5['content']
self.assertEqual(v5c['matches'], v4c['matches'])
self.assertEqual(v5c['metadata'], {})
self.assertEqual(v5c['cursor_start'], -4)
self.assertEqual(v5c['cursor_end'], None)
def test_object_info_request(self):
msg = self.msg("object_info_request", {
'oname' : 'foo',
'detail_level' : 1,
})
v4, v5 = self.adapt(msg)
self.assertEqual(v5['header']['msg_type'], 'inspect_request')
v4c = v4['content']
v5c = v5['content']
self.assertEqual(v5c['code'], v4c['oname'])
self.assertEqual(v5c['cursor_pos'], len(v4c['oname']))
self.assertEqual(v5c['detail_level'], v4c['detail_level'])
def test_object_info_reply(self):
msg = self.msg("object_info_reply", {
'name' : 'foo',
'found' : True,
'status' : 'ok',
'definition' : 'foo(a=5)',
'docstring' : "the docstring",
})
v4, v5 = self.adapt(msg)
self.assertEqual(v5['header']['msg_type'], 'inspect_reply')
v4c = v4['content']
v5c = v5['content']
self.assertEqual(sorted(v5c), [ 'data', 'found', 'metadata', 'status'])
text = v5c['data']['text/plain']
self.assertEqual(text, '\n'.join([v4c['definition'], v4c['docstring']]))
def test_object_info_reply_not_found(self):
msg = self.msg("object_info_reply", {
'name' : 'foo',
'found' : False,
})
v4, v5 = self.adapt(msg)
self.assertEqual(v5['header']['msg_type'], 'inspect_reply')
v4c = v4['content']
v5c = v5['content']
self.assertEqual(v5c, {
'status': 'ok',
'found': False,
'data': {},
'metadata': {},
})
def test_kernel_info_reply(self):
msg = self.msg("kernel_info_reply", {
'language': 'python',
'language_version': [2,8,0],
'ipython_version': [1,2,3],
})
v4, v5 = self.adapt(msg)
v4c = v4['content']
v5c = v5['content']
self.assertEqual(v5c, {
'protocol_version': '4.1',
'implementation': 'ipython',
'implementation_version': '1.2.3',
'language_info': {
'name': 'python',
'version': '2.8.0',
},
'banner' : '',
})
# iopub channel
def test_display_data(self):
jsondata = dict(a=5)
msg = self.msg("display_data", {
'data' : {
'text/plain' : 'some text',
'application/json' : json.dumps(jsondata)
},
'metadata' : {'text/plain' : { 'key' : 'value' }},
})
v4, v5 = self.adapt(msg)
v4c = v4['content']
v5c = v5['content']
self.assertEqual(v5c['metadata'], v4c['metadata'])
self.assertEqual(v5c['data']['text/plain'], v4c['data']['text/plain'])
self.assertEqual(v5c['data']['application/json'], jsondata)
# stdin channel
def test_input_request(self):
msg = self.msg('input_request', {'prompt': "$>"})
v4, v5 = self.adapt(msg)
self.assertEqual(v5['content']['prompt'], v4['content']['prompt'])
self.assertFalse(v5['content']['password'])
class V5toV4TestCase(AdapterTest):
from_version = 5
to_version = 4
def msg(self, msg_type, content):
return self.session.msg(msg_type, content)
def test_same_version(self):
msg = self.msg("execute_result",
content={'status' : 'ok'}
)
original, adapted = self.adapt(msg, self.from_version)
self.assertEqual(original, adapted)
def test_no_adapt(self):
msg = self.msg("input_reply", {'value' : 'some text'})
v5, v4 = self.adapt(msg)
self.assertNotIn('version', v4['header'])
v5['header'].pop('version')
self.assertEqual(v4, v5)
def test_rename_type(self):
for v5_type, v4_type in [
('execute_result', 'pyout'),
('execute_input', 'pyin'),
('error', 'pyerr'),
]:
msg = self.msg(v5_type, {'key' : 'value'})
v5, v4 = self.adapt(msg)
self.assertEqual(v4['header']['msg_type'], v4_type)
assert 'version' not in v4['header']
self.assertEqual(v4['content'], v5['content'])
def test_execute_request(self):
msg = self.msg("execute_request", {
'code' : 'a=5',
'silent' : False,
'user_expressions' : {'a' : 'apple'},
})
v5, v4 = self.adapt(msg)
self.assertEqual(v4['header']['msg_type'], v5['header']['msg_type'])
v4c = v4['content']
v5c = v5['content']
self.assertEqual(v4c['user_variables'], [])
self.assertEqual(v5c['code'], v4c['code'])
def test_complete_request(self):
msg = self.msg("complete_request", {
'code' : 'def foo():\n'
' a.is\n'
'foo()',
'cursor_pos': 19,
})
v5, v4 = self.adapt(msg)
v4c = v4['content']
v5c = v5['content']
self.assertNotIn('code', v4c)
self.assertEqual(v4c['line'], v5c['code'].splitlines(True)[1])
self.assertEqual(v4c['cursor_pos'], 8)
self.assertEqual(v4c['text'], '')
self.assertEqual(v4c['block'], None)
def test_complete_reply(self):
msg = self.msg("complete_reply", {
'cursor_start' : 10,
'cursor_end' : 14,
'matches' : ['a.isalnum',
'a.isalpha',
'a.isdigit',
'a.islower',
],
'metadata' : {},
})
v5, v4 = self.adapt(msg)
v4c = v4['content']
v5c = v5['content']
self.assertEqual(v4c['matched_text'], 'a.is')
self.assertEqual(v4c['matches'], v5c['matches'])
def test_inspect_request(self):
msg = self.msg("inspect_request", {
'code' : 'def foo():\n'
' apple\n'
'bar()',
'cursor_pos': 18,
'detail_level' : 1,
})
v5, v4 = self.adapt(msg)
self.assertEqual(v4['header']['msg_type'], 'object_info_request')
v4c = v4['content']
v5c = v5['content']
self.assertEqual(v4c['oname'], 'apple')
self.assertEqual(v5c['detail_level'], v4c['detail_level'])
def test_inspect_request_token(self):
line = 'something(range(10), kwarg=smth) ; xxx.xxx.xxx( firstarg, rand(234,23), kwarg1=2,'
msg = self.msg("inspect_request", {
'code' : line,
'cursor_pos': len(line)-1,
'detail_level' : 1,
})
v5, v4 = self.adapt(msg)
self.assertEqual(v4['header']['msg_type'], 'object_info_request')
v4c = v4['content']
v5c = v5['content']
self.assertEqual(v4c['oname'], 'xxx.xxx.xxx')
self.assertEqual(v5c['detail_level'], v4c['detail_level'])
def test_inspect_reply(self):
msg = self.msg("inspect_reply", {
'name' : 'foo',
'found' : True,
'data' : {'text/plain' : 'some text'},
'metadata' : {},
})
v5, v4 = self.adapt(msg)
self.assertEqual(v4['header']['msg_type'], 'object_info_reply')
v4c = v4['content']
v5c = v5['content']
self.assertEqual(sorted(v4c), ['found', 'oname'])
self.assertEqual(v4c['found'], False)
def test_kernel_info_reply(self):
msg = self.msg("kernel_info_reply", {
'protocol_version': '5.0',
'implementation': 'ipython',
'implementation_version': '1.2.3',
'language_info': {
'name': 'python',
'version': '2.8.0',
'mimetype': 'text/x-python',
},
'banner' : 'the banner',
})
v5, v4 = self.adapt(msg)
v4c = v4['content']
v5c = v5['content']
info = v5c['language_info']
self.assertEqual(v4c, {
'protocol_version': [5,0],
'language': 'python',
'language_version': [2,8,0],
'ipython_version': [1,2,3],
})
# iopub channel
def test_display_data(self):
jsondata = dict(a=5)
msg = self.msg("display_data", {
'data' : {
'text/plain' : 'some text',
'application/json' : jsondata,
},
'metadata' : {'text/plain' : { 'key' : 'value' }},
})
v5, v4 = self.adapt(msg)
v4c = v4['content']
v5c = v5['content']
self.assertEqual(v5c['metadata'], v4c['metadata'])
self.assertEqual(v5c['data']['text/plain'], v4c['data']['text/plain'])
self.assertEqual(v4c['data']['application/json'], json.dumps(jsondata))
# stdin channel
def test_input_request(self):
msg = self.msg('input_request', {'prompt': "$>", 'password' : True})
v5, v4 = self.adapt(msg)
self.assertEqual(v5['content']['prompt'], v4['content']['prompt'])
self.assertNotIn('password', v4['content'])
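
For reference, the adapter can also be driven directly, without the test harness; a minimal sketch of downgrading a freshly built v5 message to the v4 wire format, mirroring the cases above:

# Sketch: adapt a v5 message down to v4, as V5toV4TestCase does via self.adapt().
from jupyter_client.adapter import adapt
from jupyter_client.session import Session

session = Session()
v5_msg = session.msg('complete_request', {'code': 'a.is', 'cursor_pos': 4})
v4_msg = adapt(v5_msg, 4)          # target protocol version passed positionally
print(sorted(v4_msg['content']))   # expect block, cursor_pos, line, text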

View file

@@ -0,0 +1,87 @@
"""Tests for the KernelClient"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import os
pjoin = os.path.join
from unittest import TestCase
from jupyter_client.kernelspec import KernelSpecManager, NoSuchKernel, NATIVE_KERNEL_NAME
from ..manager import start_new_kernel
from .utils import test_env
import pytest
from IPython.utils.capture import capture_output
TIMEOUT = 30
class TestKernelClient(TestCase):
def setUp(self):
self.env_patch = test_env()
self.env_patch.start()
self.addCleanup(self.env_patch.stop)
try:
KernelSpecManager().get_kernel_spec(NATIVE_KERNEL_NAME)
except NoSuchKernel:
pytest.skip()
self.km, self.kc = start_new_kernel(kernel_name=NATIVE_KERNEL_NAME)
self.addCleanup(self.kc.stop_channels)
self.addCleanup(self.km.shutdown_kernel)
def test_execute_interactive(self):
kc = self.kc
with capture_output() as io:
reply = kc.execute_interactive("print('hello')", timeout=TIMEOUT)
assert 'hello' in io.stdout
assert reply['content']['status'] == 'ok'
def _check_reply(self, reply_type, reply):
self.assertIsInstance(reply, dict)
self.assertEqual(reply['header']['msg_type'], reply_type + '_reply')
self.assertEqual(reply['parent_header']['msg_type'], reply_type + '_request')
def test_history(self):
kc = self.kc
msg_id = kc.history(session=0)
self.assertIsInstance(msg_id, str)
reply = kc.history(session=0, reply=True, timeout=TIMEOUT)
self._check_reply('history', reply)
def test_inspect(self):
kc = self.kc
msg_id = kc.inspect('who cares')
self.assertIsInstance(msg_id, str)
reply = kc.inspect('code', reply=True, timeout=TIMEOUT)
self._check_reply('inspect', reply)
def test_complete(self):
kc = self.kc
msg_id = kc.complete('who cares')
self.assertIsInstance(msg_id, str)
reply = kc.complete('code', reply=True, timeout=TIMEOUT)
self._check_reply('complete', reply)
def test_kernel_info(self):
kc = self.kc
msg_id = kc.kernel_info()
self.assertIsInstance(msg_id, str)
reply = kc.kernel_info(reply=True, timeout=TIMEOUT)
self._check_reply('kernel_info', reply)
def test_comm_info(self):
kc = self.kc
msg_id = kc.comm_info()
self.assertIsInstance(msg_id, str)
reply = kc.comm_info(reply=True, timeout=TIMEOUT)
self._check_reply('comm_info', reply)
def test_shutdown(self):
kc = self.kc
msg_id = kc.shutdown()
self.assertIsInstance(msg_id, str)
reply = kc.shutdown(reply=True, timeout=TIMEOUT)
self._check_reply('shutdown', reply)
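
The reply=True convenience used above also works outside unittest; a minimal sketch, assuming ipykernel and the native kernelspec are available:

# Sketch: blocking request/reply against the native kernel.
from jupyter_client.kernelspec import NATIVE_KERNEL_NAME
from jupyter_client.manager import start_new_kernel

km, kc = start_new_kernel(kernel_name=NATIVE_KERNEL_NAME)   # requires ipykernel
try:
    reply = kc.kernel_info(reply=True, timeout=30)
    print(reply['content']['implementation'])
    reply = kc.execute_interactive("print('hello')", timeout=30)
    print(reply['content']['status'])          # 'ok'
finally:
    kc.stop_channels()
    km.shutdown_kernel()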

View file

@@ -0,0 +1,259 @@
"""Tests for kernel connection utilities"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import json
import os
import re
import stat
import tempfile
import shutil
from traitlets.config import Config
from jupyter_core.application import JupyterApp
from jupyter_core.paths import jupyter_runtime_dir
from ipython_genutils.tempdir import TemporaryDirectory, TemporaryWorkingDirectory
from ipython_genutils.py3compat import str_to_bytes
from jupyter_client import connect, KernelClient
from jupyter_client.consoleapp import JupyterConsoleApp
from jupyter_client.session import Session
from jupyter_client.connect import secure_write
class DummyConsoleApp(JupyterApp, JupyterConsoleApp):
def initialize(self, argv=[]):
JupyterApp.initialize(self, argv=argv)
self.init_connection_file()
class DummyConfigurable(connect.ConnectionFileMixin):
def initialize(self):
pass
sample_info = dict(ip='1.2.3.4', transport='ipc',
shell_port=1, hb_port=2, iopub_port=3, stdin_port=4, control_port=5,
key=b'abc123', signature_scheme='hmac-md5', kernel_name='python'
)
sample_info_kn = dict(ip='1.2.3.4', transport='ipc',
shell_port=1, hb_port=2, iopub_port=3, stdin_port=4, control_port=5,
key=b'abc123', signature_scheme='hmac-md5', kernel_name='test'
)
def test_write_connection_file():
with TemporaryDirectory() as d:
cf = os.path.join(d, 'kernel.json')
connect.write_connection_file(cf, **sample_info)
assert os.path.exists(cf)
with open(cf, 'r') as f:
info = json.load(f)
info['key'] = str_to_bytes(info['key'])
assert info == sample_info
def test_load_connection_file_session():
"""test load_connection_file() after """
session = Session()
app = DummyConsoleApp(session=Session())
app.initialize(argv=[])
session = app.session
with TemporaryDirectory() as d:
cf = os.path.join(d, 'kernel.json')
connect.write_connection_file(cf, **sample_info)
app.connection_file = cf
app.load_connection_file()
assert session.key == sample_info['key']
assert session.signature_scheme == sample_info['signature_scheme']
def test_load_connection_file_session_with_kn():
"""test load_connection_file() after """
session = Session()
app = DummyConsoleApp(session=Session())
app.initialize(argv=[])
session = app.session
with TemporaryDirectory() as d:
cf = os.path.join(d, 'kernel.json')
connect.write_connection_file(cf, **sample_info_kn)
app.connection_file = cf
app.load_connection_file()
assert session.key == sample_info_kn['key']
assert session.signature_scheme == sample_info_kn['signature_scheme']
def test_app_load_connection_file():
"""test `ipython console --existing` loads a connection file"""
with TemporaryDirectory() as d:
cf = os.path.join(d, 'kernel.json')
connect.write_connection_file(cf, **sample_info)
app = DummyConsoleApp(connection_file=cf)
app.initialize(argv=[])
for attr, expected in sample_info.items():
if attr in ('key', 'signature_scheme'):
continue
value = getattr(app, attr)
assert value == expected, "app.%s = %s != %s" % (attr, value, expected)
def test_load_connection_info():
client = KernelClient()
info = {
'control_port': 53702,
'hb_port': 53705,
'iopub_port': 53703,
'ip': '0.0.0.0',
'key': 'secret',
'shell_port': 53700,
'signature_scheme': 'hmac-sha256',
'stdin_port': 53701,
'transport': 'tcp',
}
client.load_connection_info(info)
assert client.control_port == info['control_port']
assert client.session.key.decode('ascii') == info['key']
assert client.ip == info['ip']
def test_find_connection_file():
with TemporaryDirectory() as d:
cf = 'kernel.json'
app = DummyConsoleApp(runtime_dir=d, connection_file=cf)
app.initialize()
security_dir = app.runtime_dir
profile_cf = os.path.join(security_dir, cf)
with open(profile_cf, 'w') as f:
f.write("{}")
for query in (
'kernel.json',
'kern*',
'*ernel*',
'k*',
):
assert connect.find_connection_file(query, path=security_dir) == profile_cf
def test_find_connection_file_local():
with TemporaryWorkingDirectory() as d:
cf = 'test.json'
abs_cf = os.path.abspath(cf)
with open(cf, 'w') as f:
f.write('{}')
for query in (
'test.json',
'test',
abs_cf,
os.path.join('.', 'test.json'),
):
assert connect.find_connection_file(query, path=['.', jupyter_runtime_dir()]) == abs_cf
def test_find_connection_file_relative():
with TemporaryWorkingDirectory() as d:
cf = 'test.json'
os.mkdir('subdir')
cf = os.path.join('subdir', 'test.json')
abs_cf = os.path.abspath(cf)
with open(cf, 'w') as f:
f.write('{}')
for query in (
os.path.join('.', 'subdir', 'test.json'),
os.path.join('subdir', 'test.json'),
abs_cf,
):
assert connect.find_connection_file(query, path=['.', jupyter_runtime_dir()]) == abs_cf
def test_find_connection_file_abspath():
with TemporaryDirectory() as d:
cf = 'absolute.json'
abs_cf = os.path.abspath(cf)
with open(cf, 'w') as f:
f.write('{}')
assert connect.find_connection_file(abs_cf, path=jupyter_runtime_dir()) == abs_cf
os.remove(abs_cf)
def test_mixin_record_random_ports():
with TemporaryDirectory() as d:
dc = DummyConfigurable(data_dir=d, kernel_name='via-tcp', transport='tcp')
dc.write_connection_file()
assert dc._connection_file_written
assert os.path.exists(dc.connection_file)
assert dc._random_port_names == connect.port_names
def test_mixin_cleanup_random_ports():
with TemporaryDirectory() as d:
dc = DummyConfigurable(data_dir=d, kernel_name='via-tcp', transport='tcp')
dc.write_connection_file()
filename = dc.connection_file
dc.cleanup_random_ports()
assert not os.path.exists(filename)
for name in dc._random_port_names:
assert getattr(dc, name) == 0
def test_secure_write():
def fetch_win32_permissions(filename):
'''Extracts file permissions on windows using icacls'''
role_permissions = {}
for index, line in enumerate(os.popen("icacls %s" % filename).read().splitlines()):
if index == 0:
line = line.split(filename)[-1].strip().lower()
match = re.match(r'\s*([^:]+):\(([^\)]*)\)', line)
if match:
usergroup, permissions = match.groups()
usergroup = usergroup.lower().split('\\')[-1]
permissions = set(p.lower() for p in permissions.split(','))
role_permissions[usergroup] = permissions
elif not line.strip():
break
return role_permissions
def check_user_only_permissions(fname):
        # Windows has its own permissions ACL patterns
if os.name == 'nt':
import win32api
username = win32api.GetUserName().lower()
permissions = fetch_win32_permissions(fname)
assert username in permissions
assert permissions[username] == set(['r', 'w'])
assert 'administrators' in permissions
assert permissions['administrators'] == set(['f'])
assert 'everyone' not in permissions
assert len(permissions) == 2
else:
mode = os.stat(fname).st_mode
assert '0600' == oct(stat.S_IMODE(mode)).replace('0o', '0')
directory = tempfile.mkdtemp()
fname = os.path.join(directory, 'check_perms')
try:
with secure_write(fname) as f:
f.write('test 1')
check_user_only_permissions(fname)
with open(fname, 'r') as f:
assert f.read() == 'test 1'
# Try changing file permissions ahead of time
os.chmod(fname, 0o755)
with secure_write(fname) as f:
f.write('test 2')
check_user_only_permissions(fname)
with open(fname, 'r') as f:
assert f.read() == 'test 2'
finally:
shutil.rmtree(directory)
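
A minimal sketch of the write/load round trip these tests exercise, using a throwaway directory:

# Sketch: write a connection file and read it back, as test_write_connection_file does.
import json
import os
from ipython_genutils.tempdir import TemporaryDirectory
from jupyter_client import connect

with TemporaryDirectory() as d:
    cf = os.path.join(d, 'kernel.json')
    connect.write_connection_file(cf, ip='127.0.0.1', key=b'abc123',
                                  transport='tcp', signature_scheme='hmac-sha256')
    with open(cf) as f:
        info = json.load(f)
    print(info['ip'], info['shell_port'])      # ports are auto-assigned when omitted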

View file

@@ -0,0 +1,85 @@
"""Test suite for our JSON utilities."""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import json
import pytest
import datetime
from datetime import timedelta
from unittest import mock
from dateutil.tz import tzlocal, tzoffset
from jupyter_client import jsonutil
from jupyter_client.session import utcnow
REFERENCE_DATETIME = datetime.datetime(
2013, 7, 3, 16, 34, 52, 249482, tzlocal()
)
def test_extract_date_from_naive():
ref = REFERENCE_DATETIME
timestamp = '2013-07-03T16:34:52.249482'
with pytest.deprecated_call(match='Interpreting naive datetime as local'):
extracted = jsonutil.extract_dates(timestamp)
assert isinstance(extracted, datetime.datetime)
assert extracted.tzinfo is not None
assert extracted.tzinfo.utcoffset(ref) == tzlocal().utcoffset(ref)
assert extracted == ref
def test_extract_dates():
ref = REFERENCE_DATETIME
timestamps = [
'2013-07-03T16:34:52.249482Z',
'2013-07-03T16:34:52.249482-0800',
'2013-07-03T16:34:52.249482+0800',
'2013-07-03T16:34:52.249482-08:00',
'2013-07-03T16:34:52.249482+08:00',
]
extracted = jsonutil.extract_dates(timestamps)
for dt in extracted:
assert isinstance(dt, datetime.datetime)
assert dt.tzinfo is not None
assert extracted[0].tzinfo.utcoffset(ref) == timedelta(0)
assert extracted[1].tzinfo.utcoffset(ref) == timedelta(hours=-8)
assert extracted[2].tzinfo.utcoffset(ref) == timedelta(hours=8)
assert extracted[3].tzinfo.utcoffset(ref) == timedelta(hours=-8)
assert extracted[4].tzinfo.utcoffset(ref) == timedelta(hours=8)
def test_parse_ms_precision():
base = '2013-07-03T16:34:52'
digits = '1234567890'
parsed = jsonutil.parse_date(base+'Z')
assert isinstance(parsed, datetime.datetime)
for i in range(len(digits)):
ts = base + '.' + digits[:i]
parsed = jsonutil.parse_date(ts+'Z')
if i >= 1 and i <= 6:
assert isinstance(parsed, datetime.datetime)
else:
assert isinstance(parsed, str)
def test_date_default():
naive = datetime.datetime.now()
local = tzoffset('Local', -8 * 3600)
other = tzoffset('Other', 2 * 3600)
data = dict(naive=naive, utc=utcnow(), withtz=naive.replace(tzinfo=other))
with mock.patch.object(jsonutil, 'tzlocal', lambda : local):
with pytest.deprecated_call(match='Please add timezone info'):
jsondata = json.dumps(data, default=jsonutil.date_default)
assert "Z" in jsondata
assert jsondata.count("Z") == 1
extracted = jsonutil.extract_dates(json.loads(jsondata))
for dt in extracted.values():
assert isinstance(dt, datetime.datetime)
        assert dt.tzinfo is not None
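
A minimal sketch of the serialize/parse round trip these tests cover:

# Sketch: date_default emits ISO8601 strings; extract_dates turns them back into datetimes.
import json
from jupyter_client import jsonutil
from jupyter_client.session import utcnow

wire = json.dumps({'stamp': utcnow()}, default=jsonutil.date_default)
parsed = jsonutil.extract_dates(json.loads(wire))
print(type(parsed['stamp']), parsed['stamp'].tzinfo)   # timezone-aware datetime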

View file

@@ -0,0 +1,50 @@
import os
import sys
import shutil
import time
from subprocess import Popen, PIPE
from tempfile import mkdtemp
def _launch(extra_env):
env = os.environ.copy()
env.update(extra_env)
return Popen([sys.executable, '-c',
'from jupyter_client.kernelapp import main; main()'],
env=env, stderr=PIPE)
WAIT_TIME = 10
POLL_FREQ = 10
def test_kernelapp_lifecycle():
# Check that 'jupyter kernel' starts and terminates OK.
runtime_dir = mkdtemp()
startup_dir = mkdtemp()
started = os.path.join(startup_dir, 'started')
try:
p = _launch({'JUPYTER_RUNTIME_DIR': runtime_dir,
'JUPYTER_CLIENT_TEST_RECORD_STARTUP_PRIVATE': started,
})
# Wait for start
for _ in range(WAIT_TIME * POLL_FREQ):
if os.path.isfile(started):
break
time.sleep(1 / POLL_FREQ)
else:
raise AssertionError("No started file created in {} seconds"
.format(WAIT_TIME))
# Connection file should be there by now
files = os.listdir(runtime_dir)
assert len(files) == 1
cf = files[0]
assert cf.startswith('kernel')
assert cf.endswith('.json')
# Send SIGTERM to shut down
p.terminate()
_, stderr = p.communicate(timeout=WAIT_TIME)
assert cf in stderr.decode('utf-8', 'replace')
finally:
shutil.rmtree(runtime_dir)
shutil.rmtree(startup_dir)

View file

@@ -0,0 +1,414 @@
"""Tests for the KernelManager"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import asyncio
import json
import os
import signal
import sys
import time
import threading
import multiprocessing as mp
import pytest
from async_generator import async_generator, yield_
from traitlets.config.loader import Config
from jupyter_core import paths
from jupyter_client import KernelManager, AsyncKernelManager
from subprocess import PIPE
from ..manager import start_new_kernel, start_new_async_kernel
from .utils import test_env, skip_win32, AsyncKernelManagerSubclass, AsyncKernelManagerWithCleanup
pjoin = os.path.join
TIMEOUT = 30
@pytest.fixture(autouse=True)
def env():
env_patch = test_env()
env_patch.start()
yield
env_patch.stop()
@pytest.fixture(params=['tcp', 'ipc'])
def transport(request):
    if sys.platform == 'win32' and request.param == 'ipc':
pytest.skip("Transport 'ipc' not supported on Windows.")
return request.param
@pytest.fixture
def config(transport):
c = Config()
c.KernelManager.transport = transport
if transport == 'ipc':
c.KernelManager.ip = 'test'
return c
@pytest.fixture
def install_kernel():
kernel_dir = pjoin(paths.jupyter_data_dir(), 'kernels', 'signaltest')
os.makedirs(kernel_dir)
with open(pjoin(kernel_dir, 'kernel.json'), 'w') as f:
f.write(json.dumps({
'argv': [sys.executable,
'-m', 'jupyter_client.tests.signalkernel',
'-f', '{connection_file}'],
'display_name': "Signal Test Kernel",
'env': {'TEST_VARS': '${TEST_VARS}:test_var_2'},
}))
@pytest.fixture
def start_kernel():
km, kc = start_new_kernel(kernel_name='signaltest')
yield km, kc
kc.stop_channels()
km.shutdown_kernel()
assert km.context.closed
@pytest.fixture
def start_kernel_w_env():
kernel_cmd = [sys.executable,
'-m', 'jupyter_client.tests.signalkernel',
'-f', '{connection_file}']
extra_env = {'TEST_VARS': '${TEST_VARS}:test_var_2'}
km = KernelManager(kernel_name='signaltest')
km.kernel_cmd = kernel_cmd
km.extra_env = extra_env
km.start_kernel()
kc = km.client()
kc.start_channels()
kc.wait_for_ready(timeout=60)
yield km, kc
kc.stop_channels()
km.shutdown_kernel()
@pytest.fixture
def km(config):
km = KernelManager(config=config)
return km
@pytest.fixture
def zmq_context():
import zmq
ctx = zmq.Context()
yield ctx
ctx.term()
@pytest.fixture(params=[AsyncKernelManager, AsyncKernelManagerSubclass, AsyncKernelManagerWithCleanup])
def async_km(request, config):
km = request.param(config=config)
return km
@pytest.fixture
@async_generator  # Only necessary while Python 3.5 is supported, after which both it and yield_() can be removed
async def start_async_kernel():
km, kc = await start_new_async_kernel(kernel_name='signaltest')
await yield_((km, kc))
kc.stop_channels()
await km.shutdown_kernel()
assert km.context.closed
class TestKernelManager:
def test_lifecycle(self, km):
km.start_kernel(stdout=PIPE, stderr=PIPE)
assert km.is_alive()
km.restart_kernel(now=True)
assert km.is_alive()
km.interrupt_kernel()
assert isinstance(km, KernelManager)
km.shutdown_kernel(now=True)
assert km.context.closed
def test_get_connect_info(self, km):
cinfo = km.get_connection_info()
keys = sorted(cinfo.keys())
expected = sorted([
'ip', 'transport',
'hb_port', 'shell_port', 'stdin_port', 'iopub_port', 'control_port',
'key', 'signature_scheme',
])
assert keys == expected
@pytest.mark.skipif(sys.platform == 'win32', reason="Windows doesn't support signals")
def test_signal_kernel_subprocesses(self, install_kernel, start_kernel):
km, kc = start_kernel
def execute(cmd):
kc.execute(cmd)
reply = kc.get_shell_msg(TIMEOUT)
content = reply['content']
assert content['status'] == 'ok'
return content
N = 5
for i in range(N):
execute("start")
time.sleep(1) # make sure subprocs stay up
reply = execute('check')
assert reply['user_expressions']['poll'] == [None] * N
# start a job on the kernel to be interrupted
kc.execute('sleep')
time.sleep(1) # ensure sleep message has been handled before we interrupt
km.interrupt_kernel()
reply = kc.get_shell_msg(TIMEOUT)
content = reply['content']
assert content['status'] == 'ok'
assert content['user_expressions']['interrupted']
# wait up to 5s for subprocesses to handle signal
for i in range(50):
reply = execute('check')
if reply['user_expressions']['poll'] != [-signal.SIGINT] * N:
time.sleep(0.1)
else:
break
# verify that subprocesses were interrupted
assert reply['user_expressions']['poll'] == [-signal.SIGINT] * N
def test_start_new_kernel(self, install_kernel, start_kernel):
km, kc = start_kernel
assert km.is_alive()
assert kc.is_alive()
assert km.context.closed is False
def _env_test_body(self, kc):
def execute(cmd):
kc.execute(cmd)
reply = kc.get_shell_msg(TIMEOUT)
content = reply['content']
assert content['status'] == 'ok'
return content
reply = execute('env')
assert reply is not None
assert reply['user_expressions']['env'] == 'test_var_1:test_var_2'
def test_templated_kspec_env(self, install_kernel, start_kernel):
km, kc = start_kernel
assert km.is_alive()
assert kc.is_alive()
assert km.context.closed is False
self._env_test_body(kc)
def test_templated_extra_env(self, install_kernel, start_kernel_w_env):
km, kc = start_kernel_w_env
assert km.is_alive()
assert kc.is_alive()
assert km.context.closed is False
self._env_test_body(kc)
def test_cleanup_context(self, km):
assert km.context is not None
km.cleanup_resources(restart=False)
assert km.context.closed
def test_no_cleanup_shared_context(self, zmq_context):
"""kernel manager does not terminate shared context"""
km = KernelManager(context=zmq_context)
assert km.context == zmq_context
assert km.context is not None
km.cleanup_resources(restart=False)
assert km.context.closed is False
assert zmq_context.closed is False
class TestParallel:
@pytest.mark.timeout(TIMEOUT)
def test_start_sequence_kernels(self, config, install_kernel):
"""Ensure that a sequence of kernel startups doesn't break anything."""
self._run_signaltest_lifecycle(config)
self._run_signaltest_lifecycle(config)
self._run_signaltest_lifecycle(config)
@pytest.mark.timeout(TIMEOUT)
def test_start_parallel_thread_kernels(self, config, install_kernel):
if config.KernelManager.transport == 'ipc': # FIXME
pytest.skip("IPC transport is currently not working for this test!")
self._run_signaltest_lifecycle(config)
thread = threading.Thread(target=self._run_signaltest_lifecycle, args=(config,))
thread2 = threading.Thread(target=self._run_signaltest_lifecycle, args=(config,))
try:
thread.start()
thread2.start()
finally:
thread.join()
thread2.join()
@pytest.mark.timeout(TIMEOUT)
def test_start_parallel_process_kernels(self, config, install_kernel):
if config.KernelManager.transport == 'ipc': # FIXME
pytest.skip("IPC transport is currently not working for this test!")
self._run_signaltest_lifecycle(config)
thread = threading.Thread(target=self._run_signaltest_lifecycle, args=(config,))
proc = mp.Process(target=self._run_signaltest_lifecycle, args=(config,))
try:
thread.start()
proc.start()
finally:
thread.join()
proc.join()
assert proc.exitcode == 0
@pytest.mark.timeout(TIMEOUT)
def test_start_sequence_process_kernels(self, config, install_kernel):
self._run_signaltest_lifecycle(config)
proc = mp.Process(target=self._run_signaltest_lifecycle, args=(config,))
try:
proc.start()
finally:
proc.join()
assert proc.exitcode == 0
def _prepare_kernel(self, km, startup_timeout=TIMEOUT, **kwargs):
km.start_kernel(**kwargs)
kc = km.client()
kc.start_channels()
try:
kc.wait_for_ready(timeout=startup_timeout)
except RuntimeError:
kc.stop_channels()
km.shutdown_kernel()
raise
return kc
def _run_signaltest_lifecycle(self, config=None):
km = KernelManager(config=config, kernel_name='signaltest')
kc = self._prepare_kernel(km, stdout=PIPE, stderr=PIPE)
def execute(cmd):
kc.execute(cmd)
reply = kc.get_shell_msg(TIMEOUT)
content = reply['content']
assert content['status'] == 'ok'
return content
execute("start")
assert km.is_alive()
execute('check')
assert km.is_alive()
km.restart_kernel(now=True)
assert km.is_alive()
execute('check')
km.shutdown_kernel()
assert km.context.closed
@pytest.mark.asyncio
class TestAsyncKernelManager:
async def test_lifecycle(self, async_km):
await async_km.start_kernel(stdout=PIPE, stderr=PIPE)
is_alive = await async_km.is_alive()
assert is_alive
await async_km.restart_kernel(now=True)
is_alive = await async_km.is_alive()
assert is_alive
await async_km.interrupt_kernel()
assert isinstance(async_km, AsyncKernelManager)
await async_km.shutdown_kernel(now=True)
is_alive = await async_km.is_alive()
assert is_alive is False
assert async_km.context.closed
async def test_get_connect_info(self, async_km):
cinfo = async_km.get_connection_info()
keys = sorted(cinfo.keys())
expected = sorted([
'ip', 'transport',
'hb_port', 'shell_port', 'stdin_port', 'iopub_port', 'control_port',
'key', 'signature_scheme',
])
assert keys == expected
async def test_subclasses(self, async_km):
await async_km.start_kernel(stdout=PIPE, stderr=PIPE)
is_alive = await async_km.is_alive()
assert is_alive
assert isinstance(async_km, AsyncKernelManager)
await async_km.shutdown_kernel(now=True)
is_alive = await async_km.is_alive()
assert is_alive is False
assert async_km.context.closed
if isinstance(async_km, AsyncKernelManagerWithCleanup):
assert async_km.which_cleanup == "cleanup"
elif isinstance(async_km, AsyncKernelManagerSubclass):
assert async_km.which_cleanup == "cleanup_resources"
else:
assert hasattr(async_km, "which_cleanup") is False
@pytest.mark.timeout(10)
@pytest.mark.skipif(sys.platform == 'win32', reason="Windows doesn't support signals")
async def test_signal_kernel_subprocesses(self, install_kernel, start_async_kernel):
km, kc = start_async_kernel
async def execute(cmd):
kc.execute(cmd)
reply = await kc.get_shell_msg(TIMEOUT)
content = reply['content']
assert content['status'] == 'ok'
return content
# Ensure that shutdown_kernel and stop_channels are called at the end of the test.
        # Note: we cannot use addCleanup(<func>) for these since it doesn't properly handle
# coroutines - which km.shutdown_kernel now is.
N = 5
for i in range(N):
await execute("start")
await asyncio.sleep(1) # make sure subprocs stay up
reply = await execute('check')
assert reply['user_expressions']['poll'] == [None] * N
# start a job on the kernel to be interrupted
kc.execute('sleep')
await asyncio.sleep(1) # ensure sleep message has been handled before we interrupt
await km.interrupt_kernel()
reply = await kc.get_shell_msg(TIMEOUT)
content = reply['content']
assert content['status'] == 'ok'
assert content['user_expressions']['interrupted'] is True
# wait up to 5s for subprocesses to handle signal
for i in range(50):
reply = await execute('check')
if reply['user_expressions']['poll'] != [-signal.SIGINT] * N:
await asyncio.sleep(0.1)
else:
break
# verify that subprocesses were interrupted
assert reply['user_expressions']['poll'] == [-signal.SIGINT] * N
@pytest.mark.timeout(10)
async def test_start_new_async_kernel(self, install_kernel, start_async_kernel):
km, kc = start_async_kernel
is_alive = await km.is_alive()
assert is_alive
is_alive = await kc.is_alive()
assert is_alive
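
Stripped of fixtures, the blocking lifecycle exercised by TestKernelManager reduces to a few calls; a minimal sketch, assuming the native ipykernel kernelspec is available:

# Sketch: start, restart, interrupt and shut down a kernel with KernelManager.
from subprocess import PIPE
from jupyter_client import KernelManager

km = KernelManager()                   # native kernel by default
km.start_kernel(stdout=PIPE, stderr=PIPE)
try:
    assert km.is_alive()
    km.restart_kernel(now=True)
    km.interrupt_kernel()
finally:
    km.shutdown_kernel(now=True)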

View file

@@ -0,0 +1,193 @@
"""Tests for the KernelSpecManager"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import pytest
import copy
import io
import json
import os
import sys
import tempfile
import unittest
from io import StringIO
from os.path import join as pjoin
from subprocess import Popen, PIPE, STDOUT
from logging import StreamHandler
from ipython_genutils.tempdir import TemporaryDirectory
from jupyter_client import kernelspec
from jupyter_core import paths
from .utils import test_env
sample_kernel_json = {'argv':['cat', '{connection_file}'],
'display_name':'Test kernel',
}
class KernelSpecTests(unittest.TestCase):
def _install_sample_kernel(self, kernels_dir):
"""install a sample kernel in a kernels directory"""
sample_kernel_dir = pjoin(kernels_dir, 'sample')
os.makedirs(sample_kernel_dir)
json_file = pjoin(sample_kernel_dir, 'kernel.json')
with open(json_file, 'w') as f:
json.dump(sample_kernel_json, f)
return sample_kernel_dir
def setUp(self):
self.env_patch = test_env()
self.env_patch.start()
self.sample_kernel_dir = self._install_sample_kernel(
pjoin(paths.jupyter_data_dir(), 'kernels'))
self.ksm = kernelspec.KernelSpecManager()
td2 = TemporaryDirectory()
self.addCleanup(td2.cleanup)
self.installable_kernel = td2.name
with open(pjoin(self.installable_kernel, 'kernel.json'), 'w') as f:
json.dump(sample_kernel_json, f)
def tearDown(self):
self.env_patch.stop()
def test_find_kernel_specs(self):
kernels = self.ksm.find_kernel_specs()
self.assertEqual(kernels['sample'], self.sample_kernel_dir)
def test_get_kernel_spec(self):
ks = self.ksm.get_kernel_spec('SAMPLE') # Case insensitive
self.assertEqual(ks.resource_dir, self.sample_kernel_dir)
self.assertEqual(ks.argv, sample_kernel_json['argv'])
self.assertEqual(ks.display_name, sample_kernel_json['display_name'])
self.assertEqual(ks.env, {})
self.assertEqual(ks.metadata, {})
def test_find_all_specs(self):
kernels = self.ksm.get_all_specs()
self.assertEqual(kernels['sample']['resource_dir'], self.sample_kernel_dir)
self.assertIsNotNone(kernels['sample']['spec'])
def test_kernel_spec_priority(self):
td = TemporaryDirectory()
self.addCleanup(td.cleanup)
sample_kernel = self._install_sample_kernel(td.name)
self.ksm.kernel_dirs.append(td.name)
kernels = self.ksm.find_kernel_specs()
self.assertEqual(kernels['sample'], self.sample_kernel_dir)
self.ksm.kernel_dirs.insert(0, td.name)
kernels = self.ksm.find_kernel_specs()
self.assertEqual(kernels['sample'], sample_kernel)
def test_install_kernel_spec(self):
self.ksm.install_kernel_spec(self.installable_kernel,
kernel_name='tstinstalled',
user=True)
self.assertIn('tstinstalled', self.ksm.find_kernel_specs())
# install again works
self.ksm.install_kernel_spec(self.installable_kernel,
kernel_name='tstinstalled',
user=True)
def test_install_kernel_spec_prefix(self):
td = TemporaryDirectory()
self.addCleanup(td.cleanup)
capture = StringIO()
handler = StreamHandler(capture)
self.ksm.log.addHandler(handler)
self.ksm.install_kernel_spec(self.installable_kernel,
kernel_name='tstinstalled',
prefix=td.name)
captured = capture.getvalue()
self.ksm.log.removeHandler(handler)
self.assertIn("may not be found", captured)
self.assertNotIn('tstinstalled', self.ksm.find_kernel_specs())
# add prefix to path, so we find the spec
self.ksm.kernel_dirs.append(pjoin(td.name, 'share', 'jupyter', 'kernels'))
self.assertIn('tstinstalled', self.ksm.find_kernel_specs())
# Run it again, no warning this time because we've added it to the path
capture = StringIO()
handler = StreamHandler(capture)
self.ksm.log.addHandler(handler)
self.ksm.install_kernel_spec(self.installable_kernel,
kernel_name='tstinstalled',
prefix=td.name)
captured = capture.getvalue()
self.ksm.log.removeHandler(handler)
self.assertNotIn("may not be found", captured)
@pytest.mark.skipif(
not (os.name != 'nt' and not os.access('/usr/local/share', os.W_OK)),
reason="needs Unix system without root privileges")
def test_cant_install_kernel_spec(self):
with self.assertRaises(OSError):
self.ksm.install_kernel_spec(self.installable_kernel,
kernel_name='tstinstalled',
user=False)
def test_remove_kernel_spec(self):
path = self.ksm.remove_kernel_spec('sample')
self.assertEqual(path, self.sample_kernel_dir)
def test_remove_kernel_spec_app(self):
p = Popen(
[sys.executable, '-m', 'jupyter_client.kernelspecapp', 'remove', 'sample', '-f'],
stdout=PIPE, stderr=STDOUT,
env=os.environ,
)
out, _ = p.communicate()
self.assertEqual(p.returncode, 0, out.decode('utf8', 'replace'))
def test_validate_kernel_name(self):
for good in [
'julia-0.4',
'ipython',
'R',
'python_3',
'Haskell-1-2-3',
]:
assert kernelspec._is_valid_kernel_name(good)
for bad in [
'has space',
'ünicode',
'%percent',
'question?',
]:
assert not kernelspec._is_valid_kernel_name(bad)
def test_subclass(self):
"""Test get_all_specs in subclasses that override find_kernel_specs"""
ksm = self.ksm
resource_dir = tempfile.gettempdir()
native_name = kernelspec.NATIVE_KERNEL_NAME
native_kernel = ksm.get_kernel_spec(native_name)
class MyKSM(kernelspec.KernelSpecManager):
def get_kernel_spec(self, name):
spec = copy.copy(native_kernel)
if name == 'fake':
spec.name = name
spec.resource_dir = resource_dir
elif name == native_name:
pass
else:
raise KeyError(name)
return spec
def find_kernel_specs(self):
return {
'fake': resource_dir,
native_name: native_kernel.resource_dir,
}
# ensure that get_all_specs doesn't raise if only
# find_kernel_specs and get_kernel_spec are defined
myksm = MyKSM()
specs = myksm.get_all_specs()
assert sorted(specs) == ['fake', native_name]
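
A minimal sketch of the KernelSpecManager calls covered above:

# Sketch: list installed kernelspecs and inspect the native one.
from jupyter_client import kernelspec

ksm = kernelspec.KernelSpecManager()
print(ksm.find_kernel_specs())                        # {name: resource_dir}
spec = ksm.get_kernel_spec(kernelspec.NATIVE_KERNEL_NAME)
print(spec.argv, spec.display_name)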

View file

@@ -0,0 +1,15 @@
#-----------------------------------------------------------------------------
# Copyright (c) The Jupyter Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
from .. import localinterfaces
def test_load_ips():
# Override the machinery that skips it if it was called before
localinterfaces._load_ips.called = False
# Just check this doesn't error
localinterfaces._load_ips(suppress_exceptions=False)
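
A minimal sketch of the public helpers built on top of _load_ips:

# Sketch: the cached interface lookups that _load_ips populates.
from jupyter_client import localinterfaces

print(localinterfaces.localhost())     # usually '127.0.0.1'
print(localinterfaces.public_ips())    # non-loopback IPs of this machine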

View file

@@ -0,0 +1,38 @@
"""Tests for KernelManager"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from jupyter_client.kernelspec import KernelSpec
from unittest import mock
from jupyter_client.manager import KernelManager
import os
import tempfile
def test_connection_file_real_path():
""" Verify realpath is used when formatting connection file """
with mock.patch('os.path.realpath') as patched_realpath:
patched_realpath.return_value = 'foobar'
km = KernelManager(connection_file=os.path.join(
tempfile.gettempdir(), "kernel-test.json"),
kernel_name='test_kernel')
# KernelSpec and launch args have to be mocked as we don't have an actual kernel on disk
km._kernel_spec = KernelSpec(resource_dir='test',
**{
"argv": [
"python.exe",
"-m",
"test_kernel",
"-f",
"{connection_file}"
],
"env": {},
"display_name": "test_kernel",
"language": "python",
"metadata": {}
})
km._launch_args = {}
cmds = km.format_kernel_cmd()
        assert cmds[4] == 'foobar'

View file

@@ -0,0 +1,286 @@
"""Tests for the notebook kernel and session manager."""
import asyncio
import threading
import uuid
import multiprocessing as mp
from subprocess import PIPE
from unittest import TestCase
from tornado.testing import AsyncTestCase, gen_test
from traitlets.config.loader import Config
from jupyter_client import KernelManager, AsyncKernelManager
from jupyter_client.multikernelmanager import MultiKernelManager, AsyncMultiKernelManager
from .utils import skip_win32
from ..localinterfaces import localhost
TIMEOUT = 30
class TestKernelManager(TestCase):
def _get_tcp_km(self):
c = Config()
km = MultiKernelManager(config=c)
return km
def _get_ipc_km(self):
c = Config()
c.KernelManager.transport = 'ipc'
c.KernelManager.ip = 'test'
km = MultiKernelManager(config=c)
return km
def _run_lifecycle(self, km, test_kid=None):
if test_kid:
kid = km.start_kernel(stdout=PIPE, stderr=PIPE, kernel_id=test_kid)
self.assertTrue(kid == test_kid)
else:
kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
self.assertTrue(km.is_alive(kid))
self.assertTrue(kid in km)
self.assertTrue(kid in km.list_kernel_ids())
self.assertEqual(len(km), 1)
km.restart_kernel(kid, now=True)
self.assertTrue(km.is_alive(kid))
self.assertTrue(kid in km.list_kernel_ids())
km.interrupt_kernel(kid)
k = km.get_kernel(kid)
self.assertTrue(isinstance(k, KernelManager))
km.shutdown_kernel(kid, now=True)
self.assertNotIn(kid, km)
def _run_cinfo(self, km, transport, ip):
kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
k = km.get_kernel(kid)
cinfo = km.get_connection_info(kid)
self.assertEqual(transport, cinfo['transport'])
self.assertEqual(ip, cinfo['ip'])
self.assertTrue('stdin_port' in cinfo)
self.assertTrue('iopub_port' in cinfo)
stream = km.connect_iopub(kid)
stream.close()
self.assertTrue('shell_port' in cinfo)
stream = km.connect_shell(kid)
stream.close()
self.assertTrue('hb_port' in cinfo)
stream = km.connect_hb(kid)
stream.close()
km.shutdown_kernel(kid, now=True)
def test_tcp_lifecycle(self):
km = self._get_tcp_km()
self._run_lifecycle(km)
def test_tcp_lifecycle_with_kernel_id(self):
km = self._get_tcp_km()
self._run_lifecycle(km, test_kid=str(uuid.uuid4()))
def test_shutdown_all(self):
km = self._get_tcp_km()
kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
self.assertIn(kid, km)
km.shutdown_all()
self.assertNotIn(kid, km)
# shutdown again is okay, because we have no kernels
km.shutdown_all()
def test_tcp_cinfo(self):
km = self._get_tcp_km()
self._run_cinfo(km, 'tcp', localhost())
@skip_win32
def test_ipc_lifecycle(self):
km = self._get_ipc_km()
self._run_lifecycle(km)
@skip_win32
def test_ipc_cinfo(self):
km = self._get_ipc_km()
self._run_cinfo(km, 'ipc', 'test')
def test_start_sequence_tcp_kernels(self):
"""Ensure that a sequence of kernel startups doesn't break anything."""
self._run_lifecycle(self._get_tcp_km())
self._run_lifecycle(self._get_tcp_km())
self._run_lifecycle(self._get_tcp_km())
def test_start_sequence_ipc_kernels(self):
"""Ensure that a sequence of kernel startups doesn't break anything."""
self._run_lifecycle(self._get_ipc_km())
self._run_lifecycle(self._get_ipc_km())
self._run_lifecycle(self._get_ipc_km())
def tcp_lifecycle_with_loop(self):
# Ensure each thread has an event loop
asyncio.set_event_loop(asyncio.new_event_loop())
self.test_tcp_lifecycle()
def test_start_parallel_thread_kernels(self):
self.test_tcp_lifecycle()
thread = threading.Thread(target=self.tcp_lifecycle_with_loop)
thread2 = threading.Thread(target=self.tcp_lifecycle_with_loop)
try:
thread.start()
thread2.start()
finally:
thread.join()
thread2.join()
def test_start_parallel_process_kernels(self):
self.test_tcp_lifecycle()
thread = threading.Thread(target=self.tcp_lifecycle_with_loop)
proc = mp.Process(target=self.test_tcp_lifecycle)
try:
thread.start()
proc.start()
finally:
thread.join()
proc.join()
assert proc.exitcode == 0
class TestAsyncKernelManager(AsyncTestCase):
def _get_tcp_km(self):
c = Config()
km = AsyncMultiKernelManager(config=c)
return km
def _get_ipc_km(self):
c = Config()
c.KernelManager.transport = 'ipc'
c.KernelManager.ip = 'test'
km = AsyncMultiKernelManager(config=c)
return km
async def _run_lifecycle(self, km, test_kid=None):
if test_kid:
kid = await km.start_kernel(stdout=PIPE, stderr=PIPE, kernel_id=test_kid)
self.assertTrue(kid == test_kid)
else:
kid = await km.start_kernel(stdout=PIPE, stderr=PIPE)
self.assertTrue(await km.is_alive(kid))
self.assertTrue(kid in km)
self.assertTrue(kid in km.list_kernel_ids())
self.assertEqual(len(km), 1)
await km.restart_kernel(kid, now=True)
self.assertTrue(await km.is_alive(kid))
self.assertTrue(kid in km.list_kernel_ids())
await km.interrupt_kernel(kid)
k = km.get_kernel(kid)
self.assertTrue(isinstance(k, AsyncKernelManager))
await km.shutdown_kernel(kid, now=True)
self.assertNotIn(kid, km)
async def _run_cinfo(self, km, transport, ip):
kid = await km.start_kernel(stdout=PIPE, stderr=PIPE)
k = km.get_kernel(kid)
cinfo = km.get_connection_info(kid)
self.assertEqual(transport, cinfo['transport'])
self.assertEqual(ip, cinfo['ip'])
self.assertTrue('stdin_port' in cinfo)
self.assertTrue('iopub_port' in cinfo)
stream = km.connect_iopub(kid)
stream.close()
self.assertTrue('shell_port' in cinfo)
stream = km.connect_shell(kid)
stream.close()
self.assertTrue('hb_port' in cinfo)
stream = km.connect_hb(kid)
stream.close()
await km.shutdown_kernel(kid, now=True)
self.assertNotIn(kid, km)
@gen_test
async def test_tcp_lifecycle(self):
await self.raw_tcp_lifecycle()
@gen_test
async def test_tcp_lifecycle_with_kernel_id(self):
await self.raw_tcp_lifecycle(test_kid=str(uuid.uuid4()))
@gen_test
async def test_shutdown_all(self):
km = self._get_tcp_km()
kid = await km.start_kernel(stdout=PIPE, stderr=PIPE)
self.assertIn(kid, km)
await km.shutdown_all()
self.assertNotIn(kid, km)
# shutdown again is okay, because we have no kernels
await km.shutdown_all()
@gen_test
async def test_tcp_cinfo(self):
km = self._get_tcp_km()
await self._run_cinfo(km, 'tcp', localhost())
@skip_win32
@gen_test
async def test_ipc_lifecycle(self):
km = self._get_ipc_km()
await self._run_lifecycle(km)
@skip_win32
@gen_test
async def test_ipc_cinfo(self):
km = self._get_ipc_km()
await self._run_cinfo(km, 'ipc', 'test')
@gen_test
async def test_start_sequence_tcp_kernels(self):
"""Ensure that a sequence of kernel startups doesn't break anything."""
await self._run_lifecycle(self._get_tcp_km())
await self._run_lifecycle(self._get_tcp_km())
await self._run_lifecycle(self._get_tcp_km())
@gen_test
async def test_start_sequence_ipc_kernels(self):
"""Ensure that a sequence of kernel startups doesn't break anything."""
await self._run_lifecycle(self._get_ipc_km())
await self._run_lifecycle(self._get_ipc_km())
await self._run_lifecycle(self._get_ipc_km())
def tcp_lifecycle_with_loop(self):
# Ensure each thread has an event loop
asyncio.set_event_loop(asyncio.new_event_loop())
asyncio.get_event_loop().run_until_complete(self.raw_tcp_lifecycle())
async def raw_tcp_lifecycle(self, test_kid=None):
# Since @gen_test creates an event loop, we need a raw form of
# test_tcp_lifecycle that assumes the loop already exists.
km = self._get_tcp_km()
await self._run_lifecycle(km, test_kid=test_kid)
@gen_test
async def test_start_parallel_thread_kernels(self):
await self.raw_tcp_lifecycle()
thread = threading.Thread(target=self.tcp_lifecycle_with_loop)
thread2 = threading.Thread(target=self.tcp_lifecycle_with_loop)
try:
thread.start()
thread2.start()
finally:
thread.join()
thread2.join()
@gen_test
async def test_start_parallel_process_kernels(self):
await self.raw_tcp_lifecycle()
thread = threading.Thread(target=self.tcp_lifecycle_with_loop)
proc = mp.Process(target=self.raw_tcp_lifecycle)
try:
thread.start()
proc.start()
finally:
proc.join()
thread.join()
assert proc.exitcode == 0
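
A minimal sketch of the MultiKernelManager lifecycle that _run_lifecycle walks through:

# Sketch: manage a kernel by id through MultiKernelManager.
from subprocess import PIPE
from jupyter_client.multikernelmanager import MultiKernelManager

mkm = MultiKernelManager()
kid = mkm.start_kernel(stdout=PIPE, stderr=PIPE)       # returns the kernel_id
try:
    assert mkm.is_alive(kid)
    print(mkm.get_connection_info(kid)['transport'])   # 'tcp' by default
finally:
    mkm.shutdown_all()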

View file

@@ -0,0 +1,27 @@
"""Test the jupyter_client public API
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from jupyter_client import launcher, connect
import jupyter_client
def test_kms():
for base in ("", "Multi"):
KM = base + "KernelManager"
assert KM in dir(jupyter_client)
def test_kcs():
for base in ("", "Blocking"):
KM = base + "KernelClient"
assert KM in dir(jupyter_client)
def test_launcher():
for name in launcher.__all__:
assert name in dir(jupyter_client)
def test_connect():
for name in connect.__all__:
assert name in dir(jupyter_client)

View file

@@ -0,0 +1,346 @@
"""test building messages with Session"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import hmac
import os
import uuid
from datetime import datetime
from unittest import mock
import pytest
import zmq
from zmq.tests import BaseZMQTestCase
from zmq.eventloop.zmqstream import ZMQStream
from jupyter_client import session as ss
from jupyter_client import jsonutil
def _bad_packer(obj):
raise TypeError("I don't work")
def _bad_unpacker(bytes):
raise TypeError("I don't work either")
class SessionTestCase(BaseZMQTestCase):
def setUp(self):
BaseZMQTestCase.setUp(self)
self.session = ss.Session()
@pytest.fixture
def no_copy_threshold():
"""Disable zero-copy optimizations in pyzmq >= 17"""
with mock.patch.object(zmq, 'COPY_THRESHOLD', 1, create=True):
yield
@pytest.mark.usefixtures('no_copy_threshold')
class TestSession(SessionTestCase):
def test_msg(self):
"""message format"""
msg = self.session.msg('execute')
thekeys = set('header parent_header metadata content msg_type msg_id'.split())
s = set(msg.keys())
self.assertEqual(s, thekeys)
self.assertTrue(isinstance(msg['content'],dict))
self.assertTrue(isinstance(msg['metadata'],dict))
self.assertTrue(isinstance(msg['header'],dict))
self.assertTrue(isinstance(msg['parent_header'],dict))
self.assertTrue(isinstance(msg['msg_id'], str))
self.assertTrue(isinstance(msg['msg_type'], str))
self.assertEqual(msg['header']['msg_type'], 'execute')
self.assertEqual(msg['msg_type'], 'execute')
def test_serialize(self):
msg = self.session.msg('execute', content=dict(a=10, b=1.1))
msg_list = self.session.serialize(msg, ident=b'foo')
ident, msg_list = self.session.feed_identities(msg_list)
new_msg = self.session.deserialize(msg_list)
self.assertEqual(ident[0], b'foo')
self.assertEqual(new_msg['msg_id'],msg['msg_id'])
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['parent_header'],msg['parent_header'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
# ensure floats don't come out as Decimal:
        self.assertEqual(type(new_msg['content']['b']), type(msg['content']['b']))
def test_default_secure(self):
self.assertIsInstance(self.session.key, bytes)
self.assertIsInstance(self.session.auth, hmac.HMAC)
def test_send(self):
ctx = zmq.Context()
A = ctx.socket(zmq.PAIR)
B = ctx.socket(zmq.PAIR)
A.bind("inproc://test")
B.connect("inproc://test")
msg = self.session.msg('execute', content=dict(a=10))
self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
ident, msg_list = self.session.feed_identities(B.recv_multipart())
new_msg = self.session.deserialize(msg_list)
self.assertEqual(ident[0], b'foo')
self.assertEqual(new_msg['msg_id'],msg['msg_id'])
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['parent_header'],msg['parent_header'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
self.assertEqual(new_msg['buffers'],[b'bar'])
content = msg['content']
header = msg['header']
header['msg_id'] = self.session.msg_id
parent = msg['parent_header']
metadata = msg['metadata']
msg_type = header['msg_type']
self.session.send(A, None, content=content, parent=parent,
header=header, metadata=metadata, ident=b'foo', buffers=[b'bar'])
ident, msg_list = self.session.feed_identities(B.recv_multipart())
new_msg = self.session.deserialize(msg_list)
self.assertEqual(ident[0], b'foo')
self.assertEqual(new_msg['msg_id'],header['msg_id'])
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
self.assertEqual(new_msg['parent_header'],msg['parent_header'])
self.assertEqual(new_msg['buffers'],[b'bar'])
header['msg_id'] = self.session.msg_id
self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
ident, new_msg = self.session.recv(B)
self.assertEqual(ident[0], b'foo')
self.assertEqual(new_msg['msg_id'],header['msg_id'])
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
self.assertEqual(new_msg['parent_header'],msg['parent_header'])
self.assertEqual(new_msg['buffers'],[b'bar'])
# buffers must support the buffer protocol
with self.assertRaises(TypeError):
self.session.send(A, msg, ident=b'foo', buffers=[1])
# buffers must be contiguous
buf = memoryview(os.urandom(16))
with self.assertRaises(ValueError):
self.session.send(A, msg, ident=b'foo', buffers=[buf[::2]])
A.close()
B.close()
ctx.term()
def test_args(self):
"""initialization arguments for Session"""
s = self.session
self.assertTrue(s.pack is ss.default_packer)
self.assertTrue(s.unpack is ss.default_unpacker)
self.assertEqual(s.username, os.environ.get('USER', 'username'))
s = ss.Session()
self.assertEqual(s.username, os.environ.get('USER', 'username'))
self.assertRaises(TypeError, ss.Session, pack='hi')
self.assertRaises(TypeError, ss.Session, unpack='hi')
u = str(uuid.uuid4())
s = ss.Session(username='carrot', session=u)
self.assertEqual(s.session, u)
self.assertEqual(s.username, 'carrot')
def test_tracking(self):
"""test tracking messages"""
a,b = self.create_bound_pair(zmq.PAIR, zmq.PAIR)
s = self.session
s.copy_threshold = 1
stream = ZMQStream(a)
msg = s.send(a, 'hello', track=False)
self.assertTrue(msg['tracker'] is ss.DONE)
msg = s.send(a, 'hello', track=True)
self.assertTrue(isinstance(msg['tracker'], zmq.MessageTracker))
M = zmq.Message(b'hi there', track=True)
msg = s.send(a, 'hello', buffers=[M], track=True)
t = msg['tracker']
self.assertTrue(isinstance(t, zmq.MessageTracker))
self.assertRaises(zmq.NotDone, t.wait, .1)
del M
        t.wait(1)  # raises zmq.NotDone (failing the test) if the send has not completed within 1s
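        # Note (observed behaviour, not a documented guarantee): the no_copy_threshold
        # fixture patches zmq.COPY_THRESHOLD to 1 and copy_threshold is set to 1 above,
        # so even tiny sends take the zero-copy path -- that is what makes the
        # MessageTracker assertions in this test meaningful.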
def test_unique_msg_ids(self):
"""test that messages receive unique ids"""
ids = set()
for i in range(2**12):
h = self.session.msg_header('test')
msg_id = h['msg_id']
self.assertTrue(msg_id not in ids)
ids.add(msg_id)
def test_feed_identities(self):
"""scrub the front for zmq IDENTITIES"""
theids = "engine client other".split()
content = dict(code='whoda',stuff=object())
themsg = self.session.msg('execute',content=content)
pmsg = theids
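        # Hedged sketch of the check this test presumably intends (assumes the wire
        # format used by jupyter_client.session: identity frames, then the DELIM frame,
        # then the signed message frames); feed_identities should scrub the identities
        # off the front.
        idents = [i.encode('ascii') for i in theids]
        wire_msg = idents + [ss.DELIM, b'signature', b'header', b'parent', b'metadata', b'content']
        scrubbed, frames = self.session.feed_identities(wire_msg)
        self.assertEqual(scrubbed, idents)
        self.assertEqual(frames, [b'signature', b'header', b'parent', b'metadata', b'content'])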
def test_session_id(self):
session = ss.Session()
# get bs before us
bs = session.bsession
us = session.session
self.assertEqual(us.encode('ascii'), bs)
session = ss.Session()
# get us before bs
us = session.session
bs = session.bsession
self.assertEqual(us.encode('ascii'), bs)
# change propagates:
session.session = 'something else'
bs = session.bsession
us = session.session
self.assertEqual(us.encode('ascii'), bs)
session = ss.Session(session='stuff')
# get us before bs
self.assertEqual(session.bsession, session.session.encode('ascii'))
self.assertEqual(b'stuff', session.bsession)
def test_zero_digest_history(self):
session = ss.Session(digest_history_size=0)
for i in range(11):
session._add_digest(uuid.uuid4().bytes)
self.assertEqual(len(session.digest_history), 0)
def test_cull_digest_history(self):
session = ss.Session(digest_history_size=100)
for i in range(100):
session._add_digest(uuid.uuid4().bytes)
        self.assertEqual(len(session.digest_history), 100)
        session._add_digest(uuid.uuid4().bytes)
        self.assertEqual(len(session.digest_history), 91)
        for i in range(9):
            session._add_digest(uuid.uuid4().bytes)
        self.assertEqual(len(session.digest_history), 100)
        session._add_digest(uuid.uuid4().bytes)
        self.assertEqual(len(session.digest_history), 91)
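        # Rough arithmetic behind the numbers above (based on the observed behaviour,
        # not a documented guarantee): once the history grows past digest_history_size,
        # roughly 10% of the entries are culled, so 101 stored digests drop to 91, and
        # nine further additions bring the history back up to 100.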
def test_bad_pack(self):
try:
session = ss.Session(pack=_bad_packer)
except ValueError as e:
self.assertIn("could not serialize", str(e))
self.assertIn("don't work", str(e))
else:
self.fail("Should have raised ValueError")
def test_bad_unpack(self):
try:
session = ss.Session(unpack=_bad_unpacker)
except ValueError as e:
self.assertIn("could not handle output", str(e))
self.assertIn("don't work either", str(e))
else:
self.fail("Should have raised ValueError")
def test_bad_packer(self):
try:
session = ss.Session(packer=__name__ + '._bad_packer')
except ValueError as e:
self.assertIn("could not serialize", str(e))
self.assertIn("don't work", str(e))
else:
self.fail("Should have raised ValueError")
def test_bad_unpacker(self):
try:
session = ss.Session(unpacker=__name__ + '._bad_unpacker')
except ValueError as e:
self.assertIn("could not handle output", str(e))
self.assertIn("don't work either", str(e))
else:
self.fail("Should have raised ValueError")
def test_bad_roundtrip(self):
with self.assertRaises(ValueError):
session = ss.Session(unpack=lambda b: 5)
def _datetime_test(self, session):
content = dict(t=ss.utcnow())
metadata = dict(t=ss.utcnow())
p = session.msg('msg')
msg = session.msg('msg', content=content, metadata=metadata, parent=p['header'])
smsg = session.serialize(msg)
msg2 = session.deserialize(session.feed_identities(smsg)[1])
assert isinstance(msg2['header']['date'], datetime)
self.assertEqual(msg['header'], msg2['header'])
self.assertEqual(msg['parent_header'], msg2['parent_header'])
assert isinstance(msg['content']['t'], datetime)
assert isinstance(msg['metadata']['t'], datetime)
assert isinstance(msg2['content']['t'], str)
assert isinstance(msg2['metadata']['t'], str)
        self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
        self.assertEqual(msg['metadata'], jsonutil.extract_dates(msg2['metadata']))
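        # (jsonutil.extract_dates walks the dicts and turns ISO-8601 strings back into
        #  datetime objects, which is why the deserialized copies compare equal to the
        #  originals above.)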
def test_datetimes(self):
self._datetime_test(self.session)
def test_datetimes_pickle(self):
session = ss.Session(packer='pickle')
self._datetime_test(session)
def test_datetimes_msgpack(self):
msgpack = pytest.importorskip('msgpack')
session = ss.Session(
pack=msgpack.packb,
            unpack=lambda buf: msgpack.unpackb(buf, raw=False),  # the 'encoding' kwarg was removed in msgpack 1.0
)
self._datetime_test(session)
def test_send_raw(self):
ctx = zmq.Context()
A = ctx.socket(zmq.PAIR)
B = ctx.socket(zmq.PAIR)
A.bind("inproc://test")
B.connect("inproc://test")
msg = self.session.msg('execute', content=dict(a=10))
msg_list = [self.session.pack(msg[part]) for part in
['header', 'parent_header', 'metadata', 'content']]
self.session.send_raw(A, msg_list, ident=b'foo')
ident, new_msg_list = self.session.feed_identities(B.recv_multipart())
new_msg = self.session.deserialize(new_msg_list)
self.assertEqual(ident[0], b'foo')
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['parent_header'],msg['parent_header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
A.close()
B.close()
ctx.term()
def test_clone(self):
s = self.session
s._add_digest('initial')
s2 = s.clone()
assert s2.session == s.session
assert s2.digest_history == s.digest_history
assert s2.digest_history is not s.digest_history
digest = 'abcdef'
s._add_digest(digest)
assert digest in s.digest_history
assert digest not in s2.digest_history

View file

@@ -0,0 +1,8 @@
from jupyter_client.ssh.tunnel import select_random_ports
def test_random_ports():
for i in range(4096):
ports = select_random_ports(10)
assert len(ports) == 10
for p in ports:
assert ports.count(p) == 1
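
# Usage sketch (illustrative, not part of the test): select_random_ports only picks
# candidate port numbers; callers such as the SSH tunnel helpers still have to bind them.
#
#     local_port, remote_port = select_random_ports(2)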

View file

@@ -0,0 +1,89 @@
"""Testing utils for jupyter_client tests
"""
import os
pjoin = os.path.join
import sys
from unittest.mock import patch
import pytest
from jupyter_client import AsyncKernelManager
from ipython_genutils.tempdir import TemporaryDirectory
skip_win32 = pytest.mark.skipif(sys.platform.startswith('win'), reason="Windows")
class test_env(object):
"""Set Jupyter path variables to a temporary directory
Useful as a context manager or with explicit start/stop
"""
def start(self):
self.test_dir = td = TemporaryDirectory()
self.env_patch = patch.dict(os.environ, {
'JUPYTER_CONFIG_DIR': pjoin(td.name, 'jupyter'),
'JUPYTER_DATA_DIR': pjoin(td.name, 'jupyter_data'),
'JUPYTER_RUNTIME_DIR': pjoin(td.name, 'jupyter_runtime'),
'IPYTHONDIR': pjoin(td.name, 'ipython'),
'TEST_VARS': 'test_var_1',
})
self.env_patch.start()
def stop(self):
self.env_patch.stop()
self.test_dir.cleanup()
def __enter__(self):
self.start()
return self.test_dir.name
def __exit__(self, *exc_info):
self.stop()
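
# Minimal usage sketch for test_env (illustrative; the temporary directory path is
# whatever TemporaryDirectory allocates):
#
#     with test_env() as td:
#         assert os.environ['JUPYTER_CONFIG_DIR'] == pjoin(td, 'jupyter')
#
# or, with explicit start/stop:
#
#     env = test_env()
#     env.start()
#     try:
#         ...  # run code that reads the patched environment
#     finally:
#         env.stop()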
def execute(code='', kc=None, **kwargs):
"""wrapper for doing common steps for validating an execution request"""
from .test_message_spec import validate_message
    if kc is None:
        kc = KC  # KC and TIMEOUT are module-level globals provided elsewhere by the test setup
msg_id = kc.execute(code=code, **kwargs)
reply = kc.get_shell_msg(timeout=TIMEOUT)
validate_message(reply, 'execute_reply', msg_id)
busy = kc.get_iopub_msg(timeout=TIMEOUT)
validate_message(busy, 'status', msg_id)
assert busy['content']['execution_state'] == 'busy'
if not kwargs.get('silent'):
execute_input = kc.get_iopub_msg(timeout=TIMEOUT)
validate_message(execute_input, 'execute_input', msg_id)
assert execute_input['content']['code'] == code
return msg_id, reply['content']
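
# Usage sketch for execute() (assumes a started, running BlockingKernelClient, as the
# test modules that import this helper provide):
#
#     msg_id, reply = execute('a = 1', kc=kc)
#     assert reply['status'] == 'ok'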
class AsyncKernelManagerSubclass(AsyncKernelManager):
"""Used to test deprecation "routes" that are determined by superclass' detection of methods.
This class represents a current subclass that overrides both cleanup() and cleanup_resources()
in order to be compatible with older jupyter_clients. We should find that cleanup_resources()
is called on these instances vix TestAsyncKernelManagerSubclass.
"""
def cleanup(self, connection_file=True):
super().cleanup(connection_file=connection_file)
self.which_cleanup = 'cleanup'
def cleanup_resources(self, restart=False):
super().cleanup_resources(restart=restart)
self.which_cleanup = 'cleanup_resources'
class AsyncKernelManagerWithCleanup(AsyncKernelManager):
"""Used to test deprecation "routes" that are determined by superclass' detection of methods.
This class represents the older subclass that overrides cleanup(). We should find that
cleanup() is called on these instances via TestAsyncKernelManagerWithCleanup.
"""
def cleanup(self, connection_file=True):
super().cleanup(connection_file=connection_file)
self.which_cleanup = 'cleanup'
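
# Minimal sketch of how these subclasses are expected to be exercised (illustrative;
# the Test* classes named in the docstrings do the real work):
#
#     async def run_lifecycle(km):
#         await km.start_kernel()
#         await km.shutdown_kernel()
#         # 'cleanup_resources' for AsyncKernelManagerSubclass,
#         # 'cleanup' for AsyncKernelManagerWithCleanup
#         return km.which_cleanup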