Uploaded Test files
parent f584ad9d97
commit 2e81cb7d99
16627 changed files with 2065359 additions and 102444 deletions
175  venv/Lib/site-packages/notebook/services/sessions/handlers.py  Normal file
@@ -0,0 +1,175 @@
"""Tornado handlers for the sessions web service.

Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-16%3A-Notebook-multi-directory-dashboard-and-URL-mapping#sessions-api
"""

# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.

import json

from tornado import gen, web

from ...base.handlers import APIHandler
from jupyter_client.jsonutil import date_default
from notebook.utils import maybe_future, url_path_join
from jupyter_client.kernelspec import NoSuchKernel


class SessionRootHandler(APIHandler):

    @web.authenticated
    @gen.coroutine
    def get(self):
        # Return a list of running sessions
        sm = self.session_manager
        sessions = yield maybe_future(sm.list_sessions())
        self.finish(json.dumps(sessions, default=date_default))

    @web.authenticated
    @gen.coroutine
    def post(self):
        # Creates a new session
        #(unless a session already exists for the named session)
        sm = self.session_manager

        model = self.get_json_body()
        if model is None:
            raise web.HTTPError(400, "No JSON data provided")

        if 'notebook' in model and 'path' in model['notebook']:
            self.log.warning('Sessions API changed, see updated swagger docs')
            model['path'] = model['notebook']['path']
            model['type'] = 'notebook'

        try:
            path = model['path']
        except KeyError as e:
            raise web.HTTPError(400, "Missing field in JSON data: path") from e

        try:
            mtype = model['type']
        except KeyError as e:
            raise web.HTTPError(400, "Missing field in JSON data: type") from e

        name = model.get('name', None)
        kernel = model.get('kernel', {})
        kernel_name = kernel.get('name', None)
        kernel_id = kernel.get('id', None)

        if not kernel_id and not kernel_name:
            self.log.debug("No kernel specified, using default kernel")
            kernel_name = None

        exists = yield maybe_future(sm.session_exists(path=path))
        if exists:
            model = yield maybe_future(sm.get_session(path=path))
        else:
            try:
                model = yield maybe_future(
                    sm.create_session(path=path, kernel_name=kernel_name,
                                      kernel_id=kernel_id, name=name,
                                      type=mtype))
            except NoSuchKernel:
                msg = ("The '%s' kernel is not available. Please pick another "
                       "suitable kernel instead, or install that kernel." % kernel_name)
                status_msg = '%s not found' % kernel_name
                self.log.warning('Kernel not found: %s' % kernel_name)
                self.set_status(501)
                self.finish(json.dumps(dict(message=msg, short_message=status_msg)))
                return

        location = url_path_join(self.base_url, 'api', 'sessions', model['id'])
        self.set_header('Location', location)
        self.set_status(201)
        self.finish(json.dumps(model, default=date_default))


class SessionHandler(APIHandler):

    @web.authenticated
    @gen.coroutine
    def get(self, session_id):
        # Returns the JSON model for a single session
        sm = self.session_manager
        model = yield maybe_future(sm.get_session(session_id=session_id))
        self.finish(json.dumps(model, default=date_default))

    @web.authenticated
    @gen.coroutine
    def patch(self, session_id):
        """Patch updates sessions:

        - path updates session to track renamed paths
        - kernel.name starts a new kernel with a given kernelspec
        """
        sm = self.session_manager
        km = self.kernel_manager
        model = self.get_json_body()
        if model is None:
            raise web.HTTPError(400, "No JSON data provided")

        # get the previous session model
        before = yield maybe_future(sm.get_session(session_id=session_id))

        changes = {}
        if 'notebook' in model and 'path' in model['notebook']:
            self.log.warning('Sessions API changed, see updated swagger docs')
            model['path'] = model['notebook']['path']
            model['type'] = 'notebook'
        if 'path' in model:
            changes['path'] = model['path']
        if 'name' in model:
            changes['name'] = model['name']
        if 'type' in model:
            changes['type'] = model['type']
        if 'kernel' in model:
            # Kernel id takes precedence over name.
            if model['kernel'].get('id') is not None:
                kernel_id = model['kernel']['id']
                if kernel_id not in km:
                    raise web.HTTPError(400, "No such kernel: %s" % kernel_id)
                changes['kernel_id'] = kernel_id
            elif model['kernel'].get('name') is not None:
                kernel_name = model['kernel']['name']
                kernel_id = yield sm.start_kernel_for_session(
                    session_id, kernel_name=kernel_name, name=before['name'],
                    path=before['path'], type=before['type'])
                changes['kernel_id'] = kernel_id

        yield maybe_future(sm.update_session(session_id, **changes))
        model = yield maybe_future(sm.get_session(session_id=session_id))

        if model['kernel']['id'] != before['kernel']['id']:
            # kernel_id changed because we got a new kernel
            # shutdown the old one
            yield maybe_future(
                km.shutdown_kernel(before['kernel']['id'])
            )
        self.finish(json.dumps(model, default=date_default))

    @web.authenticated
    @gen.coroutine
    def delete(self, session_id):
        # Deletes the session with given session_id
        sm = self.session_manager
        try:
            yield maybe_future(sm.delete_session(session_id))
        except KeyError as e:
            # the kernel was deleted but the session wasn't!
            raise web.HTTPError(410, "Kernel deleted before session") from e
        self.set_status(204)
        self.finish()


#-----------------------------------------------------------------------------
# URL to handler mappings
#-----------------------------------------------------------------------------

_session_id_regex = r"(?P<session_id>\w+-\w+-\w+-\w+-\w+)"

default_handlers = [
    (r"/api/sessions/%s" % _session_id_regex, SessionHandler),
    (r"/api/sessions", SessionRootHandler)
]
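Note (not part of the commit): the handlers above expose the REST surface at /api/sessions. A minimal client-side sketch of exercising SessionRootHandler.post() is shown below; the base URL, token, and kernel name are assumptions to be replaced with values from a running server.

# Illustrative sketch only, not part of this diff.
# BASE_URL, TOKEN and the kernel name "python3" are assumptions.
import json
import requests

BASE_URL = "http://localhost:8888"    # assumed notebook server address
TOKEN = "<your-token>"                # assumed auth token

headers = {"Authorization": "token %s" % TOKEN}
body = {
    "path": "foo/nb1.ipynb",          # handler raises 400 if 'path' is missing
    "type": "notebook",               # handler raises 400 if 'type' is missing
    "kernel": {"name": "python3"},    # optional; default kernel is used if omitted
}

resp = requests.post(BASE_URL + "/api/sessions", headers=headers, data=json.dumps(body))
print(resp.status_code)               # 201 on success, 501 if the kernel is unavailable
print(resp.headers.get("Location"))   # .../api/sessions/<session-id>
print(resp.json()["kernel"]["id"])    # id of the kernel started for this session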
@@ -0,0 +1,275 @@
"""A base class session manager."""

# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.

import uuid

try:
    import sqlite3
except ImportError:
    # fallback on pysqlite2 if Python was build without sqlite
    from pysqlite2 import dbapi2 as sqlite3

from tornado import gen, web

from traitlets.config.configurable import LoggingConfigurable
from ipython_genutils.py3compat import unicode_type
from traitlets import Instance

from notebook.utils import maybe_future


class SessionManager(LoggingConfigurable):

    kernel_manager = Instance('notebook.services.kernels.kernelmanager.MappingKernelManager')
    contents_manager = Instance('notebook.services.contents.manager.ContentsManager')

    # Session database initialized below
    _cursor = None
    _connection = None
    _columns = {'session_id', 'path', 'name', 'type', 'kernel_id'}

    @property
    def cursor(self):
        """Start a cursor and create a database called 'session'"""
        if self._cursor is None:
            self._cursor = self.connection.cursor()
            self._cursor.execute("""CREATE TABLE session
                (session_id, path, name, type, kernel_id)""")
        return self._cursor

    @property
    def connection(self):
        """Start a database connection"""
        if self._connection is None:
            self._connection = sqlite3.connect(':memory:')
            self._connection.row_factory = sqlite3.Row
        return self._connection

    def close(self):
        """Close the sqlite connection"""
        if self._cursor is not None:
            self._cursor.close()
            self._cursor = None

    def __del__(self):
        """Close connection once SessionManager closes"""
        self.close()

    @gen.coroutine
    def session_exists(self, path):
        """Check to see if the session of a given name exists"""
        exists = False
        self.cursor.execute("SELECT * FROM session WHERE path=?", (path,))
        row = self.cursor.fetchone()
        if row is not None:
            # Note, although we found a row for the session, the associated kernel may have
            # been culled or died unexpectedly. If that's the case, we should delete the
            # row, thereby terminating the session. This can be done via a call to
            # row_to_model that tolerates that condition. If row_to_model returns None,
            # we'll return false, since, at that point, the session doesn't exist anyway.
            model = yield maybe_future(self.row_to_model(row, tolerate_culled=True))
            if model is not None:
                exists = True
        raise gen.Return(exists)

    def new_session_id(self):
        "Create a uuid for a new session"
        return unicode_type(uuid.uuid4())

    @gen.coroutine
    def create_session(self, path=None, name=None, type=None, kernel_name=None, kernel_id=None):
        """Creates a session and returns its model"""
        session_id = self.new_session_id()
        if kernel_id is not None and kernel_id in self.kernel_manager:
            pass
        else:
            kernel_id = yield self.start_kernel_for_session(session_id, path, name, type, kernel_name)
        result = yield maybe_future(
            self.save_session(session_id, path=path, name=name, type=type, kernel_id=kernel_id)
        )
        # py2-compat
        raise gen.Return(result)

    @gen.coroutine
    def start_kernel_for_session(self, session_id, path, name, type, kernel_name):
        """Start a new kernel for a given session."""
        # allow contents manager to specify kernels cwd
        kernel_path = self.contents_manager.get_kernel_path(path=path)
        kernel_id = yield maybe_future(
            self.kernel_manager.start_kernel(path=kernel_path, kernel_name=kernel_name)
        )
        # py2-compat
        raise gen.Return(kernel_id)

    @gen.coroutine
    def save_session(self, session_id, path=None, name=None, type=None, kernel_id=None):
        """Saves the items for the session with the given session_id

        Given a session_id (and any other of the arguments), this method
        creates a row in the sqlite session database that holds the information
        for a session.

        Parameters
        ----------
        session_id : str
            uuid for the session; this method must be given a session_id
        path : str
            the path for the given session
        name: str
            the name of the session
        type: string
            the type of the session
        kernel_id : str
            a uuid for the kernel associated with this session

        Returns
        -------
        model : dict
            a dictionary of the session model
        """
        self.cursor.execute("INSERT INTO session VALUES (?,?,?,?,?)",
            (session_id, path, name, type, kernel_id)
        )
        result = yield maybe_future(self.get_session(session_id=session_id))
        raise gen.Return(result)

    @gen.coroutine
    def get_session(self, **kwargs):
        """Returns the model for a particular session.

        Takes a keyword argument and searches for the value in the session
        database, then returns the rest of the session's info.

        Parameters
        ----------
        **kwargs : keyword argument
            must be given one of the keywords and values from the session database
            (i.e. session_id, path, name, type, kernel_id)

        Returns
        -------
        model : dict
            returns a dictionary that includes all the information from the
            session described by the kwarg.
        """
        if not kwargs:
            raise TypeError("must specify a column to query")

        conditions = []
        for column in kwargs.keys():
            if column not in self._columns:
                raise TypeError("No such column: %r", column)
            conditions.append("%s=?" % column)

        query = "SELECT * FROM session WHERE %s" % (' AND '.join(conditions))

        self.cursor.execute(query, list(kwargs.values()))
        try:
            row = self.cursor.fetchone()
        except KeyError:
            # The kernel is missing, so the session just got deleted.
            row = None

        if row is None:
            q = []
            for key, value in kwargs.items():
                q.append("%s=%r" % (key, value))

            raise web.HTTPError(404, u'Session not found: %s' % (', '.join(q)))

        model = yield maybe_future(self.row_to_model(row))
        raise gen.Return(model)

    @gen.coroutine
    def update_session(self, session_id, **kwargs):
        """Updates the values in the session database.

        Changes the values of the session with the given session_id
        with the values from the keyword arguments.

        Parameters
        ----------
        session_id : str
            a uuid that identifies a session in the sqlite3 database
        **kwargs : str
            the key must correspond to a column title in session database,
            and the value replaces the current value in the session
            with session_id.
        """
        yield maybe_future(self.get_session(session_id=session_id))

        if not kwargs:
            # no changes
            return

        sets = []
        for column in kwargs.keys():
            if column not in self._columns:
                raise TypeError("No such column: %r" % column)
            sets.append("%s=?" % column)
        query = "UPDATE session SET %s WHERE session_id=?" % (', '.join(sets))
        self.cursor.execute(query, list(kwargs.values()) + [session_id])

    def kernel_culled(self, kernel_id):
        """Checks if the kernel is still considered alive and returns true if its not found. """
        return kernel_id not in self.kernel_manager

    @gen.coroutine
    def row_to_model(self, row, tolerate_culled=False):
        """Takes sqlite database session row and turns it into a dictionary"""
        kernel_culled = yield maybe_future(self.kernel_culled(row['kernel_id']))
        if kernel_culled:
            # The kernel was culled or died without deleting the session.
            # We can't use delete_session here because that tries to find
            # and shut down the kernel - so we'll delete the row directly.
            #
            # If caller wishes to tolerate culled kernels, log a warning
            # and return None. Otherwise, raise KeyError with a similar
            # message.
            self.cursor.execute("DELETE FROM session WHERE session_id=?",
                                (row['session_id'],))
            msg = "Kernel '{kernel_id}' appears to have been culled or died unexpectedly, " \
                  "invalidating session '{session_id}'. The session has been removed.".\
                format(kernel_id=row['kernel_id'], session_id=row['session_id'])
            if tolerate_culled:
                self.log.warning(msg + " Continuing...")
                raise gen.Return(None)
            raise KeyError(msg)

        kernel_model = yield maybe_future(self.kernel_manager.kernel_model(row['kernel_id']))
        model = {
            'id': row['session_id'],
            'path': row['path'],
            'name': row['name'],
            'type': row['type'],
            'kernel': kernel_model
        }
        if row['type'] == 'notebook':
            # Provide the deprecated API.
            model['notebook'] = {'path': row['path'], 'name': row['name']}
        raise gen.Return(model)

    @gen.coroutine
    def list_sessions(self):
        """Returns a list of dictionaries containing all the information from
        the session database"""
        c = self.cursor.execute("SELECT * FROM session")
        result = []
        # We need to use fetchall() here, because row_to_model can delete rows,
        # which messes up the cursor if we're iterating over rows.
        for row in c.fetchall():
            try:
                model = yield maybe_future(self.row_to_model(row))
                result.append(model)
            except KeyError:
                pass
        raise gen.Return(result)

    @gen.coroutine
    def delete_session(self, session_id):
        """Deletes the row in the session database with given session_id"""
        session = yield maybe_future(self.get_session(session_id=session_id))
        yield maybe_future(self.kernel_manager.shutdown_kernel(session['kernel']['id']))
        self.cursor.execute("DELETE FROM session WHERE session_id=?", (session_id,))
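Note (not part of the commit): SessionManager builds its SQL dynamically, but it only interpolates column names that pass the _columns whitelist, while values are always bound as sqlite parameters. A minimal sketch of that pattern, with hypothetical keyword arguments:

# Minimal sketch of the query-building pattern used by get_session()/update_session().
# The kwargs below are hypothetical; real calls pass e.g. session_id=... or path=...
_columns = {'session_id', 'path', 'name', 'type', 'kernel_id'}
kwargs = {'path': 'foo/nb1.ipynb', 'type': 'notebook'}

conditions = []
for column in kwargs:
    if column not in _columns:
        raise TypeError("No such column: %r" % column)  # unknown columns are rejected
    conditions.append("%s=?" % column)                   # only whitelisted names reach the SQL text

query = "SELECT * FROM session WHERE %s" % ' AND '.join(conditions)
# -> "SELECT * FROM session WHERE path=? AND type=?"
# values are bound separately: cursor.execute(query, list(kwargs.values()))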
@@ -0,0 +1,256 @@
"""Tests for the session manager."""

from functools import partial
from unittest import TestCase

from tornado import gen, web
from tornado.ioloop import IOLoop

from ..sessionmanager import SessionManager
from notebook.services.kernels.kernelmanager import MappingKernelManager
from notebook.services.contents.manager import ContentsManager
from notebook._tz import utcnow, isoformat

class DummyKernel(object):
    def __init__(self, kernel_name='python'):
        self.kernel_name = kernel_name

dummy_date = utcnow()
dummy_date_s = isoformat(dummy_date)

class DummyMKM(MappingKernelManager):
    """MappingKernelManager interface that doesn't start kernels, for testing"""
    def __init__(self, *args, **kwargs):
        super(DummyMKM, self).__init__(*args, **kwargs)
        self.id_letters = iter(u'ABCDEFGHIJK')

    def _new_id(self):
        return next(self.id_letters)

    def start_kernel(self, kernel_id=None, path=None, kernel_name='python', **kwargs):
        kernel_id = kernel_id or self._new_id()
        k = self._kernels[kernel_id] = DummyKernel(kernel_name=kernel_name)
        self._kernel_connections[kernel_id] = 0
        k.last_activity = dummy_date
        k.execution_state = 'idle'
        return kernel_id

    def shutdown_kernel(self, kernel_id, now=False):
        del self._kernels[kernel_id]


class TestSessionManager(TestCase):

    def setUp(self):
        self.sm = SessionManager(
            kernel_manager=DummyMKM(),
            contents_manager=ContentsManager(),
        )
        self.loop = IOLoop()
        self.addCleanup(partial(self.loop.close, all_fds=True))

    def create_sessions(self, *kwarg_list):
        @gen.coroutine
        def co_add():
            sessions = []
            for kwargs in kwarg_list:
                kwargs.setdefault('type', 'notebook')
                session = yield self.sm.create_session(**kwargs)
                sessions.append(session)
            raise gen.Return(sessions)
        return self.loop.run_sync(co_add)

    def create_session(self, **kwargs):
        return self.create_sessions(kwargs)[0]

    def test_get_session(self):
        sm = self.sm
        session_id = self.create_session(path='/path/to/test.ipynb', kernel_name='bar')['id']
        model = self.loop.run_sync(lambda: sm.get_session(session_id=session_id))
        expected = {'id':session_id,
                    'path': u'/path/to/test.ipynb',
                    'notebook': {'path': u'/path/to/test.ipynb', 'name': None},
                    'type': 'notebook',
                    'name': None,
                    'kernel': {
                        'id': 'A',
                        'name': 'bar',
                        'connections': 0,
                        'last_activity': dummy_date_s,
                        'execution_state': 'idle',
                    }}
        self.assertEqual(model, expected)

    def test_bad_get_session(self):
        # Should raise error if a bad key is passed to the database.
        sm = self.sm
        session_id = self.create_session(path='/path/to/test.ipynb',
                                         kernel_name='foo')['id']
        with self.assertRaises(TypeError):
            self.loop.run_sync(lambda: sm.get_session(bad_id=session_id)) # Bad keyword

    def test_get_session_dead_kernel(self):
        sm = self.sm
        session = self.create_session(path='/path/to/1/test1.ipynb', kernel_name='python')
        # kill the kernel
        sm.kernel_manager.shutdown_kernel(session['kernel']['id'])
        with self.assertRaises(KeyError):
            self.loop.run_sync(lambda: sm.get_session(session_id=session['id']))
        # no sessions left
        listed = self.loop.run_sync(lambda: sm.list_sessions())
        self.assertEqual(listed, [])

    def test_list_sessions(self):
        sm = self.sm
        sessions = self.create_sessions(
            dict(path='/path/to/1/test1.ipynb', kernel_name='python'),
            dict(path='/path/to/2/test2.py', type='file', kernel_name='python'),
            dict(path='/path/to/3', name='foo', type='console', kernel_name='python'),
        )

        sessions = self.loop.run_sync(lambda: sm.list_sessions())
        expected = [
            {
                'id':sessions[0]['id'],
                'path': u'/path/to/1/test1.ipynb',
                'type': 'notebook',
                'notebook': {'path': u'/path/to/1/test1.ipynb', 'name': None},
                'name': None,
                'kernel': {
                    'id': 'A',
                    'name':'python',
                    'connections': 0,
                    'last_activity': dummy_date_s,
                    'execution_state': 'idle',
                }
            }, {
                'id':sessions[1]['id'],
                'path': u'/path/to/2/test2.py',
                'type': 'file',
                'name': None,
                'kernel': {
                    'id': 'B',
                    'name':'python',
                    'connections': 0,
                    'last_activity': dummy_date_s,
                    'execution_state': 'idle',
                }
            }, {
                'id':sessions[2]['id'],
                'path': u'/path/to/3',
                'type': 'console',
                'name': 'foo',
                'kernel': {
                    'id': 'C',
                    'name':'python',
                    'connections': 0,
                    'last_activity': dummy_date_s,
                    'execution_state': 'idle',
                }
            }
        ]
        self.assertEqual(sessions, expected)

    def test_list_sessions_dead_kernel(self):
        sm = self.sm
        sessions = self.create_sessions(
            dict(path='/path/to/1/test1.ipynb', kernel_name='python'),
            dict(path='/path/to/2/test2.ipynb', kernel_name='python'),
        )
        # kill one of the kernels
        sm.kernel_manager.shutdown_kernel(sessions[0]['kernel']['id'])
        listed = self.loop.run_sync(lambda: sm.list_sessions())
        expected = [
            {
                'id': sessions[1]['id'],
                'path': u'/path/to/2/test2.ipynb',
                'type': 'notebook',
                'name': None,
                'notebook': {'path': u'/path/to/2/test2.ipynb', 'name': None},
                'kernel': {
                    'id': 'B',
                    'name':'python',
                    'connections': 0,
                    'last_activity': dummy_date_s,
                    'execution_state': 'idle',
                }
            }
        ]
        self.assertEqual(listed, expected)

    def test_update_session(self):
        sm = self.sm
        session_id = self.create_session(path='/path/to/test.ipynb',
                                         kernel_name='julia')['id']
        self.loop.run_sync(lambda: sm.update_session(session_id, path='/path/to/new_name.ipynb'))
        model = self.loop.run_sync(lambda: sm.get_session(session_id=session_id))
        expected = {'id':session_id,
                    'path': u'/path/to/new_name.ipynb',
                    'type': 'notebook',
                    'name': None,
                    'notebook': {'path': u'/path/to/new_name.ipynb', 'name': None},
                    'kernel': {
                        'id': 'A',
                        'name':'julia',
                        'connections': 0,
                        'last_activity': dummy_date_s,
                        'execution_state': 'idle',
                    }
        }
        self.assertEqual(model, expected)

    def test_bad_update_session(self):
        # try to update a session with a bad keyword ~ raise error
        sm = self.sm
        session_id = self.create_session(path='/path/to/test.ipynb',
                                         kernel_name='ir')['id']
        with self.assertRaises(TypeError):
            self.loop.run_sync(lambda: sm.update_session(session_id=session_id, bad_kw='test.ipynb')) # Bad keyword

    def test_delete_session(self):
        sm = self.sm
        sessions = self.create_sessions(
            dict(path='/path/to/1/test1.ipynb', kernel_name='python'),
            dict(path='/path/to/2/test2.ipynb', kernel_name='python'),
            dict(path='/path/to/3', name='foo', type='console', kernel_name='python'),
        )
        self.loop.run_sync(lambda: sm.delete_session(sessions[1]['id']))
        new_sessions = self.loop.run_sync(lambda: sm.list_sessions())
        expected = [{
                'id': sessions[0]['id'],
                'path': u'/path/to/1/test1.ipynb',
                'type': 'notebook',
                'name': None,
                'notebook': {'path': u'/path/to/1/test1.ipynb', 'name': None},
                'kernel': {
                    'id': 'A',
                    'name':'python',
                    'connections': 0,
                    'last_activity': dummy_date_s,
                    'execution_state': 'idle',
                }
            }, {
                'id': sessions[2]['id'],
                'type': 'console',
                'path': u'/path/to/3',
                'name': 'foo',
                'kernel': {
                    'id': 'C',
                    'name':'python',
                    'connections': 0,
                    'last_activity': dummy_date_s,
                    'execution_state': 'idle',
                }
            }
        ]
        self.assertEqual(new_sessions, expected)

    def test_bad_delete_session(self):
        # try to delete a session that doesn't exist ~ raise error
        sm = self.sm
        self.create_session(path='/path/to/test.ipynb', kernel_name='python')
        with self.assertRaises(TypeError):
            self.loop.run_sync(lambda : sm.delete_session(bad_kwarg='23424')) # Bad keyword
        with self.assertRaises(web.HTTPError):
            self.loop.run_sync(lambda : sm.delete_session(session_id='23424')) # nonexistent
@@ -0,0 +1,290 @@
"""Test the sessions web service API."""

import errno
from functools import partial
import io
import os
import json
import shutil
import sys
import time

from unittest import SkipTest

from notebook.utils import url_path_join
from notebook.tests.launchnotebook import NotebookTestBase, assert_http_error
from nbformat.v4 import new_notebook
from nbformat import write

try:
    from jupyter_client import AsyncMultiKernelManager
    async_testing_enabled = True
except ImportError:
    async_testing_enabled = False

pjoin = os.path.join


class SessionAPI(object):
    """Wrapper for notebook API calls."""
    def __init__(self, request):
        self.request = request

    def _req(self, verb, path, body=None):
        response = self.request(verb,
                url_path_join('api/sessions', path), data=body)

        if 400 <= response.status_code < 600:
            try:
                response.reason = response.json()['message']
            except:
                pass
        response.raise_for_status()

        return response

    def list(self):
        return self._req('GET', '')

    def get(self, id):
        return self._req('GET', id)

    def create(self, path, type='notebook', kernel_name='python', kernel_id=None):
        body = json.dumps({'path': path,
                           'type': type,
                           'kernel': {'name': kernel_name,
                                      'id': kernel_id}})
        return self._req('POST', '', body)

    def create_deprecated(self, path):
        body = json.dumps({'notebook': {'path': path},
                           'kernel': {'name': 'python',
                                      'id': 'foo'}})
        return self._req('POST', '', body)

    def modify_path(self, id, path):
        body = json.dumps({'path': path})
        return self._req('PATCH', id, body)

    def modify_path_deprecated(self, id, path):
        body = json.dumps({'notebook': {'path': path}})
        return self._req('PATCH', id, body)

    def modify_type(self, id, type):
        body = json.dumps({'type': type})
        return self._req('PATCH', id, body)

    def modify_kernel_name(self, id, kernel_name):
        body = json.dumps({'kernel': {'name': kernel_name}})
        return self._req('PATCH', id, body)

    def modify_kernel_id(self, id, kernel_id):
        # Also send a dummy name to show that id takes precedence.
        body = json.dumps({'kernel': {'id': kernel_id, 'name': 'foo'}})
        return self._req('PATCH', id, body)

    def delete(self, id):
        return self._req('DELETE', id)


class SessionAPITest(NotebookTestBase):
    """Test the sessions web service API"""
    def setUp(self):
        nbdir = self.notebook_dir
        subdir = pjoin(nbdir, 'foo')

        try:
            os.mkdir(subdir)
        except OSError as e:
            # Deleting the folder in an earlier test may have failed
            if e.errno != errno.EEXIST:
                raise
        self.addCleanup(partial(shutil.rmtree, subdir, ignore_errors=True))

        with io.open(pjoin(subdir, 'nb1.ipynb'), 'w', encoding='utf-8') as f:
            nb = new_notebook()
            write(nb, f, version=4)

        self.sess_api = SessionAPI(self.request)

        @self.addCleanup
        def cleanup_sessions():
            for session in self.sess_api.list().json():
                self.sess_api.delete(session['id'])

            # This is necessary in some situations on Windows: without it, it
            # fails to delete the directory because something is still using
            # it. I think there is a brief period after the kernel terminates
            # where Windows still treats its working directory as in use. On my
            # Windows VM, 0.01s is not long enough, but 0.1s appears to work
            # reliably. -- TK, 15 December 2014
            time.sleep(0.1)

    def test_create(self):
        sessions = self.sess_api.list().json()
        self.assertEqual(len(sessions), 0)

        resp = self.sess_api.create('foo/nb1.ipynb')
        self.assertEqual(resp.status_code, 201)
        newsession = resp.json()
        self.assertIn('id', newsession)
        self.assertEqual(newsession['path'], 'foo/nb1.ipynb')
        self.assertEqual(newsession['type'], 'notebook')
        self.assertEqual(resp.headers['Location'], self.url_prefix + 'api/sessions/{0}'.format(newsession['id']))

        sessions = self.sess_api.list().json()
        self.assertEqual(sessions, [newsession])

        # Retrieve it
        sid = newsession['id']
        got = self.sess_api.get(sid).json()
        self.assertEqual(got, newsession)

    def test_create_file_session(self):
        resp = self.sess_api.create('foo/nb1.py', type='file')
        self.assertEqual(resp.status_code, 201)
        newsession = resp.json()
        self.assertEqual(newsession['path'], 'foo/nb1.py')
        self.assertEqual(newsession['type'], 'file')

    def test_create_console_session(self):
        resp = self.sess_api.create('foo/abc123', type='console')
        self.assertEqual(resp.status_code, 201)
        newsession = resp.json()
        self.assertEqual(newsession['path'], 'foo/abc123')
        self.assertEqual(newsession['type'], 'console')

    def test_create_deprecated(self):
        resp = self.sess_api.create_deprecated('foo/nb1.ipynb')
        self.assertEqual(resp.status_code, 201)
        newsession = resp.json()
        self.assertEqual(newsession['path'], 'foo/nb1.ipynb')
        self.assertEqual(newsession['type'], 'notebook')
        self.assertEqual(newsession['notebook']['path'], 'foo/nb1.ipynb')

    def test_create_with_kernel_id(self):
        # create a new kernel
        r = self.request('POST', 'api/kernels')
        r.raise_for_status()
        kernel = r.json()

        resp = self.sess_api.create('foo/nb1.ipynb', kernel_id=kernel['id'])
        self.assertEqual(resp.status_code, 201)
        newsession = resp.json()
        self.assertIn('id', newsession)
        self.assertEqual(newsession['path'], 'foo/nb1.ipynb')
        self.assertEqual(newsession['kernel']['id'], kernel['id'])
        self.assertEqual(resp.headers['Location'], self.url_prefix + 'api/sessions/{0}'.format(newsession['id']))

        sessions = self.sess_api.list().json()
        self.assertEqual(sessions, [newsession])

        # Retrieve it
        sid = newsession['id']
        got = self.sess_api.get(sid).json()
        self.assertEqual(got, newsession)

    def test_delete(self):
        newsession = self.sess_api.create('foo/nb1.ipynb').json()
        sid = newsession['id']

        resp = self.sess_api.delete(sid)
        self.assertEqual(resp.status_code, 204)

        sessions = self.sess_api.list().json()
        self.assertEqual(sessions, [])

        with assert_http_error(404):
            self.sess_api.get(sid)

    def test_modify_path(self):
        newsession = self.sess_api.create('foo/nb1.ipynb').json()
        sid = newsession['id']

        changed = self.sess_api.modify_path(sid, 'nb2.ipynb').json()
        self.assertEqual(changed['id'], sid)
        self.assertEqual(changed['path'], 'nb2.ipynb')

    def test_modify_path_deprecated(self):
        newsession = self.sess_api.create('foo/nb1.ipynb').json()
        sid = newsession['id']

        changed = self.sess_api.modify_path_deprecated(sid, 'nb2.ipynb').json()
        self.assertEqual(changed['id'], sid)
        self.assertEqual(changed['notebook']['path'], 'nb2.ipynb')

    def test_modify_type(self):
        newsession = self.sess_api.create('foo/nb1.ipynb').json()
        sid = newsession['id']

        changed = self.sess_api.modify_type(sid, 'console').json()
        self.assertEqual(changed['id'], sid)
        self.assertEqual(changed['type'], 'console')

    def test_modify_kernel_name(self):
        before = self.sess_api.create('foo/nb1.ipynb').json()
        sid = before['id']

        after = self.sess_api.modify_kernel_name(sid, before['kernel']['name']).json()
        self.assertEqual(after['id'], sid)
        self.assertEqual(after['path'], before['path'])
        self.assertEqual(after['type'], before['type'])
        self.assertNotEqual(after['kernel']['id'], before['kernel']['id'])

        # check kernel list, to be sure previous kernel was cleaned up
        r = self.request('GET', 'api/kernels')
        r.raise_for_status()
        kernel_list = r.json()
        after['kernel'].pop('last_activity')
        [ k.pop('last_activity') for k in kernel_list ]
        self.assertEqual(kernel_list, [after['kernel']])

    def test_modify_kernel_id(self):
        before = self.sess_api.create('foo/nb1.ipynb').json()
        sid = before['id']

        # create a new kernel
        r = self.request('POST', 'api/kernels')
        r.raise_for_status()
        kernel = r.json()

        # Attach our session to the existing kernel
        after = self.sess_api.modify_kernel_id(sid, kernel['id']).json()
        self.assertEqual(after['id'], sid)
        self.assertEqual(after['path'], before['path'])
        self.assertEqual(after['type'], before['type'])
        self.assertNotEqual(after['kernel']['id'], before['kernel']['id'])
        self.assertEqual(after['kernel']['id'], kernel['id'])

        # check kernel list, to be sure previous kernel was cleaned up
        r = self.request('GET', 'api/kernels')
        r.raise_for_status()
        kernel_list = r.json()

        kernel.pop('last_activity')
        [ k.pop('last_activity') for k in kernel_list ]
        self.assertEqual(kernel_list, [kernel])


class AsyncSessionAPITest(SessionAPITest):
    """Test the sessions web service API using the AsyncMappingKernelManager"""

    @classmethod
    def setup_class(cls):
        if not async_testing_enabled:  # Can be removed once jupyter_client >= 6.1 is required.
            raise SkipTest("AsyncSessionAPITest tests skipped due to down-level jupyter_client!")
        if sys.version_info < (3, 6):  # Can be removed once 3.5 is dropped.
            raise SkipTest("AsyncSessionAPITest tests skipped due to Python < 3.6!")
        super(AsyncSessionAPITest, cls).setup_class()

    @classmethod
    def get_argv(cls):
        argv = super(AsyncSessionAPITest, cls).get_argv()

        # Before we extend the argv with the class, ensure that appropriate jupyter_client is available.
        # if not available, don't set kernel_manager_class, resulting in the repeat of sync-based tests.
        if async_testing_enabled:
            argv.extend(['--NotebookApp.kernel_manager_class='
                         'notebook.services.kernels.kernelmanager.AsyncMappingKernelManager'])

        return argv