Uploaded Test files
This commit is contained in:
parent
f584ad9d97
commit
2e81cb7d99
16627 changed files with 2065359 additions and 102444 deletions
8
venv/Lib/site-packages/nbconvert/__init__.py
Normal file
8
venv/Lib/site-packages/nbconvert/__init__.py
Normal file
|
@ -0,0 +1,8 @@
|
|||
"""Utilities for converting notebooks to and from different formats."""
|
||||
|
||||
from ._version import version_info, __version__
|
||||
from .exporters import *
|
||||
from . import filters
|
||||
from . import preprocessors
|
||||
from . import postprocessors
|
||||
from . import writers
|
2
venv/Lib/site-packages/nbconvert/__main__.py
Normal file
2
venv/Lib/site-packages/nbconvert/__main__.py
Normal file
|
@ -0,0 +1,2 @@
|
|||
from .nbconvertapp import main
|
||||
main()
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
63
venv/Lib/site-packages/nbconvert/_version.py
Normal file
63
venv/Lib/site-packages/nbconvert/_version.py
Normal file
|
@ -0,0 +1,63 @@
|
|||
version_info = (6, 0, 7)
|
||||
pre_info = ''
|
||||
dev_info = ''
|
||||
|
||||
def create_valid_version(release_info, epoch=None, pre_input='', dev_input=''):
|
||||
'''
|
||||
Creates a pep440 valid version of version number given a tuple integers
|
||||
and optional epoch, prerelease and developmental info.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
release_info : Tuple(Int)
|
||||
epoch : Int, default None
|
||||
pre_input : Str, default ''
|
||||
dev_input : Str, default ''
|
||||
'''
|
||||
|
||||
pep440_err = "The version number is not a pep 440 compliant version number"
|
||||
|
||||
|
||||
if epoch is not None:
|
||||
epoch_seg = str(epoch) + '!'
|
||||
else:
|
||||
epoch_seg = ''
|
||||
|
||||
release_seg = '.'.join(map(str, release_info))
|
||||
|
||||
_magic_pre = ['a','b','rc']
|
||||
if pre_input!='' and not any([pre_input.startswith(prefix) for prefix in _magic_pre]):
|
||||
raise ValueError(pep440_err + "\n please fix your prerelease segment.")
|
||||
else:
|
||||
pre_seg = pre_input
|
||||
|
||||
if dev_input=='':
|
||||
dev_seg = dev_input
|
||||
elif not dev_input.startswith('.') and dev_input.startswith('dev'):
|
||||
dev_seg = ''.join(['.', dev_input])
|
||||
elif dev_input.startswith('.dev'):
|
||||
dev_seg = dev_input
|
||||
elif dev_input!='':
|
||||
raise ValueError(pep440_err + "\n please fix your development segment.")
|
||||
|
||||
if dev_input!='' and not any([dev_seg.endswith(str(n)) for n in range(10)]):
|
||||
dev_seg = ''.join([dev_seg,'0'])
|
||||
|
||||
out_version = ''.join([epoch_seg, release_seg, pre_seg, dev_seg])
|
||||
|
||||
|
||||
import re
|
||||
def is_canonical(version):
|
||||
return re.match(r'^([1-9]\d*!)?(0|[1-9]\d*)'
|
||||
r'(\.(0|[1-9]\d*))*((a|b|rc)(0|[1-9]\d*))?'
|
||||
r'(\.post(0|[1-9]\d*))?(\.dev(0|[1-9]\d*))?$',
|
||||
version
|
||||
) is not None
|
||||
|
||||
if is_canonical(out_version):
|
||||
return out_version
|
||||
else:
|
||||
raise ValueError(pep440_err)
|
||||
|
||||
|
||||
__version__ = create_valid_version(version_info, pre_input=pre_info, dev_input=dev_info)
|
15
venv/Lib/site-packages/nbconvert/exporters/__init__.py
Normal file
15
venv/Lib/site-packages/nbconvert/exporters/__init__.py
Normal file
|
@ -0,0 +1,15 @@
|
|||
from .base import (export, get_exporter,
|
||||
ExporterNameError, get_export_names)
|
||||
from .html import HTMLExporter
|
||||
from .slides import SlidesExporter
|
||||
from .templateexporter import TemplateExporter
|
||||
from .latex import LatexExporter
|
||||
from .markdown import MarkdownExporter
|
||||
from .asciidoc import ASCIIDocExporter
|
||||
from .notebook import NotebookExporter
|
||||
from .pdf import PDFExporter
|
||||
from .webpdf import WebPDFExporter
|
||||
from .python import PythonExporter
|
||||
from .rst import RSTExporter
|
||||
from .exporter import Exporter, FilenameExtension
|
||||
from .script import ScriptExporter
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
51
venv/Lib/site-packages/nbconvert/exporters/asciidoc.py
Normal file
51
venv/Lib/site-packages/nbconvert/exporters/asciidoc.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
"""ASCIIDoc Exporter class"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from traitlets import default
|
||||
from traitlets.config import Config
|
||||
|
||||
from .templateexporter import TemplateExporter
|
||||
|
||||
|
||||
class ASCIIDocExporter(TemplateExporter):
|
||||
"""
|
||||
Exports to an ASCIIDoc document (.asciidoc)
|
||||
"""
|
||||
|
||||
@default('file_extension')
|
||||
def _file_extension_default(self):
|
||||
return '.asciidoc'
|
||||
|
||||
@default('template_name')
|
||||
def _template_name_default(self):
|
||||
return 'asciidoc'
|
||||
|
||||
output_mimetype = 'text/asciidoc'
|
||||
export_from_notebook = "AsciiDoc"
|
||||
|
||||
@default('raw_mimetypes')
|
||||
def _raw_mimetypes_default(self):
|
||||
return ['text/asciidoc/', 'text/markdown', 'text/html', '']
|
||||
|
||||
@property
|
||||
def default_config(self):
|
||||
c = Config({
|
||||
'NbConvertBase': {
|
||||
'display_data_priority': ['text/html',
|
||||
'text/markdown',
|
||||
'image/svg+xml',
|
||||
'image/png',
|
||||
'image/jpeg',
|
||||
'text/plain',
|
||||
'text/latex'
|
||||
]
|
||||
},
|
||||
'ExtractOutputPreprocessor': {'enabled': True},
|
||||
'HighlightMagicsPreprocessor': {
|
||||
'enabled':True
|
||||
},
|
||||
})
|
||||
c.merge(super().default_config)
|
||||
return c
|
146
venv/Lib/site-packages/nbconvert/exporters/base.py
Normal file
146
venv/Lib/site-packages/nbconvert/exporters/base.py
Normal file
|
@ -0,0 +1,146 @@
|
|||
"""Module containing single call export functions."""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import warnings
|
||||
|
||||
import entrypoints
|
||||
|
||||
from traitlets.config import get_config
|
||||
from traitlets.log import get_logger
|
||||
from traitlets.utils.importstring import import_item
|
||||
|
||||
from nbformat import NotebookNode
|
||||
|
||||
from .exporter import Exporter
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Functions
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
__all__ = [
|
||||
'export',
|
||||
'Exporter',
|
||||
'get_exporter',
|
||||
'get_export_names',
|
||||
'ExporterNameError',
|
||||
]
|
||||
|
||||
|
||||
class ExporterNameError(NameError):
|
||||
pass
|
||||
|
||||
class ExporterDisabledError(ValueError):
|
||||
pass
|
||||
|
||||
def export(exporter, nb, **kw):
|
||||
"""
|
||||
Export a notebook object using specific exporter class.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
exporter : `Exporter` class or instance
|
||||
Class or instance of the exporter that should be used. If the
|
||||
method initializes its own instance of the class, it is ASSUMED that
|
||||
the class type provided exposes a constructor (``__init__``) with the same
|
||||
signature as the base Exporter class.
|
||||
nb : `nbformat.NotebookNode`
|
||||
The notebook to export.
|
||||
config : config (optional, keyword arg)
|
||||
User configuration instance.
|
||||
resources : dict (optional, keyword arg)
|
||||
Resources used in the conversion process.
|
||||
|
||||
Returns
|
||||
-------
|
||||
tuple
|
||||
output : str
|
||||
The resulting converted notebook.
|
||||
resources : dictionary
|
||||
Dictionary of resources used prior to and during the conversion
|
||||
process.
|
||||
"""
|
||||
|
||||
#Check arguments
|
||||
if exporter is None:
|
||||
raise TypeError("Exporter is None")
|
||||
elif not isinstance(exporter, Exporter) and not issubclass(exporter, Exporter):
|
||||
raise TypeError("exporter does not inherit from Exporter (base)")
|
||||
if nb is None:
|
||||
raise TypeError("nb is None")
|
||||
|
||||
#Create the exporter
|
||||
resources = kw.pop('resources', None)
|
||||
if isinstance(exporter, Exporter):
|
||||
exporter_instance = exporter
|
||||
else:
|
||||
exporter_instance = exporter(**kw)
|
||||
|
||||
#Try to convert the notebook using the appropriate conversion function.
|
||||
if isinstance(nb, NotebookNode):
|
||||
output, resources = exporter_instance.from_notebook_node(nb, resources)
|
||||
elif isinstance(nb, (str,)):
|
||||
output, resources = exporter_instance.from_filename(nb, resources)
|
||||
else:
|
||||
output, resources = exporter_instance.from_file(nb, resources)
|
||||
return output, resources
|
||||
|
||||
|
||||
def get_exporter(name, config=get_config()):
|
||||
"""Given an exporter name or import path, return a class ready to be instantiated
|
||||
|
||||
Raises ExporterName if exporter is not found or ExporterDisabledError if not enabled
|
||||
"""
|
||||
|
||||
if name == 'ipynb':
|
||||
name = 'notebook'
|
||||
|
||||
try:
|
||||
exporter = entrypoints.get_single('nbconvert.exporters', name).load()
|
||||
if getattr(exporter(config=config), 'enabled', True):
|
||||
return exporter
|
||||
else:
|
||||
raise ExporterDisabledError('Exporter "%s" disabled in configuration' % (name))
|
||||
except entrypoints.NoSuchEntryPoint:
|
||||
try:
|
||||
exporter = entrypoints.get_single('nbconvert.exporters', name.lower()).load()
|
||||
if getattr(exporter(config=config), 'enabled', True):
|
||||
return exporter
|
||||
else:
|
||||
raise ExporterDisabledError('Exporter "%s" disabled in configuration' % (name))
|
||||
except entrypoints.NoSuchEntryPoint:
|
||||
pass
|
||||
|
||||
if '.' in name:
|
||||
try:
|
||||
exporter = import_item(name)
|
||||
if getattr(exporter(config=config), 'enabled', True):
|
||||
return exporter
|
||||
else:
|
||||
raise ExporterDisabledError('Exporter "%s" disabled in configuration' % (name))
|
||||
except ImportError:
|
||||
log = get_logger()
|
||||
log.error("Error importing %s" % name, exc_info=True)
|
||||
|
||||
raise ExporterNameError('Unknown exporter "%s", did you mean one of: %s?'
|
||||
% (name, ', '.join(get_export_names())))
|
||||
|
||||
|
||||
def get_export_names(config=get_config()):
|
||||
"""Return a list of the currently supported export targets
|
||||
|
||||
Exporters can be found in external packages by registering
|
||||
them as an nbconvert.exporter entrypoint.
|
||||
"""
|
||||
exporters = sorted(entrypoints.get_group_named('nbconvert.exporters'))
|
||||
enabled_exporters = []
|
||||
for exporter_name in exporters:
|
||||
try:
|
||||
e = get_exporter(exporter_name)(config=config)
|
||||
if e.enabled:
|
||||
enabled_exporters.append(exporter_name)
|
||||
except (ExporterDisabledError, ValueError):
|
||||
pass
|
||||
return enabled_exporters
|
326
venv/Lib/site-packages/nbconvert/exporters/exporter.py
Normal file
326
venv/Lib/site-packages/nbconvert/exporters/exporter.py
Normal file
|
@ -0,0 +1,326 @@
|
|||
"""This module defines a base Exporter class. For Jinja template-based export,
|
||||
see templateexporter.py.
|
||||
"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from __future__ import print_function, absolute_import
|
||||
|
||||
import io
|
||||
import os
|
||||
import copy
|
||||
import collections
|
||||
import datetime
|
||||
import sys
|
||||
|
||||
import nbformat
|
||||
|
||||
from traitlets.config.configurable import LoggingConfigurable
|
||||
from traitlets.config import Config
|
||||
from traitlets import Bool, HasTraits, Unicode, List, TraitError
|
||||
from traitlets.utils.importstring import import_item
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class ResourcesDict(collections.defaultdict):
|
||||
def __missing__(self, key):
|
||||
return ''
|
||||
|
||||
|
||||
class FilenameExtension(Unicode):
|
||||
"""A trait for filename extensions."""
|
||||
|
||||
default_value = u''
|
||||
info_text = 'a filename extension, beginning with a dot'
|
||||
|
||||
def validate(self, obj, value):
|
||||
# cast to proper unicode
|
||||
value = super().validate(obj, value)
|
||||
|
||||
# check that it starts with a dot
|
||||
if value and not value.startswith('.'):
|
||||
msg = "FileExtension trait '{}' does not begin with a dot: {!r}"
|
||||
raise TraitError(msg.format(self.name, value))
|
||||
|
||||
return value
|
||||
|
||||
|
||||
class Exporter(LoggingConfigurable):
|
||||
"""
|
||||
Class containing methods that sequentially run a list of preprocessors on a
|
||||
NotebookNode object and then return the modified NotebookNode object and
|
||||
accompanying resources dict.
|
||||
"""
|
||||
|
||||
enabled = Bool(True,
|
||||
help = "Disable this exporter (and any exporters inherited from it)."
|
||||
).tag(config=True)
|
||||
|
||||
file_extension = FilenameExtension(
|
||||
help="Extension of the file that should be written to disk"
|
||||
).tag(config=True)
|
||||
|
||||
# MIME type of the result file, for HTTP response headers.
|
||||
# This is *not* a traitlet, because we want to be able to access it from
|
||||
# the class, not just on instances.
|
||||
output_mimetype = ''
|
||||
|
||||
# Should this converter be accessible from the notebook front-end?
|
||||
# If so, should be a friendly name to display (and possibly translated).
|
||||
export_from_notebook = None
|
||||
|
||||
#Configurability, allows the user to easily add filters and preprocessors.
|
||||
preprocessors = List(
|
||||
help="""List of preprocessors, by name or namespace, to enable."""
|
||||
).tag(config=True)
|
||||
|
||||
_preprocessors = List()
|
||||
|
||||
default_preprocessors = List([
|
||||
'nbconvert.preprocessors.TagRemovePreprocessor',
|
||||
'nbconvert.preprocessors.RegexRemovePreprocessor',
|
||||
'nbconvert.preprocessors.ClearOutputPreprocessor',
|
||||
'nbconvert.preprocessors.ExecutePreprocessor',
|
||||
'nbconvert.preprocessors.coalesce_streams',
|
||||
'nbconvert.preprocessors.SVG2PDFPreprocessor',
|
||||
'nbconvert.preprocessors.LatexPreprocessor',
|
||||
'nbconvert.preprocessors.HighlightMagicsPreprocessor',
|
||||
'nbconvert.preprocessors.ExtractOutputPreprocessor',
|
||||
'nbconvert.preprocessors.ClearMetadataPreprocessor',
|
||||
],
|
||||
help="""List of preprocessors available by default, by name, namespace,
|
||||
instance, or type."""
|
||||
).tag(config=True)
|
||||
|
||||
def __init__(self, config=None, **kw):
|
||||
"""
|
||||
Public constructor
|
||||
|
||||
Parameters
|
||||
----------
|
||||
config : ``traitlets.config.Config``
|
||||
User configuration instance.
|
||||
`**kw`
|
||||
Additional keyword arguments passed to parent __init__
|
||||
|
||||
"""
|
||||
with_default_config = self.default_config
|
||||
if config:
|
||||
with_default_config.merge(config)
|
||||
|
||||
super().__init__(config=with_default_config, **kw)
|
||||
|
||||
self._init_preprocessors()
|
||||
|
||||
|
||||
@property
|
||||
def default_config(self):
|
||||
return Config()
|
||||
|
||||
def from_notebook_node(self, nb, resources=None, **kw):
|
||||
"""
|
||||
Convert a notebook from a notebook node instance.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
nb : `nbformat.NotebookNode`
|
||||
Notebook node (dict-like with attr-access)
|
||||
resources : dict
|
||||
Additional resources that can be accessed read/write by
|
||||
preprocessors and filters.
|
||||
`**kw`
|
||||
Ignored
|
||||
|
||||
"""
|
||||
nb_copy = copy.deepcopy(nb)
|
||||
resources = self._init_resources(resources)
|
||||
|
||||
if 'language' in nb['metadata']:
|
||||
resources['language'] = nb['metadata']['language'].lower()
|
||||
|
||||
# Preprocess
|
||||
nb_copy, resources = self._preprocess(nb_copy, resources)
|
||||
|
||||
return nb_copy, resources
|
||||
|
||||
def from_filename(self, filename: str, resources: Optional[dict] = None, **kw):
|
||||
"""
|
||||
Convert a notebook from a notebook file.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
filename : str
|
||||
Full filename of the notebook file to open and convert.
|
||||
resources : dict
|
||||
Additional resources that can be accessed read/write by
|
||||
preprocessors and filters.
|
||||
`**kw`
|
||||
Ignored
|
||||
|
||||
"""
|
||||
# Pull the metadata from the filesystem.
|
||||
if resources is None:
|
||||
resources = ResourcesDict()
|
||||
if not 'metadata' in resources or resources['metadata'] == '':
|
||||
resources['metadata'] = ResourcesDict()
|
||||
path, basename = os.path.split(filename)
|
||||
notebook_name = os.path.splitext(basename)[0]
|
||||
resources['metadata']['name'] = notebook_name
|
||||
resources['metadata']['path'] = path
|
||||
|
||||
modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(filename))
|
||||
# datetime.strftime date format for ipython
|
||||
if sys.platform == 'win32':
|
||||
date_format = "%B %d, %Y"
|
||||
else:
|
||||
date_format = "%B %-d, %Y"
|
||||
resources['metadata']['modified_date'] = modified_date.strftime(date_format)
|
||||
|
||||
with io.open(filename, encoding='utf-8') as f:
|
||||
return self.from_file(f, resources=resources, **kw)
|
||||
|
||||
|
||||
def from_file(self, file_stream, resources=None, **kw):
|
||||
"""
|
||||
Convert a notebook from a notebook file.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
file_stream : file-like object
|
||||
Notebook file-like object to convert.
|
||||
resources : dict
|
||||
Additional resources that can be accessed read/write by
|
||||
preprocessors and filters.
|
||||
`**kw`
|
||||
Ignored
|
||||
|
||||
"""
|
||||
return self.from_notebook_node(nbformat.read(file_stream, as_version=4), resources=resources, **kw)
|
||||
|
||||
|
||||
def register_preprocessor(self, preprocessor, enabled=False):
|
||||
"""
|
||||
Register a preprocessor.
|
||||
Preprocessors are classes that act upon the notebook before it is
|
||||
passed into the Jinja templating engine. preprocessors are also
|
||||
capable of passing additional information to the Jinja
|
||||
templating engine.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
preprocessor : `Preprocessor`
|
||||
A dotted module name, a type, or an instance
|
||||
enabled : bool
|
||||
Mark the preprocessor as enabled
|
||||
|
||||
"""
|
||||
if preprocessor is None:
|
||||
raise TypeError('preprocessor must not be None')
|
||||
isclass = isinstance(preprocessor, type)
|
||||
constructed = not isclass
|
||||
|
||||
# Handle preprocessor's registration based on it's type
|
||||
if constructed and isinstance(preprocessor, str,):
|
||||
# Preprocessor is a string, import the namespace and recursively call
|
||||
# this register_preprocessor method
|
||||
preprocessor_cls = import_item(preprocessor)
|
||||
return self.register_preprocessor(preprocessor_cls, enabled)
|
||||
|
||||
if constructed and hasattr(preprocessor, '__call__'):
|
||||
# Preprocessor is a function, no need to construct it.
|
||||
# Register and return the preprocessor.
|
||||
if enabled:
|
||||
preprocessor.enabled = True
|
||||
self._preprocessors.append(preprocessor)
|
||||
return preprocessor
|
||||
|
||||
elif isclass and issubclass(preprocessor, HasTraits):
|
||||
# Preprocessor is configurable. Make sure to pass in new default for
|
||||
# the enabled flag if one was specified.
|
||||
self.register_preprocessor(preprocessor(parent=self), enabled)
|
||||
|
||||
elif isclass:
|
||||
# Preprocessor is not configurable, construct it
|
||||
self.register_preprocessor(preprocessor(), enabled)
|
||||
|
||||
else:
|
||||
# Preprocessor is an instance of something without a __call__
|
||||
# attribute.
|
||||
raise TypeError('preprocessor must be callable or an importable constructor, got %r' % preprocessor)
|
||||
|
||||
|
||||
def _init_preprocessors(self):
|
||||
"""
|
||||
Register all of the preprocessors needed for this exporter, disabled
|
||||
unless specified explicitly.
|
||||
"""
|
||||
self._preprocessors = []
|
||||
|
||||
# Load default preprocessors (not necessarily enabled by default).
|
||||
for preprocessor in self.default_preprocessors:
|
||||
self.register_preprocessor(preprocessor)
|
||||
|
||||
# Load user-specified preprocessors. Enable by default.
|
||||
for preprocessor in self.preprocessors:
|
||||
self.register_preprocessor(preprocessor, enabled=True)
|
||||
|
||||
|
||||
def _init_resources(self, resources):
|
||||
|
||||
#Make sure the resources dict is of ResourcesDict type.
|
||||
if resources is None:
|
||||
resources = ResourcesDict()
|
||||
if not isinstance(resources, ResourcesDict):
|
||||
new_resources = ResourcesDict()
|
||||
new_resources.update(resources)
|
||||
resources = new_resources
|
||||
|
||||
#Make sure the metadata extension exists in resources
|
||||
if 'metadata' in resources:
|
||||
if not isinstance(resources['metadata'], ResourcesDict):
|
||||
new_metadata = ResourcesDict()
|
||||
new_metadata.update(resources['metadata'])
|
||||
resources['metadata'] = new_metadata
|
||||
else:
|
||||
resources['metadata'] = ResourcesDict()
|
||||
if not resources['metadata']['name']:
|
||||
resources['metadata']['name'] = 'Notebook'
|
||||
|
||||
#Set the output extension
|
||||
resources['output_extension'] = self.file_extension
|
||||
return resources
|
||||
|
||||
|
||||
def _preprocess(self, nb, resources):
|
||||
"""
|
||||
Preprocess the notebook before passing it into the Jinja engine.
|
||||
To preprocess the notebook is to successively apply all the
|
||||
enabled preprocessors. Output from each preprocessor is passed
|
||||
along to the next one.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
nb : notebook node
|
||||
notebook that is being exported.
|
||||
resources : a dict of additional resources that
|
||||
can be accessed read/write by preprocessors
|
||||
"""
|
||||
|
||||
# Do a copy.deepcopy first,
|
||||
# we are never safe enough with what the preprocessors could do.
|
||||
nbc = copy.deepcopy(nb)
|
||||
resc = copy.deepcopy(resources)
|
||||
|
||||
# Run each preprocessor on the notebook. Carry the output along
|
||||
# to each preprocessor
|
||||
for preprocessor in self._preprocessors:
|
||||
nbc, resc = preprocessor(nbc, resc)
|
||||
try:
|
||||
nbformat.validate(nbc, relax_add_props=True)
|
||||
except nbformat.ValidationError:
|
||||
self.log.error('Notebook is invalid after preprocessor %s',
|
||||
preprocessor)
|
||||
raise
|
||||
|
||||
return nbc, resc
|
164
venv/Lib/site-packages/nbconvert/exporters/html.py
Normal file
164
venv/Lib/site-packages/nbconvert/exporters/html.py
Normal file
|
@ -0,0 +1,164 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""HTML Exporter class"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import os
|
||||
import mimetypes
|
||||
import base64
|
||||
|
||||
from traitlets import default, Unicode, Bool
|
||||
from traitlets.config import Config
|
||||
from jupyter_core.paths import jupyter_path
|
||||
from jinja2 import contextfilter
|
||||
from jinja2.loaders import split_template_path
|
||||
import jinja2
|
||||
|
||||
from nbconvert.filters.highlight import Highlight2HTML
|
||||
from nbconvert.filters.markdown_mistune import IPythonRenderer, MarkdownWithMath
|
||||
|
||||
from .templateexporter import TemplateExporter
|
||||
|
||||
|
||||
class HTMLExporter(TemplateExporter):
|
||||
"""
|
||||
Exports a basic HTML document. This exporter assists with the export of
|
||||
HTML. Inherit from it if you are writing your own HTML template and need
|
||||
custom preprocessors/filters. If you don't need custom preprocessors/
|
||||
filters, just change the 'template_file' config option.
|
||||
"""
|
||||
export_from_notebook = "HTML"
|
||||
|
||||
anchor_link_text = Unicode(u'¶',
|
||||
help="The text used as the text for anchor links.").tag(config=True)
|
||||
|
||||
exclude_anchor_links = Bool(False,
|
||||
help="If anchor links should be included or not.").tag(config=True)
|
||||
|
||||
require_js_url = Unicode(
|
||||
"https://cdnjs.cloudflare.com/ajax/libs/require.js/2.1.10/require.min.js",
|
||||
help="""
|
||||
URL to load require.js from.
|
||||
|
||||
Defaults to loading from cdnjs.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
jquery_url = Unicode(
|
||||
"https://cdnjs.cloudflare.com/ajax/libs/jquery/2.0.3/jquery.min.js",
|
||||
help="""
|
||||
URL to load jQuery from.
|
||||
|
||||
Defaults to loading from cdnjs.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
@default('file_extension')
|
||||
def _file_extension_default(self):
|
||||
return '.html'
|
||||
|
||||
@default('template_name')
|
||||
def _template_name_default(self):
|
||||
return 'lab'
|
||||
|
||||
@default('template_data_paths')
|
||||
def _template_data_paths_default(self):
|
||||
return jupyter_path("nbconvert", "templates", "html")
|
||||
|
||||
|
||||
theme = Unicode('light',
|
||||
help='Template specific theme(e.g. the JupyterLab CSS theme for the lab template)'
|
||||
).tag(config=True)
|
||||
|
||||
output_mimetype = 'text/html'
|
||||
|
||||
@property
|
||||
def default_config(self):
|
||||
c = Config({
|
||||
'NbConvertBase': {
|
||||
'display_data_priority' : ['application/vnd.jupyter.widget-state+json',
|
||||
'application/vnd.jupyter.widget-view+json',
|
||||
'application/javascript',
|
||||
'text/html',
|
||||
'text/markdown',
|
||||
'image/svg+xml',
|
||||
'text/latex',
|
||||
'image/png',
|
||||
'image/jpeg',
|
||||
'text/plain'
|
||||
]
|
||||
},
|
||||
'HighlightMagicsPreprocessor': {
|
||||
'enabled':True
|
||||
}
|
||||
})
|
||||
c.merge(super().default_config)
|
||||
return c
|
||||
|
||||
@contextfilter
|
||||
def markdown2html(self, context, source):
|
||||
"""Markdown to HTML filter respecting the anchor_link_text setting"""
|
||||
cell = context.get('cell', {})
|
||||
attachments = cell.get('attachments', {})
|
||||
renderer = IPythonRenderer(escape=False, attachments=attachments,
|
||||
anchor_link_text=self.anchor_link_text,
|
||||
exclude_anchor_links=self.exclude_anchor_links)
|
||||
return MarkdownWithMath(renderer=renderer).render(source)
|
||||
|
||||
def default_filters(self):
|
||||
for pair in super().default_filters():
|
||||
yield pair
|
||||
yield ('markdown2html', self.markdown2html)
|
||||
|
||||
def from_notebook_node(self, nb, resources=None, **kw):
|
||||
langinfo = nb.metadata.get('language_info', {})
|
||||
lexer = langinfo.get('pygments_lexer', langinfo.get('name', None))
|
||||
highlight_code = self.filters.get('highlight_code', Highlight2HTML(pygments_lexer=lexer, parent=self))
|
||||
self.register_filter('highlight_code', highlight_code)
|
||||
return super().from_notebook_node(nb, resources, **kw)
|
||||
|
||||
def _init_resources(self, resources):
|
||||
def resources_include_css(name):
|
||||
env = self.environment
|
||||
code = """<style type="text/css">\n%s</style>""" % (env.loader.get_source(env, name)[0])
|
||||
return jinja2.Markup(code)
|
||||
|
||||
def resources_include_js(name):
|
||||
env = self.environment
|
||||
code = """<script>\n%s</script>""" % (env.loader.get_source(env, name)[0])
|
||||
return jinja2.Markup(code)
|
||||
|
||||
def resources_include_url(name):
|
||||
env = self.environment
|
||||
mime_type, encoding = mimetypes.guess_type(name)
|
||||
try:
|
||||
# we try to load via the jinja loader, but that tries to load
|
||||
# as (encoded) text
|
||||
data = env.loader.get_source(env, name)[0].encode('utf8')
|
||||
except UnicodeDecodeError:
|
||||
# if that fails (for instance a binary file, png or ttf)
|
||||
# we mimic jinja2
|
||||
pieces = split_template_path(name)
|
||||
searchpaths = self.get_template_paths()
|
||||
for searchpath in searchpaths:
|
||||
filename = os.path.join(searchpath, *pieces)
|
||||
print(filename, os.path.exists(filename))
|
||||
if os.path.exists(filename):
|
||||
with open(filename, "rb") as f:
|
||||
data = f.read()
|
||||
break
|
||||
else:
|
||||
raise ValueError("No file %r found in %r" % (name, searchpaths))
|
||||
data = base64.b64encode(data)
|
||||
data = data.replace(b'\n', b'').decode('ascii')
|
||||
src = 'data:{mime_type};base64,{data}'.format(mime_type=mime_type, data=data)
|
||||
return jinja2.Markup(src)
|
||||
resources = super()._init_resources(resources)
|
||||
resources['theme'] = self.theme
|
||||
resources['include_css'] = resources_include_css
|
||||
resources['include_js'] = resources_include_js
|
||||
resources['include_url'] = resources_include_url
|
||||
resources['require_js_url'] = self.require_js_url
|
||||
resources['jquery_url'] = self.jquery_url
|
||||
return resources
|
90
venv/Lib/site-packages/nbconvert/exporters/latex.py
Normal file
90
venv/Lib/site-packages/nbconvert/exporters/latex.py
Normal file
|
@ -0,0 +1,90 @@
|
|||
"""LaTeX Exporter class"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import os
|
||||
|
||||
from traitlets import Unicode, default
|
||||
from traitlets.config import Config
|
||||
from jupyter_core.paths import jupyter_path
|
||||
|
||||
from nbconvert.filters.highlight import Highlight2Latex
|
||||
from nbconvert.filters.filter_links import resolve_references
|
||||
from .templateexporter import TemplateExporter
|
||||
|
||||
class LatexExporter(TemplateExporter):
|
||||
"""
|
||||
Exports to a Latex template. Inherit from this class if your template is
|
||||
LaTeX based and you need custom transformers/filters.
|
||||
If you don't need custom transformers/filters, just change the
|
||||
'template_file' config option. Place your template in the special "/latex"
|
||||
subfolder of the "../templates" folder.
|
||||
"""
|
||||
export_from_notebook = "LaTeX"
|
||||
|
||||
@default('file_extension')
|
||||
def _file_extension_default(self):
|
||||
return '.tex'
|
||||
|
||||
|
||||
@default('template_data_paths')
|
||||
def _template_data_paths_default(self):
|
||||
return jupyter_path("nbconvert", "templates", "latex")
|
||||
|
||||
@default('template_name')
|
||||
def _template_name_default(self):
|
||||
return 'latex'
|
||||
|
||||
output_mimetype = 'text/latex'
|
||||
|
||||
def default_filters(self):
|
||||
for x in super().default_filters():
|
||||
yield x
|
||||
yield ('resolve_references', resolve_references)
|
||||
|
||||
@property
|
||||
def default_config(self):
|
||||
c = Config({
|
||||
'NbConvertBase': {
|
||||
'display_data_priority' : ['text/latex', 'application/pdf', 'image/png', 'image/jpeg', 'image/svg+xml', 'text/markdown', 'text/plain']
|
||||
},
|
||||
'ExtractOutputPreprocessor': {
|
||||
'enabled':True
|
||||
},
|
||||
'SVG2PDFPreprocessor': {
|
||||
'enabled':True
|
||||
},
|
||||
'LatexPreprocessor': {
|
||||
'enabled':True
|
||||
},
|
||||
'SphinxPreprocessor': {
|
||||
'enabled':True
|
||||
},
|
||||
'HighlightMagicsPreprocessor': {
|
||||
'enabled':True
|
||||
}
|
||||
})
|
||||
c.merge(super().default_config)
|
||||
return c
|
||||
|
||||
def from_notebook_node(self, nb, resources=None, **kw):
|
||||
langinfo = nb.metadata.get('language_info', {})
|
||||
lexer = langinfo.get('pygments_lexer', langinfo.get('name', None))
|
||||
highlight_code = self.filters.get('highlight_code', Highlight2Latex(pygments_lexer=lexer, parent=self))
|
||||
self.register_filter('highlight_code', highlight_code)
|
||||
|
||||
return super().from_notebook_node(nb, resources, **kw)
|
||||
|
||||
def _create_environment(self):
|
||||
environment = super()._create_environment()
|
||||
|
||||
# Set special Jinja2 syntax that will not conflict with latex.
|
||||
environment.block_start_string = "((*"
|
||||
environment.block_end_string = "*))"
|
||||
environment.variable_start_string = "((("
|
||||
environment.variable_end_string = ")))"
|
||||
environment.comment_start_string = "((="
|
||||
environment.comment_end_string = "=))"
|
||||
|
||||
return environment
|
51
venv/Lib/site-packages/nbconvert/exporters/markdown.py
Normal file
51
venv/Lib/site-packages/nbconvert/exporters/markdown.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
"""Markdown Exporter class"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from traitlets import default
|
||||
from traitlets.config import Config
|
||||
|
||||
from .templateexporter import TemplateExporter
|
||||
|
||||
|
||||
class MarkdownExporter(TemplateExporter):
|
||||
"""
|
||||
Exports to a markdown document (.md)
|
||||
"""
|
||||
export_from_notebook = "Markdown"
|
||||
|
||||
@default('file_extension')
|
||||
def _file_extension_default(self):
|
||||
return '.md'
|
||||
|
||||
@default('template_name')
|
||||
def _template_name_default(self):
|
||||
return 'markdown'
|
||||
|
||||
output_mimetype = 'text/markdown'
|
||||
|
||||
@default('raw_mimetypes')
|
||||
def _raw_mimetypes_default(self):
|
||||
return ['text/markdown', 'text/html', '']
|
||||
|
||||
@property
|
||||
def default_config(self):
|
||||
c = Config({
|
||||
'ExtractOutputPreprocessor': {'enabled': True},
|
||||
'NbConvertBase': {
|
||||
'display_data_priority': ['text/html',
|
||||
'text/markdown',
|
||||
'image/svg+xml',
|
||||
'text/latex',
|
||||
'image/png',
|
||||
'image/jpeg',
|
||||
'text/plain'
|
||||
]
|
||||
},
|
||||
'HighlightMagicsPreprocessor': {
|
||||
'enabled':True
|
||||
},
|
||||
})
|
||||
c.merge(super().default_config)
|
||||
return c
|
40
venv/Lib/site-packages/nbconvert/exporters/notebook.py
Normal file
40
venv/Lib/site-packages/nbconvert/exporters/notebook.py
Normal file
|
@ -0,0 +1,40 @@
|
|||
"""NotebookExporter class"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from .exporter import Exporter
|
||||
import nbformat
|
||||
from traitlets import Enum, default
|
||||
|
||||
class NotebookExporter(Exporter):
|
||||
"""Exports to an IPython notebook.
|
||||
|
||||
This is useful when you want to use nbconvert's preprocessors to operate on
|
||||
a notebook (e.g. to execute it) and then write it back to a notebook file.
|
||||
"""
|
||||
|
||||
nbformat_version = Enum(list(nbformat.versions),
|
||||
default_value=nbformat.current_nbformat,
|
||||
help="""The nbformat version to write.
|
||||
Use this to downgrade notebooks.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
@default('file_extension')
|
||||
def _file_extension_default(self):
|
||||
return '.ipynb'
|
||||
|
||||
output_mimetype = 'application/json'
|
||||
export_from_notebook = "Notebook"
|
||||
|
||||
def from_notebook_node(self, nb, resources=None, **kw):
|
||||
nb_copy, resources = super().from_notebook_node(nb, resources, **kw)
|
||||
if self.nbformat_version != nb_copy.nbformat:
|
||||
resources['output_suffix'] = '.v%i' % self.nbformat_version
|
||||
else:
|
||||
resources['output_suffix'] = '.nbconvert'
|
||||
output = nbformat.writes(nb_copy, version=self.nbformat_version)
|
||||
if not output.endswith("\n"):
|
||||
output = output + "\n"
|
||||
return output, resources
|
202
venv/Lib/site-packages/nbconvert/exporters/pdf.py
Normal file
202
venv/Lib/site-packages/nbconvert/exporters/pdf.py
Normal file
|
@ -0,0 +1,202 @@
|
|||
"""Export to PDF via latex"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import subprocess
|
||||
import os
|
||||
import sys
|
||||
|
||||
import shutil
|
||||
from traitlets import Integer, List, Bool, Instance, Unicode, default
|
||||
from testpath.tempdir import TemporaryWorkingDirectory
|
||||
from typing import Optional
|
||||
from .latex import LatexExporter
|
||||
|
||||
class LatexFailed(IOError):
|
||||
"""Exception for failed latex run
|
||||
|
||||
Captured latex output is in error.output.
|
||||
"""
|
||||
def __init__(self, output):
|
||||
self.output = output
|
||||
|
||||
def __unicode__(self):
|
||||
return u"PDF creating failed, captured latex output:\n%s" % self.output
|
||||
|
||||
def __str__(self):
|
||||
u = self.__unicode__()
|
||||
return u
|
||||
|
||||
def prepend_to_env_search_path(varname, value, envdict):
|
||||
"""Add value to the environment variable varname in envdict
|
||||
|
||||
e.g. prepend_to_env_search_path('BIBINPUTS', '/home/sally/foo', os.environ)
|
||||
"""
|
||||
if not value:
|
||||
return # Nothing to add
|
||||
|
||||
envdict[varname] = value + os.pathsep + envdict.get(varname, '')
|
||||
|
||||
class PDFExporter(LatexExporter):
|
||||
"""Writer designed to write to PDF files.
|
||||
|
||||
This inherits from `LatexExporter`. It creates a LaTeX file in
|
||||
a temporary directory using the template machinery, and then runs LaTeX
|
||||
to create a pdf.
|
||||
"""
|
||||
export_from_notebook="PDF via LaTeX"
|
||||
|
||||
latex_count = Integer(3,
|
||||
help="How many times latex will be called."
|
||||
).tag(config=True)
|
||||
|
||||
latex_command = List([u"xelatex", u"{filename}", "-quiet"],
|
||||
help="Shell command used to compile latex."
|
||||
).tag(config=True)
|
||||
|
||||
bib_command = List([u"bibtex", u"{filename}"],
|
||||
help="Shell command used to run bibtex."
|
||||
).tag(config=True)
|
||||
|
||||
verbose = Bool(False,
|
||||
help="Whether to display the output of latex commands."
|
||||
).tag(config=True)
|
||||
|
||||
texinputs = Unicode(help="texinputs dir. A notebook's directory is added")
|
||||
writer = Instance("nbconvert.writers.FilesWriter", args=(), kw={'build_directory': '.'})
|
||||
|
||||
output_mimetype = "application/pdf"
|
||||
|
||||
_captured_output = List()
|
||||
|
||||
@default('file_extension')
|
||||
def _file_extension_default(self):
|
||||
return '.pdf'
|
||||
|
||||
|
||||
@default('template_extension')
|
||||
def _template_extension_default(self):
|
||||
return '.tex.j2'
|
||||
|
||||
def run_command(self, command_list, filename, count, log_function, raise_on_failure=None):
|
||||
"""Run command_list count times.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
command_list : list
|
||||
A list of args to provide to Popen. Each element of this
|
||||
list will be interpolated with the filename to convert.
|
||||
filename : unicode
|
||||
The name of the file to convert.
|
||||
count : int
|
||||
How many times to run the command.
|
||||
raise_on_failure: Exception class (default None)
|
||||
If provided, will raise the given exception for if an instead of
|
||||
returning False on command failure.
|
||||
|
||||
Returns
|
||||
-------
|
||||
success : bool
|
||||
A boolean indicating if the command was successful (True)
|
||||
or failed (False).
|
||||
"""
|
||||
command = [c.format(filename=filename) for c in command_list]
|
||||
|
||||
# This will throw a clearer error if the command is not found
|
||||
cmd = shutil.which(command_list[0])
|
||||
if cmd is None:
|
||||
link = "https://nbconvert.readthedocs.io/en/latest/install.html#installing-tex"
|
||||
raise OSError("{formatter} not found on PATH, if you have not installed "
|
||||
"{formatter} you may need to do so. Find further instructions "
|
||||
"at {link}.".format(formatter=command_list[0], link=link))
|
||||
|
||||
times = 'time' if count == 1 else 'times'
|
||||
self.log.info("Running %s %i %s: %s", command_list[0], count, times, command)
|
||||
|
||||
shell = (sys.platform == 'win32')
|
||||
if shell:
|
||||
command = subprocess.list2cmdline(command)
|
||||
env = os.environ.copy()
|
||||
prepend_to_env_search_path('TEXINPUTS', self.texinputs, env)
|
||||
prepend_to_env_search_path('BIBINPUTS', self.texinputs, env)
|
||||
prepend_to_env_search_path('BSTINPUTS', self.texinputs, env)
|
||||
|
||||
with open(os.devnull, 'rb') as null:
|
||||
stdout = subprocess.PIPE if not self.verbose else None
|
||||
for index in range(count):
|
||||
p = subprocess.Popen(command, stdout=stdout, stderr=subprocess.STDOUT,
|
||||
stdin=null, shell=shell, env=env)
|
||||
out, _ = p.communicate()
|
||||
if p.returncode:
|
||||
if self.verbose:
|
||||
# verbose means I didn't capture stdout with PIPE,
|
||||
# so it's already been displayed and `out` is None.
|
||||
out = u''
|
||||
else:
|
||||
out = out.decode('utf-8', 'replace')
|
||||
log_function(command, out)
|
||||
self._captured_output.append(out)
|
||||
if raise_on_failure:
|
||||
raise raise_on_failure(
|
||||
'Failed to run "{command}" command:\n{output}'.format(
|
||||
command=command, output=out))
|
||||
return False # failure
|
||||
return True # success
|
||||
|
||||
def run_latex(self, filename, raise_on_failure=LatexFailed):
|
||||
"""Run xelatex self.latex_count times."""
|
||||
|
||||
def log_error(command, out):
|
||||
self.log.critical(u"%s failed: %s\n%s", command[0], command, out)
|
||||
|
||||
return self.run_command(self.latex_command, filename,
|
||||
self.latex_count, log_error, raise_on_failure)
|
||||
|
||||
def run_bib(self, filename, raise_on_failure=False):
|
||||
"""Run bibtex one time."""
|
||||
filename = os.path.splitext(filename)[0]
|
||||
|
||||
def log_error(command, out):
|
||||
self.log.warning('%s had problems, most likely because there were no citations',
|
||||
command[0])
|
||||
self.log.debug(u"%s output: %s\n%s", command[0], command, out)
|
||||
|
||||
return self.run_command(self.bib_command, filename, 1, log_error, raise_on_failure)
|
||||
|
||||
def from_notebook_node(self, nb, resources=None, **kw):
|
||||
latex, resources = super().from_notebook_node(
|
||||
nb, resources=resources, **kw
|
||||
)
|
||||
# set texinputs directory, so that local files will be found
|
||||
if resources and resources.get('metadata', {}).get('path'):
|
||||
self.texinputs = resources['metadata']['path']
|
||||
else:
|
||||
self.texinputs = os.getcwd()
|
||||
|
||||
self._captured_outputs = []
|
||||
with TemporaryWorkingDirectory():
|
||||
notebook_name = 'notebook'
|
||||
resources['output_extension'] = '.tex'
|
||||
tex_file = self.writer.write(latex, resources, notebook_name=notebook_name)
|
||||
self.log.info("Building PDF")
|
||||
self.run_latex(tex_file)
|
||||
if self.run_bib(tex_file):
|
||||
self.run_latex(tex_file)
|
||||
|
||||
pdf_file = notebook_name + '.pdf'
|
||||
if not os.path.isfile(pdf_file):
|
||||
raise LatexFailed('\n'.join(self._captured_output))
|
||||
self.log.info('PDF successfully created')
|
||||
with open(pdf_file, 'rb') as f:
|
||||
pdf_data = f.read()
|
||||
|
||||
# convert output extension to pdf
|
||||
# the writer above required it to be tex
|
||||
resources['output_extension'] = '.pdf'
|
||||
# clear figure outputs, extracted by latex export,
|
||||
# so we don't claim to be a multi-file export.
|
||||
resources.pop('outputs', None)
|
||||
|
||||
return pdf_data, resources
|
||||
|
25
venv/Lib/site-packages/nbconvert/exporters/python.py
Normal file
25
venv/Lib/site-packages/nbconvert/exporters/python.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
"""Python script Exporter class"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from traitlets import default
|
||||
|
||||
from .templateexporter import TemplateExporter
|
||||
|
||||
|
||||
class PythonExporter(TemplateExporter):
|
||||
"""
|
||||
Exports a Python code file.
|
||||
Note that the file produced will have a shebang of '#!/usr/bin/env python'
|
||||
regardless of the actual python version used in the notebook.
|
||||
"""
|
||||
@default('file_extension')
|
||||
def _file_extension_default(self):
|
||||
return '.py'
|
||||
|
||||
@default('template_name')
|
||||
def _template_name_default(self):
|
||||
return 'python'
|
||||
|
||||
output_mimetype = 'text/x-python'
|
39
venv/Lib/site-packages/nbconvert/exporters/rst.py
Normal file
39
venv/Lib/site-packages/nbconvert/exporters/rst.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
"""reStructuredText Exporter class"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from traitlets import default
|
||||
from traitlets.config import Config
|
||||
|
||||
from .templateexporter import TemplateExporter
|
||||
|
||||
|
||||
class RSTExporter(TemplateExporter):
|
||||
"""
|
||||
Exports reStructuredText documents.
|
||||
"""
|
||||
|
||||
@default('file_extension')
|
||||
def _file_extension_default(self):
|
||||
return '.rst'
|
||||
|
||||
@default('template_name')
|
||||
def _template_name_default(self):
|
||||
return 'rst'
|
||||
|
||||
output_mimetype = 'text/restructuredtext'
|
||||
export_from_notebook = "reST"
|
||||
|
||||
@property
|
||||
def default_config(self):
|
||||
c = Config({
|
||||
'ExtractOutputPreprocessor':{
|
||||
'enabled':True
|
||||
},
|
||||
'HighlightMagicsPreprocessor': {
|
||||
'enabled':True
|
||||
},
|
||||
})
|
||||
c.merge(super().default_config)
|
||||
return c
|
69
venv/Lib/site-packages/nbconvert/exporters/script.py
Normal file
69
venv/Lib/site-packages/nbconvert/exporters/script.py
Normal file
|
@ -0,0 +1,69 @@
|
|||
"""Generic script exporter class for any kernel language"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import entrypoints
|
||||
from .templateexporter import TemplateExporter
|
||||
|
||||
from traitlets import Dict, default
|
||||
from .base import get_exporter
|
||||
|
||||
|
||||
class ScriptExporter(TemplateExporter):
|
||||
# Caches of already looked-up and instantiated exporters for delegation:
|
||||
_exporters = Dict()
|
||||
_lang_exporters = Dict()
|
||||
export_from_notebook = "Script"
|
||||
|
||||
@default('template_file')
|
||||
def _template_file_default(self):
|
||||
return 'script.j2'
|
||||
|
||||
@default('template_name')
|
||||
def _template_name_default(self):
|
||||
return 'script'
|
||||
|
||||
def _get_language_exporter(self, lang_name):
|
||||
"""Find an exporter for the language name from notebook metadata.
|
||||
|
||||
Uses the nbconvert.exporters.script group of entry points.
|
||||
Returns None if no exporter is found.
|
||||
"""
|
||||
if lang_name not in self._lang_exporters:
|
||||
try:
|
||||
Exporter = entrypoints.get_single(
|
||||
'nbconvert.exporters.script', lang_name).load()
|
||||
except entrypoints.NoSuchEntryPoint:
|
||||
self._lang_exporters[lang_name] = None
|
||||
else:
|
||||
# TODO: passing config is wrong, but changing this revealed more complicated issues
|
||||
self._lang_exporters[lang_name] = Exporter(config=self.config, parent=self)
|
||||
return self._lang_exporters[lang_name]
|
||||
|
||||
def from_notebook_node(self, nb, resources=None, **kw):
|
||||
langinfo = nb.metadata.get('language_info', {})
|
||||
|
||||
# delegate to custom exporter, if specified
|
||||
exporter_name = langinfo.get('nbconvert_exporter')
|
||||
if exporter_name and exporter_name != 'script':
|
||||
self.log.debug("Loading script exporter: %s", exporter_name)
|
||||
if exporter_name not in self._exporters:
|
||||
Exporter = get_exporter(exporter_name)
|
||||
# TODO: passing config is wrong, but changing this revealed more complicated issues
|
||||
self._exporters[exporter_name] = Exporter(config=self.config, parent=self)
|
||||
exporter = self._exporters[exporter_name]
|
||||
return exporter.from_notebook_node(nb, resources, **kw)
|
||||
|
||||
# Look up a script exporter for this notebook's language
|
||||
lang_name = langinfo.get('name')
|
||||
if lang_name:
|
||||
self.log.debug("Using script exporter for language: %s", lang_name)
|
||||
exporter = self._get_language_exporter(lang_name)
|
||||
if exporter is not None:
|
||||
return exporter.from_notebook_node(nb, resources, **kw)
|
||||
|
||||
# Fall back to plain script export
|
||||
self.file_extension = langinfo.get('file_extension', '.txt')
|
||||
self.output_mimetype = langinfo.get('mimetype', 'text/plain')
|
||||
return super().from_notebook_node(nb, resources, **kw)
|
175
venv/Lib/site-packages/nbconvert/exporters/slides.py
Normal file
175
venv/Lib/site-packages/nbconvert/exporters/slides.py
Normal file
|
@ -0,0 +1,175 @@
|
|||
"""HTML slide show Exporter class"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from copy import deepcopy
|
||||
from warnings import warn
|
||||
|
||||
from traitlets import Bool, Unicode, default
|
||||
|
||||
from .html import HTMLExporter
|
||||
from ..preprocessors.base import Preprocessor
|
||||
|
||||
|
||||
class _RevealMetadataPreprocessor(Preprocessor):
|
||||
# A custom preprocessor adding convenience metadata to cells
|
||||
|
||||
def preprocess(self, nb, resources=None):
|
||||
nb = deepcopy(nb)
|
||||
|
||||
for cell in nb.cells:
|
||||
# Make sure every cell has a slide_type
|
||||
try:
|
||||
slide_type = cell.metadata.get(
|
||||
'slideshow', {}).get('slide_type', '-')
|
||||
except AttributeError:
|
||||
slide_type = '-'
|
||||
cell.metadata.slide_type = slide_type
|
||||
|
||||
# Find the first visible cell
|
||||
for index, cell in enumerate(nb.cells):
|
||||
if cell.metadata.slide_type not in {'notes', 'skip'}:
|
||||
cell.metadata.slide_type = 'slide'
|
||||
cell.metadata.slide_start = True
|
||||
cell.metadata.subslide_start = True
|
||||
first_slide_ix = index
|
||||
break
|
||||
else:
|
||||
raise ValueError("All cells are hidden, cannot create slideshow")
|
||||
|
||||
in_fragment = False
|
||||
|
||||
for index, cell in enumerate(nb.cells[first_slide_ix+1:],
|
||||
start=(first_slide_ix+1)):
|
||||
|
||||
previous_cell = nb.cells[index - 1]
|
||||
|
||||
# Slides are <section> elements in the HTML, subslides (the vertically
|
||||
# stacked slides) are also <section> elements inside the slides,
|
||||
# and fragments are <div>s within subslides. Subslide and fragment
|
||||
# elements can contain content:
|
||||
# <section>
|
||||
# <section>
|
||||
# (content)
|
||||
# <div class="fragment">(content)</div>
|
||||
# </section>
|
||||
# </section>
|
||||
|
||||
# Get the slide type. If type is subslide or slide,
|
||||
# end the last slide/subslide/fragment as applicable.
|
||||
if cell.metadata.slide_type == 'slide':
|
||||
previous_cell.metadata.slide_end = True
|
||||
cell.metadata.slide_start = True
|
||||
if cell.metadata.slide_type in {'subslide', 'slide'}:
|
||||
previous_cell.metadata.fragment_end = in_fragment
|
||||
previous_cell.metadata.subslide_end = True
|
||||
cell.metadata.subslide_start = True
|
||||
in_fragment = False
|
||||
|
||||
elif cell.metadata.slide_type == 'fragment':
|
||||
cell.metadata.fragment_start = True
|
||||
if in_fragment:
|
||||
previous_cell.metadata.fragment_end = True
|
||||
else:
|
||||
in_fragment = True
|
||||
|
||||
# The last cell will always be the end of a slide
|
||||
nb.cells[-1].metadata.fragment_end = in_fragment
|
||||
nb.cells[-1].metadata.subslide_end = True
|
||||
nb.cells[-1].metadata.slide_end = True
|
||||
|
||||
return nb, resources
|
||||
|
||||
|
||||
class SlidesExporter(HTMLExporter):
|
||||
"""Exports HTML slides with reveal.js"""
|
||||
|
||||
# Overrides from HTMLExporter
|
||||
#################################
|
||||
export_from_notebook = "Reveal.js slides"
|
||||
|
||||
@default('template_name')
|
||||
def _template_name_default(self):
|
||||
return 'reveal'
|
||||
|
||||
@default('file_extension')
|
||||
def _file_extension_default(self):
|
||||
return '.slides.html'
|
||||
|
||||
@default('template_extension')
|
||||
def _template_extension_default(self):
|
||||
return '.html.j2'
|
||||
|
||||
# Extra resources
|
||||
#################################
|
||||
reveal_url_prefix = Unicode(
|
||||
help="""The URL prefix for reveal.js (version 3.x).
|
||||
This defaults to the reveal CDN, but can be any url pointing to a copy
|
||||
of reveal.js.
|
||||
|
||||
For speaker notes to work, this must be a relative path to a local
|
||||
copy of reveal.js: e.g., "reveal.js".
|
||||
|
||||
If a relative path is given, it must be a subdirectory of the
|
||||
current directory (from which the server is run).
|
||||
|
||||
See the usage documentation
|
||||
(https://nbconvert.readthedocs.io/en/latest/usage.html#reveal-js-html-slideshow)
|
||||
for more details.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
@default('reveal_url_prefix')
|
||||
def _reveal_url_prefix_default(self):
|
||||
if 'RevealHelpPreprocessor.url_prefix' in self.config:
|
||||
warn("Please update RevealHelpPreprocessor.url_prefix to "
|
||||
"SlidesExporter.reveal_url_prefix in config files.")
|
||||
return self.config.RevealHelpPreprocessor.url_prefix
|
||||
return 'https://unpkg.com/reveal.js@4.0.2'
|
||||
|
||||
reveal_theme = Unicode('simple',
|
||||
help="""
|
||||
Name of the reveal.js theme to use.
|
||||
|
||||
We look for a file with this name under
|
||||
``reveal_url_prefix``/css/theme/``reveal_theme``.css.
|
||||
|
||||
https://github.com/hakimel/reveal.js/tree/master/css/theme has
|
||||
list of themes that ship by default with reveal.js.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
reveal_transition = Unicode('slide',
|
||||
help="""
|
||||
Name of the reveal.js transition to use.
|
||||
|
||||
The list of transitions that ships by default with reveal.js are:
|
||||
none, fade, slide, convex, concave and zoom.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
reveal_scroll = Bool(False,
|
||||
help="""
|
||||
If True, enable scrolling within each slide
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
font_awesome_url = Unicode(
|
||||
"https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.css",
|
||||
help="""
|
||||
URL to load font awesome from.
|
||||
|
||||
Defaults to loading from cdnjs.
|
||||
"""
|
||||
).tag(config=True)
|
||||
|
||||
def _init_resources(self, resources):
|
||||
resources = super()._init_resources(resources)
|
||||
if 'reveal' not in resources:
|
||||
resources['reveal'] = {}
|
||||
resources['reveal']['url_prefix'] = self.reveal_url_prefix
|
||||
resources['reveal']['theme'] = self.reveal_theme
|
||||
resources['reveal']['transition'] = self.reveal_transition
|
||||
resources['reveal']['scroll'] = self.reveal_scroll
|
||||
return resources
|
624
venv/Lib/site-packages/nbconvert/exporters/templateexporter.py
Normal file
624
venv/Lib/site-packages/nbconvert/exporters/templateexporter.py
Normal file
|
@ -0,0 +1,624 @@
|
|||
"""This module defines TemplateExporter, a highly configurable converter
|
||||
that uses Jinja2 to export notebook files into different formats.
|
||||
"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from __future__ import print_function, absolute_import
|
||||
|
||||
import os
|
||||
import uuid
|
||||
import json
|
||||
import warnings
|
||||
from pathlib import Path
|
||||
|
||||
from jupyter_core.paths import jupyter_path
|
||||
from traitlets import HasTraits, Unicode, List, Dict, Bool, default, observe, validate
|
||||
from traitlets.config import Config
|
||||
from traitlets.utils.importstring import import_item
|
||||
from jupyter_core.paths import jupyter_path
|
||||
from jupyter_core.utils import ensure_dir_exists
|
||||
from jinja2 import (
|
||||
TemplateNotFound, Environment, ChoiceLoader, FileSystemLoader, BaseLoader,
|
||||
DictLoader
|
||||
)
|
||||
|
||||
from nbconvert import filters
|
||||
from .exporter import Exporter
|
||||
|
||||
# Jinja2 extensions to load.
|
||||
JINJA_EXTENSIONS = ['jinja2.ext.loopcontrols']
|
||||
|
||||
ROOT = os.path.dirname(__file__)
|
||||
DEV_MODE = os.path.exists(os.path.join(ROOT, '../../setup.py')) and os.path.exists(os.path.join(ROOT, '../../share'))
|
||||
|
||||
|
||||
default_filters = {
|
||||
'indent': filters.indent,
|
||||
'markdown2html': filters.markdown2html,
|
||||
'markdown2asciidoc': filters.markdown2asciidoc,
|
||||
'ansi2html': filters.ansi2html,
|
||||
'filter_data_type': filters.DataTypeFilter,
|
||||
'get_lines': filters.get_lines,
|
||||
'highlight2html': filters.Highlight2HTML,
|
||||
'highlight2latex': filters.Highlight2Latex,
|
||||
'ipython2python': filters.ipython2python,
|
||||
'posix_path': filters.posix_path,
|
||||
'markdown2latex': filters.markdown2latex,
|
||||
'markdown2rst': filters.markdown2rst,
|
||||
'comment_lines': filters.comment_lines,
|
||||
'strip_ansi': filters.strip_ansi,
|
||||
'strip_dollars': filters.strip_dollars,
|
||||
'strip_files_prefix': filters.strip_files_prefix,
|
||||
'html2text': filters.html2text,
|
||||
'add_anchor': filters.add_anchor,
|
||||
'ansi2latex': filters.ansi2latex,
|
||||
'wrap_text': filters.wrap_text,
|
||||
'escape_latex': filters.escape_latex,
|
||||
'citation2latex': filters.citation2latex,
|
||||
'path2url': filters.path2url,
|
||||
'add_prompts': filters.add_prompts,
|
||||
'ascii_only': filters.ascii_only,
|
||||
'prevent_list_blocks': filters.prevent_list_blocks,
|
||||
'get_metadata': filters.get_metadata,
|
||||
'convert_pandoc': filters.convert_pandoc,
|
||||
'json_dumps': json.dumps,
|
||||
'strip_trailing_newline': filters.strip_trailing_newline,
|
||||
}
|
||||
|
||||
|
||||
# copy of https://github.com/jupyter/jupyter_server/blob/b62458a7f5ad6b5246d2f142258dedaa409de5d9/jupyter_server/config_manager.py#L19
|
||||
def recursive_update(target, new):
|
||||
"""Recursively update one dictionary using another.
|
||||
None values will delete their keys.
|
||||
"""
|
||||
for k, v in new.items():
|
||||
if isinstance(v, dict):
|
||||
if k not in target:
|
||||
target[k] = {}
|
||||
recursive_update(target[k], v)
|
||||
if not target[k]:
|
||||
# Prune empty subdicts
|
||||
del target[k]
|
||||
|
||||
elif v is None:
|
||||
target.pop(k, None)
|
||||
|
||||
else:
|
||||
target[k] = v
|
||||
return target # return for convenience
|
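A quick sketch of the merge semantics described in the docstring above (nested dicts are merged, a None value deletes its key, and empty sub-dicts are pruned); the sample dictionaries are made up for illustration:

# Assumes recursive_update() defined above is in scope.
target = {"mimetypes": {"text/html": True, "text/latex": True}, "base_template": "base"}
new = {"mimetypes": {"text/latex": None}, "preprocessors": {}}

print(recursive_update(target, new))
# -> {'mimetypes': {'text/html': True}, 'base_template': 'base'}
# 'text/latex' was deleted by the None value; the empty 'preprocessors' dict was pruned.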
||||
|
||||
|
||||
# define function at the top level to avoid pickle errors
|
||||
def deprecated(msg):
|
||||
warnings.warn(msg, DeprecationWarning)
|
||||
|
||||
|
||||
class ExtensionTolerantLoader(BaseLoader):
|
||||
"""A template loader which optionally adds a given extension when searching.
|
||||
|
||||
Constructor takes two arguments: *loader* is another Jinja loader instance
|
||||
to wrap. *extension* is the extension, which will be added to the template
|
||||
name if finding the template without it fails. This should include the dot,
|
||||
e.g. '.tpl'.
|
||||
"""
|
||||
def __init__(self, loader, extension):
|
||||
self.loader = loader
|
||||
self.extension = extension
|
||||
|
||||
def get_source(self, environment, template):
|
||||
try:
|
||||
return self.loader.get_source(environment, template)
|
||||
except TemplateNotFound:
|
||||
if template.endswith(self.extension):
|
||||
raise TemplateNotFound(template)
|
||||
return self.loader.get_source(environment, template+self.extension)
|
||||
|
||||
def list_templates(self):
|
||||
return self.loader.list_templates()
|
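A small sketch of how this loader behaves, using an in-memory DictLoader; the template name and contents are illustrative:

# Assumes ExtensionTolerantLoader defined above is in scope.
from jinja2 import Environment, DictLoader

inner = DictLoader({"index.html.j2": "Hello {{ name }}"})
env = Environment(loader=ExtensionTolerantLoader(inner, ".html.j2"))

print(env.get_template("index.html.j2").render(name="nbconvert"))  # exact name resolves directly
print(env.get_template("index").render(name="nbconvert"))          # extension appended on retry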
||||
|
||||
|
||||
class TemplateExporter(Exporter):
|
||||
"""
|
||||
Exports notebooks into other file formats. Uses Jinja 2 templating engine
|
||||
to output new formats. Inherit from this class if you are creating a new
|
||||
template type along with new filters/preprocessors. If the filters/
|
||||
preprocessors provided by default suffice, there is no need to inherit from
|
||||
this class. Instead, override the template_file and file_extension
|
||||
traits via a config file.
|
||||
|
||||
Filters available by default for templates:
|
||||
|
||||
{filters}
|
||||
"""
|
||||
|
||||
# finish the docstring
|
||||
__doc__ = __doc__.format(filters='- ' + '\n - '.join(
|
||||
sorted(default_filters.keys())))
|
||||
|
||||
_template_cached = None
|
||||
|
||||
def _invalidate_template_cache(self, change=None):
|
||||
self._template_cached = None
|
||||
|
||||
@property
|
||||
def template(self):
|
||||
if self._template_cached is None:
|
||||
self._template_cached = self._load_template()
|
||||
return self._template_cached
|
||||
|
||||
_environment_cached = None
|
||||
|
||||
def _invalidate_environment_cache(self, change=None):
|
||||
self._environment_cached = None
|
||||
self._invalidate_template_cache()
|
||||
|
||||
@property
|
||||
def environment(self):
|
||||
if self._environment_cached is None:
|
||||
self._environment_cached = self._create_environment()
|
||||
return self._environment_cached
|
||||
|
||||
@property
|
||||
def default_config(self):
|
||||
c = Config({
|
||||
'RegexRemovePreprocessor': {
|
||||
'enabled': True
|
||||
},
|
||||
'TagRemovePreprocessor': {
|
||||
'enabled': True
|
||||
}
|
||||
})
|
||||
c.merge(super().default_config)
|
||||
return c
|
||||
|
||||
template_name = Unicode(help="Name of the template to use"
|
||||
).tag(config=True, affects_template=True)
|
||||
|
||||
template_file = Unicode(None, allow_none=True,
|
||||
help="Name of the template file to use"
|
||||
).tag(config=True, affects_template=True)
|
||||
|
||||
raw_template = Unicode('', help="raw template string").tag(affects_environment=True)
|
||||
|
||||
enable_async = Bool(False, help="Enable Jinja async template execution").tag(affects_environment=True)
|
||||
|
||||
_last_template_file = ""
|
||||
_raw_template_key = "<memory>"
|
||||
|
||||
@validate('template_name')
|
||||
def _template_name_validate(self, change):
|
||||
template_name = change['value']
|
||||
if template_name and template_name.endswith('.tpl'):
|
||||
warnings.warn(
|
||||
f"5.x style template name passed '{self.template_name}'. Use --template-name for the template directory with a index.<ext>.j2 file and/or --template-file to denote a different template.",
|
||||
DeprecationWarning)
|
||||
directory, self.template_file = os.path.split(self.template_name)
|
||||
if directory:
|
||||
directory, template_name = os.path.split(directory)
|
||||
if directory:
|
||||
if os.path.isabs(directory):
|
||||
self.extra_template_basedirs = [directory]
|
||||
return template_name
|
||||
|
||||
@observe('template_file')
|
||||
def _template_file_changed(self, change):
|
||||
new = change['new']
|
||||
if new == 'default':
|
||||
self.template_file = self.default_template
|
||||
return
|
||||
# check if template_file is a file path
|
||||
# rather than a name already on template_path
|
||||
full_path = os.path.abspath(new)
|
||||
if os.path.isfile(full_path):
|
||||
directory, self.template_file = os.path.split(full_path)
|
||||
self.extra_template_paths = [directory] + self.extra_template_paths
|
||||
# While not strictly an invalid template file name, the extension hints that there isn't a template directory involved
|
||||
if self.template_file.endswith('.tpl'):
|
||||
warnings.warn(
|
||||
f"5.x style template file passed '{new}'. Use --template-name for the template directory with a index.<ext>.j2 file and/or --template-file to denote a different template.",
|
||||
DeprecationWarning)
|
||||
|
||||
@default('template_file')
|
||||
def _template_file_default(self):
|
||||
if self.template_extension:
|
||||
return 'index' + self.template_extension
|
||||
|
||||
@observe('raw_template')
|
||||
def _raw_template_changed(self, change):
|
||||
if not change['new']:
|
||||
self.template_file = self._last_template_file
|
||||
self._invalidate_template_cache()
|
||||
|
||||
template_paths = List(['.']).tag(config=True, affects_environment=True)
|
||||
extra_template_basedirs = List().tag(config=True, affects_environment=True)
|
||||
extra_template_paths = List([]).tag(config=True, affects_environment=True)
|
||||
|
||||
@default('extra_template_basedirs')
|
||||
def _default_extra_template_basedirs(self):
|
||||
return [os.getcwd()]
|
||||
|
||||
|
||||
|
||||
template_data_paths = List(
|
||||
jupyter_path('nbconvert','templates'),
|
||||
help="Path where templates can be installed too."
|
||||
).tag(affects_environment=True)
|
||||
|
||||
#Extension that the template files use.
|
||||
template_extension = Unicode().tag(config=True, affects_environment=True)
|
||||
|
||||
@default('template_extension')
|
||||
def _template_extension_default(self):
|
||||
if self.file_extension:
|
||||
return self.file_extension + '.j2'
|
||||
else:
|
||||
return self.file_extension
|
||||
|
||||
exclude_input = Bool(False,
|
||||
help = "This allows you to exclude code cell inputs from all templates if set to True."
|
||||
).tag(config=True)
|
||||
|
||||
exclude_input_prompt = Bool(False,
|
||||
help = "This allows you to exclude input prompts from all templates if set to True."
|
||||
).tag(config=True)
|
||||
|
||||
exclude_output = Bool(False,
|
||||
help = "This allows you to exclude code cell outputs from all templates if set to True."
|
||||
).tag(config=True)
|
||||
|
||||
exclude_output_prompt = Bool(False,
|
||||
help = "This allows you to exclude output prompts from all templates if set to True."
|
||||
).tag(config=True)
|
||||
|
||||
exclude_code_cell = Bool(False,
|
||||
help = "This allows you to exclude code cells from all templates if set to True."
|
||||
).tag(config=True)
|
||||
|
||||
exclude_markdown = Bool(False,
|
||||
help = "This allows you to exclude markdown cells from all templates if set to True."
|
||||
).tag(config=True)
|
||||
|
||||
exclude_raw = Bool(False,
|
||||
help = "This allows you to exclude raw cells from all templates if set to True."
|
||||
).tag(config=True)
|
||||
|
||||
exclude_unknown = Bool(False,
|
||||
help = "This allows you to exclude unknown cells from all templates if set to True."
|
||||
).tag(config=True)
|
||||
|
||||
extra_loaders = List(
|
||||
help="Jinja loaders to find templates. Will be tried in order "
|
||||
"before the default FileSystem ones.",
|
||||
).tag(affects_environment=True)
|
||||
|
||||
filters = Dict(
|
||||
help="""Dictionary of filters, by name and namespace, to add to the Jinja
|
||||
environment."""
|
||||
).tag(config=True, affects_environment=True)
|
||||
|
||||
raw_mimetypes = List(
|
||||
help="""formats of raw cells to be included in this Exporter's output."""
|
||||
).tag(config=True)
|
||||
|
||||
@default('raw_mimetypes')
|
||||
def _raw_mimetypes_default(self):
|
||||
return [self.output_mimetype, '']
|
||||
|
||||
# TODO: passing config is wrong, but changing this revealed more complicated issues
|
||||
def __init__(self, config=None, **kw):
|
||||
"""
|
||||
Public constructor
|
||||
|
||||
Parameters
|
||||
----------
|
||||
config : config
|
||||
User configuration instance.
|
||||
extra_loaders : list[of Jinja Loaders]
|
||||
ordered list of Jinja loader to find templates. Will be tried in order
|
||||
before the default FileSystem ones.
|
||||
template_file : str (optional, kw arg)
|
||||
Template to use when exporting.
|
||||
"""
|
||||
super().__init__(config=config, **kw)
|
||||
|
||||
self.observe(self._invalidate_environment_cache,
|
||||
list(self.traits(affects_environment=True)))
|
||||
self.observe(self._invalidate_template_cache,
|
||||
list(self.traits(affects_template=True)))
|
||||
|
||||
|
||||
def _load_template(self):
|
||||
"""Load the Jinja template object from the template file
|
||||
|
||||
This is triggered by various trait changes that would change the template.
|
||||
"""
|
||||
|
||||
# this gives precedence to a raw_template if present
|
||||
with self.hold_trait_notifications():
|
||||
if self.template_file != self._raw_template_key:
|
||||
self._last_template_file = self.template_file
|
||||
if self.raw_template:
|
||||
self.template_file = self._raw_template_key
|
||||
|
||||
if not self.template_file:
|
||||
raise ValueError("No template_file specified!")
|
||||
|
||||
# First try to load the
|
||||
# template by name with extension added, then try loading the template
|
||||
# as if the name is explicitly specified.
|
||||
template_file = self.template_file
|
||||
self.log.debug("Attempting to load template %s", template_file)
|
||||
self.log.debug(" template_paths: %s", os.pathsep.join(self.template_paths))
|
||||
return self.environment.get_template(template_file)
|
||||
|
||||
def from_notebook_node(self, nb, resources=None, **kw):
|
||||
"""
|
||||
Convert a notebook from a notebook node instance.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
nb : `nbformat.NotebookNode`
|
||||
Notebook node
|
||||
resources : dict
|
||||
Additional resources that can be accessed read/write by
|
||||
preprocessors and filters.
|
||||
"""
|
||||
nb_copy, resources = super().from_notebook_node(nb, resources, **kw)
|
||||
resources.setdefault('raw_mimetypes', self.raw_mimetypes)
|
||||
resources['global_content_filter'] = {
|
||||
'include_code': not self.exclude_code_cell,
|
||||
'include_markdown': not self.exclude_markdown,
|
||||
'include_raw': not self.exclude_raw,
|
||||
'include_unknown': not self.exclude_unknown,
|
||||
'include_input': not self.exclude_input,
|
||||
'include_output': not self.exclude_output,
|
||||
'include_input_prompt': not self.exclude_input_prompt,
|
||||
'include_output_prompt': not self.exclude_output_prompt,
|
||||
'no_prompt': self.exclude_input_prompt and self.exclude_output_prompt,
|
||||
}
|
||||
|
||||
# Top level variables are passed to the template_exporter here.
|
||||
output = self.template.render(nb=nb_copy, resources=resources)
|
||||
output = output.lstrip('\r\n')
|
||||
return output, resources
|
||||
|
||||
def _register_filter(self, environ, name, jinja_filter):
|
||||
"""
|
||||
Register a filter.
|
||||
A filter is a function that accepts and acts on one string.
|
||||
The filters are accessible within the Jinja templating engine.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
name : str
|
||||
name to give the filter in the Jinja engine
|
||||
filter : filter
|
||||
"""
|
||||
if jinja_filter is None:
|
||||
raise TypeError('filter')
|
||||
isclass = isinstance(jinja_filter, type)
|
||||
constructed = not isclass
|
||||
|
||||
# Handle the filter's registration based on its type
|
||||
if constructed and isinstance(jinja_filter, (str,)):
|
||||
#filter is a string, import the namespace and recursively call
|
||||
#this register_filter method
|
||||
filter_cls = import_item(jinja_filter)
|
||||
return self._register_filter(environ, name, filter_cls)
|
||||
|
||||
if constructed and hasattr(jinja_filter, '__call__'):
|
||||
#filter is a function, no need to construct it.
|
||||
environ.filters[name] = jinja_filter
|
||||
return jinja_filter
|
||||
|
||||
elif isclass and issubclass(jinja_filter, HasTraits):
|
||||
#filter is configurable. Make sure to pass in new default for
|
||||
#the enabled flag if one was specified.
|
||||
filter_instance = jinja_filter(parent=self)
|
||||
self._register_filter(environ, name, filter_instance)
|
||||
|
||||
elif isclass:
|
||||
#filter is not configurable, construct it
|
||||
filter_instance = jinja_filter()
|
||||
self._register_filter(environ, name, filter_instance)
|
||||
|
||||
else:
|
||||
#filter is an instance of something without a __call__
|
||||
#attribute.
|
||||
raise TypeError('filter')
|
||||
|
||||
def register_filter(self, name, jinja_filter):
|
||||
"""
|
||||
Register a filter.
|
||||
A filter is a function that accepts and acts on one string.
|
||||
The filters are accessible within the Jinja templating engine.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
name : str
|
||||
name to give the filter in the Jinja engine
|
||||
filter : filter
|
||||
"""
|
||||
return self._register_filter(self.environment, name, jinja_filter)
|
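For example, a plain callable can be registered and then used from any template rendered by the exporter; the filter name below is illustrative:

# Sketch: register a simple function as a Jinja filter.
from nbconvert import HTMLExporter

def shout(text):
    # Illustrative filter: upper-case a string.
    return text.upper()

exporter = HTMLExporter()
exporter.register_filter("shout", shout)
# Inside a template: {{ "hello" | shout }}  ->  HELLO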
||||
|
||||
def default_filters(self):
|
||||
"""Override in subclasses to provide extra filters.
|
||||
|
||||
This should return an iterable of 2-tuples: (name, class-or-function).
|
||||
You should call the method on the parent class and include the filters
|
||||
it provides.
|
||||
|
||||
If a name is repeated, the last filter provided wins. Filters from
|
||||
user-supplied config win over filters provided by classes.
|
||||
"""
|
||||
return default_filters.items()
|
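A subclass extending the defaults might look like the following sketch; the class name and the extra filter are illustrative:

class CustomExporter(TemplateExporter):

    def default_filters(self):
        # Keep the parent's filters, then add one of our own.
        for pair in super().default_filters():
            yield pair
        yield ("reverse_text", lambda s: s[::-1])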
||||
|
||||
def _create_environment(self):
|
||||
"""
|
||||
Create the Jinja templating environment.
|
||||
"""
|
||||
paths = self.template_paths
|
||||
self.log.debug('Template paths:\n\t%s', '\n\t'.join(paths))
|
||||
|
||||
loaders = self.extra_loaders + [
|
||||
ExtensionTolerantLoader(FileSystemLoader(paths), self.template_extension),
|
||||
DictLoader({self._raw_template_key: self.raw_template})
|
||||
]
|
||||
environment = Environment(
|
||||
loader=ChoiceLoader(loaders),
|
||||
extensions=JINJA_EXTENSIONS,
|
||||
enable_async=self.enable_async
|
||||
)
|
||||
|
||||
environment.globals['uuid4'] = uuid.uuid4
|
||||
|
||||
# Add default filters to the Jinja2 environment
|
||||
for key, value in self.default_filters():
|
||||
self._register_filter(environment, key, value)
|
||||
|
||||
# Load user filters. Overwrite existing filters if need be.
|
||||
if self.filters:
|
||||
for key, user_filter in self.filters.items():
|
||||
self._register_filter(environment, key, user_filter)
|
||||
|
||||
return environment
|
||||
|
||||
def _init_preprocessors(self):
|
||||
super()._init_preprocessors()
|
||||
conf = self._get_conf()
|
||||
preprocessors = conf.get('preprocessors', {})
|
||||
# preprocessors is a dict for three reasons
|
||||
# * We rely on recursive_update, which can only merge dicts, lists will be overwritten
|
||||
# * We can use the key with numerical prefixing to guarantee ordering (/etc/*.d/XY-file style)
|
||||
# * We can disable preprocessors by overwriting the value with None
|
||||
for key, preprocessor in sorted(preprocessors.items(), key=lambda x: x[0]):
|
||||
if preprocessor is not None:
|
||||
kwargs = preprocessor.copy()
|
||||
preprocessor_cls = kwargs.pop('type')
|
||||
preprocessor_cls = import_item(preprocessor_cls)
|
||||
preprocessor = preprocessor_cls(**kwargs)
|
||||
self.register_preprocessor(preprocessor)
|
||||
|
||||
def _get_conf(self):
|
||||
conf = {} # the configuration once all conf files are merged
|
||||
for path in map(Path, self.template_paths):
|
||||
conf_path = path / 'conf.json'
|
||||
if conf_path.exists():
|
||||
with conf_path.open() as f:
|
||||
conf = recursive_update(conf, json.load(f))
|
||||
return conf
|
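The merged result of _get_conf is a plain dict; a template's conf.json enabling a single preprocessor could merge to something like the sketch below (the numeric key prefix and the preprocessor chosen are illustrative):

# Sketch of a merged conf dict as consumed by _init_preprocessors above.
conf = {
    "base_template": "base",
    "mimetypes": {"text/html": True},
    "preprocessors": {
        # Keys are sorted, so numeric prefixes control ordering;
        # overriding a value with None in a later conf.json disables it.
        "100-pygments": {
            "type": "nbconvert.preprocessors.CSSHTMLHeaderPreprocessor",
            "enabled": True,
        },
    },
}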
||||
|
||||
@default('template_paths')
|
||||
def _template_paths(self, prune=True, root_dirs=None):
|
||||
paths = []
|
||||
root_dirs = self.get_prefix_root_dirs()
|
||||
template_names = self.get_template_names()
|
||||
for template_name in template_names:
|
||||
for base_dir in self.extra_template_basedirs:
|
||||
path = os.path.join(base_dir, template_name)
|
||||
if not prune or os.path.exists(path):
|
||||
paths.append(path)
|
||||
for root_dir in root_dirs:
|
||||
base_dir = os.path.join(root_dir, 'nbconvert', 'templates')
|
||||
path = os.path.join(base_dir, template_name)
|
||||
if not prune or os.path.exists(path):
|
||||
paths.append(path)
|
||||
|
||||
for root_dir in root_dirs:
|
||||
# we include root_dir for when we want to be very explicit, e.g.
|
||||
# {% extends 'nbconvert/templates/classic/base.html' %}
|
||||
paths.append(root_dir)
|
||||
# we include base_dir for when we want to be explicit, but less than root_dir, e.g.
|
||||
# {% extends 'classic/base.html' %}
|
||||
base_dir = os.path.join(root_dir, 'nbconvert', 'templates')
|
||||
paths.append(base_dir)
|
||||
|
||||
compatibility_dir = os.path.join(root_dir, 'nbconvert', 'templates', 'compatibility')
|
||||
paths.append(compatibility_dir)
|
||||
|
||||
additional_paths = self.template_data_paths
|
||||
for path in additional_paths:
|
||||
try:
|
||||
ensure_dir_exists(path, mode=0o700)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
return self.extra_template_paths + additional_paths + paths
|
||||
|
||||
@classmethod
|
||||
def get_compatibility_base_template_conf(cls, name):
|
||||
# Hard-coded base template confs to use for backwards compatibility for 5.x-only templates
|
||||
if name == 'display_priority':
|
||||
return dict(base_template='base')
|
||||
if name == 'full':
|
||||
return dict(base_template='classic', mimetypes={"text/html": True})
|
||||
|
||||
def get_template_names(self):
|
||||
# Build the list of template names, where each successive name is the base template of the one before it
|
||||
template_names = []
|
||||
root_dirs = self.get_prefix_root_dirs()
|
||||
base_template = self.template_name
|
||||
merged_conf = {} # the configuration once all conf files are merged
|
||||
while base_template is not None:
|
||||
template_names.append(base_template)
|
||||
conf = {}
|
||||
found_at_least_one = False
|
||||
for base_dir in self.extra_template_basedirs:
|
||||
template_dir = os.path.join(base_dir, base_template)
|
||||
if os.path.exists(template_dir):
|
||||
found_at_least_one = True
|
||||
conf_file = os.path.join(template_dir, 'conf.json')
|
||||
if os.path.exists(conf_file):
|
||||
with open(conf_file) as f:
|
||||
conf = recursive_update(json.load(f), conf)
|
||||
for root_dir in root_dirs:
|
||||
template_dir = os.path.join(root_dir, 'nbconvert', 'templates', base_template)
|
||||
if os.path.exists(template_dir):
|
||||
found_at_least_one = True
|
||||
conf_file = os.path.join(template_dir, 'conf.json')
|
||||
if os.path.exists(conf_file):
|
||||
with open(conf_file) as f:
|
||||
conf = recursive_update(json.load(f), conf)
|
||||
if not found_at_least_one:
|
||||
# Check for backwards compatibility template names
|
||||
for root_dir in root_dirs:
|
||||
compatibility_file = base_template + '.tpl'
|
||||
compatibility_path = os.path.join(root_dir, 'nbconvert', 'templates', 'compatibility', compatibility_file)
|
||||
if os.path.exists(compatibility_path):
|
||||
found_at_least_one = True
|
||||
warnings.warn(
|
||||
f"5.x template name passed '{self.template_name}'. Use 'lab' or 'classic' for new template usage.",
|
||||
DeprecationWarning)
|
||||
self.template_file = compatibility_file
|
||||
conf = self.get_compatibility_base_template_conf(base_template)
|
||||
self.template_name = conf.get('base_template')
|
||||
break
|
||||
if not found_at_least_one:
|
||||
paths = "\n\t".join(root_dirs)
|
||||
raise ValueError('No template sub-directory with name %r found in the following paths:\n\t%s' % (base_template, paths))
|
||||
merged_conf = recursive_update(dict(conf), merged_conf)
|
||||
base_template = conf.get('base_template')
|
||||
conf = merged_conf
|
||||
mimetypes = [mimetype for mimetype, enabled in conf.get('mimetypes', {}).items() if enabled]
|
||||
if self.output_mimetype and self.output_mimetype not in mimetypes and mimetypes:
|
||||
supported_mimetypes = '\n\t'.join(mimetypes)
|
||||
raise ValueError('Unsupported mimetype %r for template %r, mimetypes supported are: \n\t%s' %\
|
||||
(self.output_mimetype, self.template_name, supported_mimetypes))
|
||||
return template_names
|
||||
|
||||
def get_prefix_root_dirs(self):
|
||||
# We look at the usual jupyter locations, and for development purposes also
|
||||
# relative to the package directory (first entry, meaning with highest precedence)
|
||||
root_dirs = []
|
||||
if DEV_MODE:
|
||||
root_dirs.append(os.path.abspath(os.path.join(ROOT, '..', '..', 'share', 'jupyter')))
|
||||
root_dirs.extend(jupyter_path())
|
||||
return root_dirs
|
||||
|
||||
def _init_resources(self, resources):
|
||||
resources = super()._init_resources(resources)
|
||||
resources['deprecated'] = deprecated
|
||||
return resources
|
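As a usage sketch for the exclude_* traits defined in this file, the flags can be combined through a traitlets Config (the input filename is a placeholder):

from traitlets.config import Config
from nbconvert import HTMLExporter

c = Config()
c.TemplateExporter.exclude_input = True           # hide code cell inputs entirely
c.TemplateExporter.exclude_input_prompt = True
c.TemplateExporter.exclude_output_prompt = True

exporter = HTMLExporter(config=c)
body, resources = exporter.from_filename("example.ipynb")   # hypothetical notebook
# resources['global_content_filter'] now reports include_input=False and no_prompt=True.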
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
38
venv/Lib/site-packages/nbconvert/exporters/tests/base.py
Normal file
38
venv/Lib/site-packages/nbconvert/exporters/tests/base.py
Normal file
|
@@ -0,0 +1,38 @@
|
|||
"""Base TestCase class for testing Exporters"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import os
|
||||
|
||||
from ...tests.base import TestsBase
|
||||
|
||||
all_raw_mimetypes = {
|
||||
'text/x-python',
|
||||
'text/markdown',
|
||||
'text/html',
|
||||
'text/restructuredtext',
|
||||
'text/latex',
|
||||
}
|
||||
|
||||
class ExportersTestsBase(TestsBase):
|
||||
"""Contains base test functions for exporters"""
|
||||
|
||||
exporter_class = None
|
||||
should_include_raw = None
|
||||
|
||||
def _get_notebook(self, nb_name='notebook2.ipynb'):
|
||||
return os.path.join(self._get_files_path(), nb_name)
|
||||
|
||||
def test_raw_cell_inclusion(self):
|
||||
"""test raw cell inclusion based on raw_mimetype metadata"""
|
||||
if self.should_include_raw is None:
|
||||
return
|
||||
exporter = self.exporter_class()
|
||||
(output, resources) = exporter.from_filename(self._get_notebook('rawtest.ipynb'))
|
||||
for inc in self.should_include_raw:
|
||||
self.assertIn('raw %s' % inc, output, "should include %s" % inc)
|
||||
self.assertIn('no raw_mimetype metadata', output)
|
||||
for exc in all_raw_mimetypes.difference(self.should_include_raw):
|
||||
self.assertNotIn('raw %s' % exc, output, "should exclude %s" % exc)
|
||||
self.assertNotIn('never be included', output)
|
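A concrete test case would plug in like the sketch below; the exporter and the expected raw formats are illustrative, chosen to match the HTML case:

# Sketch: a test module reusing the shared raw-cell inclusion check,
# living alongside this base module in nbconvert/exporters/tests.
from nbconvert.exporters.html import HTMLExporter
from .base import ExportersTestsBase


class TestHTMLExporter(ExportersTestsBase):
    """Runs test_raw_cell_inclusion against HTMLExporter."""

    exporter_class = HTMLExporter
    should_include_raw = ['html']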
48
venv/Lib/site-packages/nbconvert/exporters/tests/cheese.py
Normal file
48
venv/Lib/site-packages/nbconvert/exporters/tests/cheese.py
Normal file
|
@@ -0,0 +1,48 @@
|
|||
"""
|
||||
Contains CheesePreprocessor
|
||||
"""
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (c) 2013, the IPython Development Team.
|
||||
#
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Imports
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
from ...preprocessors.base import Preprocessor
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Classes
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
class CheesePreprocessor(Preprocessor):
|
||||
"""
|
||||
Adds a cheese tag to the resources object
|
||||
"""
|
||||
|
||||
|
||||
def __init__(self, **kw):
|
||||
"""
|
||||
Public constructor
|
||||
"""
|
||||
super().__init__(**kw)
|
||||
|
||||
|
||||
def preprocess(self, nb, resources):
|
||||
"""
|
||||
Preprocessing to apply to each notebook.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
nb : NotebookNode
|
||||
Notebook being converted
|
||||
resources : dictionary
|
||||
Additional resources used in the conversion process. Allows
|
||||
preprocessors to pass variables into the Jinja engine.
|
||||
"""
|
||||
resources['cheese'] = 'real'
|
||||
return nb, resources
|
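This test preprocessor can be attached to an exporter with register_preprocessor; a short sketch (the exporter chosen is illustrative):

# Sketch: after conversion, resources['cheese'] == 'real'.
from nbconvert.exporters.python import PythonExporter

exporter = PythonExporter()
exporter.register_preprocessor(CheesePreprocessor, enabled=True)
# exporter.from_notebook_node(nb) would now run CheesePreprocessor
# and return a resources dict containing the 'cheese' key.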
File diff suppressed because one or more lines are too long
|
@@ -0,0 +1,4 @@
|
|||
{%- extends 'lab/index.html.j2' -%}
|
||||
{%- block body_footer -%}
|
||||
UNIQUE
|
||||
{%- endblock body_footer -%}
|
File diff suppressed because one or more lines are too long
|
@@ -0,0 +1,240 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"[<matplotlib.lines.Line2D at 0x10f695240>]"
|
||||
]
|
||||
},
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"image/png": [
|
||||
"iVBORw0KGgoAAAANSUhEUgAAAu0AAAH/CAYAAADjSONqAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\n",
|
||||
"AAAWJQAAFiUBSVIk8AAAIABJREFUeJzt3X/M7ndd3/HXezmJG7QpEdqyrbD21OYgLMbNmRK6aM+w\n",
|
||||
"DKMhwDS4RNnmkM0xLIqMyTY5JSEwNy2jiJOxOu1MiEQLW8xoQTBDKttcIJlz/YE9BaFWZdACtoVp\n",
|
||||
"3/vjum56zn3u69w/rl/f73U9HsmVb3t9r/tzf+/e33Puz/3s5/p+q7sDAAAM159Z9wEAAADnZ9IO\n",
|
||||
"AAADZ9IOAAADZ9IOAAADZ9IOAAADZ9IOAAADZ9IOAAADZ9IOAAADZ9IOAAADZ9IOAAADZ9IOAAAD\n",
|
||||
"Z9IOAAADZ9IOAAADZ9IOAAADN9ekvaq+q6puqqoPV9UXquqxqrrliGNdVlU3V9X9VfVoVZ2uqhur\n",
|
||||
"6knzHCMAAIzdsTk//p8n+YYkX0zy6STPSNKHHaSqrkxyR5KLk7wnyZ1Jrk5yfZLnV9U13f25OY8V\n",
|
||||
"AABGad7lMa9KclV3X5TkB+cY5+2ZTNhf2d0v7u7Xdfdzk9yY5ESSN855nAAAMFrVfegwvvdAVdcm\n",
|
||||
"+WCS/9jdLz3Ex12Z5J4kp7v7yl37LkjyQCb1/tLufnghBwsAACMyhDeinpxub9+9o7u/lOQjSZ6Y\n",
|
||||
"5NmrPCgAABiKIUzaT0y3d8/Yf890e9UKjgUAAAZnCJP2i6bbh2bs33neVWQAANhK8149ZlCqajEL\n",
|
||||
"9AEAYB/dXav6XEMo7Tsl/aIZ+3eef3AFxwIAAIMzhNJ+53R7Ysb+nbXss9a876GTybXjX53knd2H\n",
|
||||
"v3Y8m6WqepW/DTN8zgn24rxgL84LdlvH6o4hlPYPTbfXVdVZfyCq6sIk1yT54yQfPcSYtya5MMk7\n",
|
||||
"ktxWlacv4kABAGAdVjZpr6pjVfWMqjp+5vPdfW8ml3u8Iskrdn3YDUmekOSW7n7kEJ/ubyX520n+\n",
|
||||
"b5Lrkvx2VX6gKn5LBgBgdOa6uVJVvTDJC6f/+tQkz0tyb5LfmD73R939mulrL5/u+2R3X7FrnONJ\n",
|
||||
"7khySZL3ZrJk5uok1ya5K8lzuvvzBzieTh5/U0BVLk3yM0leNH3J+5O8rDufOvQXy6j5X5vs5pxg\n",
|
||||
"L84L9uK8YLfdc86VfM45J+2vT/L65Jw14ztfwH3dfXz62sszmbR/9bldY12W5A1Jnp/kyUnuz2SZ\n",
|
||||
"yw3dPetykLvHOOc/4LSuvyTJ26bjWuu+hfyFy27OCfbivGAvzgt2G92kfWjO9x9Qdd9u/sJlN+cE\n",
|
||||
"e3FesBfnBbutY9I+hDeirkR3/iDWugMAMEJbM2lPku50d96V5FlxhZltc8O6D4DBcU6wF+cFe3Fe\n",
|
||||
"sHZbszzm3Nda6w4AwOFZ0z6no/wHtNYdAIDDsKZ9Dax1BwBg6La+tJ/98ao7AADnp7SvmeoOAMAQ\n",
|
||||
"Ke0zx1LdAQA4l9I+IKo7AABDobQfaFzVHQCACaV9oFR3AADWSWk/9OdQ3QEAtpnSPgKqOwAAq6a0\n",
|
||||
"z/X5VHcAgG2jtI+M6g4AwCoo7Qv73Ko7AMA2UNpHTHUHAGBZlPalHIfqDgCwqZT2DaG6AwCwSEr7\n",
|
||||
"kqnuAACbRWnfQKo7AADzUtpXSHUHABg/pX3Dqe4AAByF0r4mqjsAwDgp7VtEdQcA4KCU9gFQ3QEA\n",
|
||||
"xkNp31KqOwAA56O0D4zqDgAwbEo7qjsAAOdQ2gdMdQcAGB6lnbOo7gAAJEr7aKjuAADDoLQzk+oO\n",
|
||||
"ALC9lPYRUt0BANZHaedAVHcAgO2itI+c6g4AsFpKO4emugMAbD6lfYOo7gAAy6e0MxfVHQBgMynt\n",
|
||||
"G0p1BwBYDqWdhVHdAQA2h9K+BVR3AIDFUdpZCtUdAGDclPYto7oDAMxHaWfpVHcAgPFR2reY6g4A\n",
|
||||
"cHhKOyulugMAjIPSThLVHQDgoJR21kZ1BwAYLqWdc6juAACzKe0MguoOADAsSjvnpboDAJxNaWdw\n",
|
||||
"VHcAgPVT2jkw1R0AQGln4FR3AID1UNo5EtUdANhWSjujoboDAKyO0s7cVHcAYJso7YyS6g4AsFxK\n",
|
||||
"OwulugMAm05pZ/RUdwCAxVPaWRrVHQDYREo7G0V1BwBYDKWdlVDdAYBNobSzsVR3AICjU9pZOdUd\n",
|
||||
"ABgzpZ2toLoDAByO0s5aqe4AwNgo7Wwd1R0AYH9KO4OhugMAY6C0s9VUdwCAvSntDJLqDgAMldIO\n",
|
||||
"U6o7AMDjlHYGT3UHAIZEaYc9qO4AwLZbyKS9qi6rqpur6v6qerSqTlfVjVX1pEOO8x1VdXtVfbqq\n",
|
||||
"Hq6q362qX6qqZy/iOBmv7nR33pXkWUluTXJhknckua0qT1/rwQEALNncy2Oq6sokdyS5OMl7ktyZ\n",
|
||||
"5OokJ5PcleSa7v7cAcb5l0lek+Sz03E+m+SqJC9IcizJS7v7F/cZw/KYLTCt6y9J8rYkT07yxSSv\n",
|
||||
"TvLO7mzOei8AYJDWMedcxKT9tkyWK7yyu3/6jOd/MskPJ/nZ7v7BfcZ4apLPJPmDJN/Q3Z89Y9+1\n",
|
||||
"ST6Y5HR3X7nPOCbtW8RadwBgHUY3aZ9W9nuyx4S6qi5I8kCSTnJpdz98nnGuTvKbSd7b3S/aY/8X\n",
|
||||
"knR3X7TP8Zi0bxnVHQBYtTG+EfXkdHv77h3d/aUkH0nyxCT7rUm/O8lXklxdVU8+c0dVfUuSC5J8\n",
|
||||
"YM5jZQNZ6w4AbIN5J+0nptu7Z+y/Z7q96nyDdPfnk7w2yaVJfqeq3lFVb6qqX0pyWya/FPyDOY+V\n",
|
||||
"DeYKMwDAJpt30r6zXOWhGft3nt/3KjLd/W8ymXQdS/KyTCbx35Xk95L8/Jnr3GEvqjsAsKkGc532\n",
|
||||
"qvonSd6d5OeSHE/yhCTflOTeJL84vbrMQcfq8zxOLeP4GQ7VHQCYV1WdmjWfXMfxzDtp3ynps94g\n",
|
||||
"uvP8g+cbZHqFmDdn8kbUH+3u+7r70e7+WCZXBvlMkldX1RUHOajurvM8Th1kDMZNdQcA5tHdp2bN\n",
|
||||
"J9dxPPNO2u+cbk/M2L+zln3Wmvcd3zndfmj3ju5+JMn/yORYv/GwB8h2U90BgE0w76R9Z5J9XVWd\n",
|
||||
"NQGqqguTXJPkj5N8dJ9xvma6vWTG/oun268c5SDZbqo7ADB2c03au/veTK7sckWSV+zafUMm69Jv\n",
|
||||
"mdbyVNWxqnpGVR3f9dr/Ot2+vKr+wpk7qurbM5n8P5LJnVfhSFR3AGCsFnFH1OOZTKYvSfLeTJbM\n",
|
||||
"XJ3k2iR3JXnO9JKOqarLM3lj6Se7+4ozxqhMLu34bZncHOfWTO6O+vWZLJ3pJK/q7pv2ORY3V+JA\n",
|
||||
"3E0VADiq0d0R9auDVF2W5A1Jnp/JXSnvz2TifUN3P3TG6y7PZNJ+X3cf3zXGsUxq/fckeWYmlf7/\n",
|
||||
"JvnvSd7a3fveXMmkncNwN1UA4ChGO2kfCpN2jkJ1BwAOYx1zzsFcpx3WxVp3AGDolHY4g+oOAOxH\n",
|
||||
"aYc1U90BgCFS2mEG1R0A2IvSDgOiugMAQ6G0wwGo7gDADqUdBkp1BwDWSWmHQ1LdAWC7Ke0wAqo7\n",
|
||||
"ALBqSjvMQXUHgO2jtMPIqO4AwCoo7bAgqjsAbAelHUZMdQcAlkVphyVQ3QFgcyntsCFUdwBgkZR2\n",
|
||||
"WDLVHQA2i9IOG0h1BwDmpbTDCqnuADB+SjtsONUdADgKpR3WRHUHgHFS2mGLqO4AwEEp7TAAqjsA\n",
|
||||
"jIfSDltKdQcAzkdph4FR3QFg2JR2QHUHAM6htMOAqe4AMDxKO3AW1R0ASJR2GA3VHQCGQWkHZlLd\n",
|
||||
"AWB7Ke0wQqo7AKyP0g4ciOoOANtFaYeRU90BYLWUduDQVHcA2HxKO2wQ1R0Alk9pB+aiugPAZlLa\n",
|
||||
"YUOp7gCwHEo7sDCqOwBsDqUdtoDqDgCLo7QDS6G6A8C4Ke2wZVR3AJiP0g4sneoOAOOjtMMWU90B\n",
|
||||
"4PCUdmClVHcAGAelHUiiugPAQSntwNqo7gAwXEo7cA7VHQBmU9qBQVDdAWBYlHbgvFR3ADib0g4M\n",
|
||||
"juoOAOuntAMHproDgNIODJzqDgDrobQDR6K6A7CtlHZgNFR3AFgdpR2Ym+oOwDZR2oFRUt0BYLmU\n",
|
||||
"dmChVHcANp3SDoye6g4Ai6e0A0ujugOwiZR2YKOo7gCwGEo7sBKqOwCbQmkHNpbqDgBHp7QDK6e6\n",
|
||||
"AzBmSjuwFVR3ADgcpR1YK9UdgLFR2oGto7oDwP6UdmAwVHcAxkBpB7aa6g4Ae1PagUFS3QEYKqUd\n",
|
||||
"YEp1B4DHKe3A4KnuAAyJ0g6wB9UdgG2ntAOjoroDsG5KO8A+VHcAtpHSDoyW6g7AOoyytFfVZVV1\n",
|
||||
"c1XdX1WPVtXpqrqxqp50hLGeW1W3VtUD07E+U1Xvq6pvn/c4gc2jugOwLeYq7VV1ZZI7klyc5D1J\n",
|
||||
"7kxydZKTSe5Kck13f+6AY/1Ekh9N8ntJ/kuSzya5JMlfTfKB7v6nBxhDaYctpboDsCrrmHPOO2m/\n",
|
||||
"LZOy9cru/ukznv/JJD+c5Ge7+wcPMM4PJPnZJP8hycu7+0927T+2+7kZ45i0wxab1vWXJHlbkicn\n",
|
||||
"+WKSVyd5Z3c2Zy0gAGs1qkn7tLLfk+R0d1+5a98FSR5I0kku7e6HzzPO12RS1/84yVUHmZyfZyyT\n",
|
||||
"dkB1B2Cpxram/eR0e/vuHd39pSQfSfLEJM/eZ5zrkjwlya8k6ar6jqp6bVVdX1X7fSzAOax1B2DT\n",
|
||||
"zDNpPzHd3j1j/z3T7VX7jPPN0+2Xk3w8yX9O8qYkNya5o6p+vaqeMsdxAluoO92ddyV5VpJbk1yY\n",
|
||||
"5B1JbqvK09d6cABwSPNM2i+abh+asX/n+f2uInPJdPuaJH+a5K8nuSDJN2RS8b8lybuPfpjANlPd\n",
|
||||
"AdgEQ7i50s4x/L8kL+juO7r74e7+7UzWo346ybdaKgMcleoOwNjNM2nfKekXzdi/8/yD+4yzs/9j\n",
|
||||
"3X3Wm8S6+5Ekt03/9ZtzQFXV53mcOug4wGZR3QE4qKo6NWs+uY7jmWfSfud0e2LG/p217LPWvO8e\n",
|
||||
"Z9bkfuf5P3fA40p313kepw46DrB5VHcADqK7T82aT67jeOaZtH9our2uqs46+Kq6MMk1mVzG8aP7\n",
|
||||
"jPNrmVwa8pm7x5n6y9Pt6TmOFeAsqjsAY3LkSXt335vJG0WvSPKKXbtvSPKEJLdMl7ikqo5V1TOq\n",
|
||||
"6viucT6VyRVj/lKS68/cV1XPS/I3k3w+yfuOeqwAe1HdARiLee+IejzJHZlcAea9mSx1uTrJtUnu\n",
|
||||
"SvKc7v789LWXJ7k3ySe7+4pd4/zF6ThPy6S8fzyTXwZemMkVZb6nu289wPG4uRJwJO6mCsBBjeqO\n",
|
||||
"qF8doOqyJG9I8vxMftDdn0mxuqG7HzrjdZdnMmm/r7uP7zHOU5L8eJIXJPnzmbzR9cNJ3tTdv3XA\n",
|
||||
"YzFpB+bibqoA7GeUk/YhMWkHFkF1B+B8TNrnZNIOLJLqDsBe1jHnHMLNlQAGyRVmABgKpR3gAFR3\n",
|
||||
"AHYo7QADpboDsE5KO8Ahqe4A201pBxgB1R2AVVPaAeagugNsH6UdYGRUdwBWQWkHWBDVHWA7KO0A\n",
|
||||
"I6a6A7AsSjvAEqjuAJtLaQfYEKo7AIuktAMsmeoOsFmUdoANpLoDMC+lHWCFVHeA8VPaATac6g7A\n",
|
||||
"USjtAGuiugOMk9IOsEVUdwAOSmkHGADVHWA8lHaALaW6A3A+SjvAwKjuAMOmtAOgugNwDqUdYMBU\n",
|
||||
"d4DhUdoBOIvqDkCitAOMhuoOMAxKOwAzqe4A20tpBxgh1R1gfZR2AA5EdQfYLko7wMip7gCrpbQD\n",
|
||||
"cGiqO8DmU9oBNojqDrB8SjsAc1HdATaT0g6woVR3gOVQ2gFYGNUdYHMo7QBbQHUHWBylHYClUN0B\n",
|
||||
"xk1pB9gyqjvAfJR2AJZOdQcYH6UdYIup7gCHp7QDsFKqO8A4KO0AJFHdAQ5KaQdgbVR3gOFS2gE4\n",
|
||||
"h+oOMJvSDsAgqO4Aw6K0A3BeqjvA2ZR2AAZHdQdYP6UdgANT3QGUdgAGTnUHWA+lHYAjUd2BbaW0\n",
|
||||
"AzAaqjvA6ijtAMxNdQe2idIOwCip7gDLpbQDsFCqO7DplHYARk91B1g8pR2ApVHdgU2ktAOwUVR3\n",
|
||||
"gMVQ2gFYCdUd2BRKOwAbS3UHODqlHYCVU92BMVPaAdgKqjvA4SjtAKyV6g6MjdIOwNZR3QH2p7QD\n",
|
||||
"MBiqOzAGSjsAW011B9ib0g7AIKnuwFAp7QAwpboDPE5pB2DwVHdgSJR2ANiD6g5sO6UdgFFR3YF1\n",
|
||||
"U9oBYB+qO7CNlHYARkt1B9ZBaQeAQ1DdgW2htAOwEVR3YFWUdgA4ItUd2GRzT9qr6rKqurmq7q+q\n",
|
||||
"R6vqdFXdWFVPmmPM762qx6aPvz/vMQKwHbrT3XlXkmcluTXJhUnekeS2qjx9rQcHMIe5Ju1VdWWS\n",
|
||||
"/5nk7yb5aJKfSnJvkuuT/GZVfe0Rxnxakrcl+dL0qc1ZvwPASqjuwKaZt7S/PcnFSV7Z3S/u7td1\n",
|
||||
"93OT3JjkRJI3HmawqqokP5fkj5L82zmPDYAtproDm+TIk/ZpZb8uyenu/uldu1+f5OEk31tVTzjE\n",
|
||||
"sD+U5GSSvzf9eACYi+oObIJ5SvvJ6fb23Tu6+0tJPpLkiUmefZDBqurrk7w5yVu6+zfmOC4AOIvq\n",
|
||||
"DozdPJP2E9Pt3TP23zPdXrXfQFV1LMktSe5L8ro5jgkAZlLdgbGaZ9J+0XT70Iz9O88f5CoyP57k\n",
|
||||
"G5P83e7+8hzHBADnpboDY7T267RX1dVJfizJv+ru/7bu4wFgO6juwJjMM2nfKekXzdi/8/yDswaY\n",
|
||||
"Lov5hSR3ZfLm1T1fdtgDq6o+z+PUYccDYDOp7sAsVXVq1nxyHcczz6T9zun2xIz9O2vZZ615T5IL\n",
|
||||
"pq97ZpJHz7ih0mOZLJlJkn83fe7Ggx5Yd9d5HqcOOg4A20F1B3br7lOz5pPrOJ7qPtovC1V1PMkn\n",
|
||||
"kpxO8nV9xkBVdWGS38/kxkiXdPcjM8b4s0luyt43UPqmJH8lyYczKfHv7+5373NMnUwm7Yf+ggAg\n",
|
||||
"SVUuTfIzSV40fer9SV7WnU+t76iAIVnHnPPIk/Ykqar3JXlekh/q7red8fxPJXlVkn/b3f9o+tyx\n",
|
||||
"JF+X5Cvdfe8Bxj6VSW1/WXfffMDjMWkHYG7Tuv6STO7Q/eQkX0zy6iTv7Hanbth265hzzvtG1H+U\n",
|
||||
"5A+TvLWqbq2qN1XVBzOZsN+V5J+d8drLkvxOkl+b83MCwFJZ6w4MzVyT9mkx/2tJ/kOSq5P8SJIr\n",
|
||||
"krwlybO7+/N7fdhBhz/EawFg4ax1B4ZiruUxQ2N5DADLYq07sGOMy2MAYCuo7sA6Ke0AcEiqO2w3\n",
|
||||
"pR0ARkB1B1ZNaQeAOajusH2UdgAYGdUdWAWlHQAWRHWH7aC0A8CIqe7AsijtALAEqjtsLqUdADaE\n",
|
||||
"6g4sktIOAEumusNmUdoBYAOp7sC8lHYAWCHVHcZPaQeADae6A0ehtAPAmqjuME5KOwBsEdUdOCil\n",
|
||||
"HQAGQHWH8VDaAWBLqe7A+SjtADAwqjsMm9IOAKjuwDmUdgAYMNUdhkdpBwDOoroDidIOAKOhusMw\n",
|
||||
"KO0AwEyqO2wvpR0ARkh1h/VR2gGAA1HdYbso7QAwcqo7rJbSDgAcmuoOm09pB4ANorrD8intAMBc\n",
|
||||
"VHfYTEo7AGwo1R2WQ2kHABZGdYfNobQDwBZQ3WFxlHYAYClUdxg3pR0AtozqDvNR2gGApVPdYXyU\n",
|
||||
"dgDYYqo7HJ7SDgCslOoO46C0AwBJVHc4KKUdAFgb1R2GS2kHAM6husNsSjsAMAiqOwyL0g4AnJfq\n",
|
||||
"DmdT2gGAwVHdYf2UdgDgwFR3UNoBgIFT3WE9lHYA4EhUd7aV0g4AjIbqDqujtAMAc1Pd2SZKOwAw\n",
|
||||
"Sqo7LJfSDgAslOrOplPaAYDRU91h8ZR2AGBpVHc2kdIOAGwU1R0WQ2kHAFZCdWdTKO0AwMZS3eHo\n",
|
||||
"lHYAYOVUd8ZMaQcAtoLqDoejtAMAa6W6MzZKOwCwdVR32J/SDgAMhurOGCjtAMBWU91hb0o7ADBI\n",
|
||||
"qjtDpbQDAEyp7vA4pR0AGDzVnSFR2gEA9qC6s+2UdgBgVFR31k1pBwDYh+rONlLaAYDRUt1ZB6Ud\n",
|
||||
"AOAQVHe2hdIOAGwE1Z1VUdoBAI5IdWeTKe0AwMZR3VkmpR0AYAFUdzaN0g4AbDTVnUVT2gEAFkx1\n",
|
||||
"ZxMsZNJeVZdV1c1VdX9VPVpVp6vqxqp60gE//mur6mVVdWtVfaKqHq6qB6vqw1X1/VXlDxQAcGTd\n",
|
||||
"6e68K8mzktya5MIk70hyW1WevtaDgwOYe3lMVV2Z5I4kFyd5T5I7k1yd5GSSu5Jc092f22eMf5jk\n",
|
||||
"7UnuT/KhJJ9K8tQkL05yUZJf7u7vPsCxWB4DAJzXtK6/JMnbkjw5yReTvDrJO7uzOeuGWZp1zDkX\n",
|
||||
"MWm/LZP/zfTK7v7pM57/ySQ/nORnu/sH9xnjZJIndPev7nr+0iT/PcnTknxXd//KPuOYtAMAB2Kt\n",
|
||||
"O0c1ukn7tLLfk+R0d1+5a98FSR5I0kku7e6Hj/g5fizJG5Pc1N3X7/Nak3YA4MBUd45ijG9EPTnd\n",
|
||||
"3r57R3d/KclHkjwxybPn+Bx/smsLALAQ1rozFvNO2k9Mt3fP2H/PdHvVUQavqmNJXjr91/cdZQwA\n",
|
||||
"gP24wgxDN++k/aLp9qEZ+3eeP9BVZPbw5kx+8/3V7n7/EccAANiX6s6QDfY67VX1Q0l+JMn/SfJ9\n",
|
||||
"h/zYPs/j1DKOFwDYDKo7SVJVp2bNJ9dxPPNO2ndK+kUz9u88/+BhBq2qf5zkLUn+d5KT3X2oj+/u\n",
|
||||
"Os/j1GHGAgC2j+pOd5+aNZ9cx/HMO2m/c7o9MWP/zlr2WWvez1FVr0ry1iT/K5MJ+x8e/fAAAI5O\n",
|
||||
"dWco5r3k4/Ekn0hyOsnX9RmDVdWFSX4/k0s+XtLdjxxgvNcmeVOSjyW5br+bMu3x8S75CAAsheu6\n",
|
||||
"s2N0l3zs7nszudzjFUlesWv3DUmekOSWnQl7VR2rqmdMJ/tnqap/kcmE/beSPPewE3YAgGVS3Vmn\n",
|
||||
"RdwR9XiSO5JckuS9mSyZuTrJtUnuSvKc7v789LWXJ7k3ySe7+4ozxvg7SX4uyZ8muSnJF/b4VKe7\n",
|
||||
"++f3ORalHQBYOtV9u43ujqhfHaTqsiRvSPL8TO4mdn8mb9q4obsfOuN1l2cyab+vu4+f8fzrk7w+\n",
|
||||
"k6U0s774X+/uv7HPcZi0AwAr4W6q22u0k/ahMGkHAFZNdd8+o1vTDgCw7ax1ZxWUdgCABVHdt4PS\n",
|
||||
"DgAwYqo7y6K0AwAsgeq+uZR2AIANobqzSEo7AMCSqe6bRWkHANhAqjvzUtoBAFZIdR8/pR0AYMOp\n",
|
||||
"7hyF0g4AsCaq+zgp7QAAW0R156CUdgCAAVDdx0NpBwDYUqo756O0AwAMjOo+bEo7AACqO+dQ2gEA\n",
|
||||
"Bkx1Hx6lHQCAs6juJEo7AMBoqO7DoLQDADCT6r69lHYAgBFS3ddHaQcA4EBU9+2itAMAjJzqvlpK\n",
|
||||
"OwAAh6a6bz6lHQBgg6juy6e0AwAwF9V9MyntAAAbSnVfDqUdAICFUd03h9IOALAFVPfFUdoBAFgK\n",
|
||||
"1X3clHYAgC2jus9HaQcAYOlU9/FR2gEAtpjqfnhKOwAAK6W6j4PSDgBAEtX9oJR2AADWRnUfLqUd\n",
|
||||
"AIBzqO6zKe0AAAyC6j4sSjsAAOelup9NaQcAYHBU9/VT2gEAODDVXWkHAGDgVPf1UNoBADiSba3u\n",
|
||||
"SjsAAKOhuq+O0g4AwNy2qbor7QAAjJLqvlxKOwAAC7Xp1V1pBwBg9FT3xVPaAQBYmk2s7ko7AAAb\n",
|
||||
"RXVfDKUdAICV2JTqrrQDALCxVPejU9oBAFi5MVd3pR0AgK2guh+O0g4AwFqNrbor7QAAbB3VfX9K\n",
|
||||
"OwAAgzGG6q60AwCw1VT3vSntAAAM0lCru9IOAABTqvvjlHYAAAZvSNVdaQcAgD1se3VX2gEAGJV1\n",
|
||||
"V3elHQAA9rGN1V1pBwBgtNZR3ZV2AAA4hG2p7ko7AAAbYVXVXWkHAIAj2uTqrrQDALBxllndlXYA\n",
|
||||
"AFiATavuSjsAABtt0dVdaQcAgAXbhOqutAMAsDUWUd2VdgAAWKKxVnelHQCArXTU6j7K0l5Vl1XV\n",
|
||||
"zVV1f1U9WlWnq+rGqnrSOsaBvVTVqXUfA8PinGAvzgv24rzYXGOq7nOV9qq6MskdSS5O8p4kdya5\n",
|
||||
"OsnJJHcluaa7P7fCcZR29lRV7bzgTM4J9uK8YC/Oi+1wmOo+xtL+9kwm2q/s7hd39+u6+7lJbkxy\n",
|
||||
"IskbVzwOAAAc2tCr+5FL+7SO35PkdHdfuWvfBUkeSNJJLu3uh5c9zvT1Sjt7UknYzTnBXpwX7MV5\n",
|
||||
"sX32q+5jK+0np9vbd+/o7i8l+UiSJyZ59orGAQCAuQ2xus8zaT8x3d49Y/890+1VKxoHAAAWojvd\n",
|
||||
"nXcleVaSW5NcmOQdSW5bx/HMM2m/aLp9aMb+nef3u/rLosYBAICFmlHdV+7YOj7psu2sM4IzOS/Y\n",
|
||||
"zTnBXpwX7MV5wbrNU9p3CvhFM/bvPP/gisYBAICNNE9pv3O6PTFj/84a9Flr1Rc9jqvGAACwkea5\n",
|
||||
"5OPxJJ9IcjrJ1/UZA1XVhUl+P5NLNV7S3Y8sexwAANhUR14e0933ZnKZxiuSvGLX7huSPCHJLTsT\n",
|
||||
"7ao6VlXPmE7SjzwOAABsmyOX9uSrlfyOJJckeW8mS12uTnJtkruSPKe7Pz997eVJ7k3yye6+4qjj\n",
|
||||
"AADAtplr0p4kVXVZkjckeX6SJye5P5NrWd7Q3Q+d8brLM5m039fdx486DgAAbJu5J+0AAMByzXPJ\n",
|
||||
"RwAAYAVM2gEAYOAGPWmvqsuq6uaqur+qHq2q01V1Y1U9aR3jMAzzfj+r6mur6mVVdWtVfaKqHq6q\n",
|
||||
"B6vqw1X1/VXlev8jtIw/51X1vVX12PTx9xd5vKzGIs+Lqnru9O+NB6Zjfaaq3ldV376MY2d5Fji/\n",
|
||||
"+I6qur2qPj39WfK7VfVLVfXsZR07i1dV31VVN03nAV+Y/p1/yxHHWtqcc7Br2qvqykyuKHNxkvfk\n",
|
||||
"8SvKnMzkijLXdPfnVjUOw7CI72dV/cMkb8/kzc4fSvKpJE9N8uJM7sD7y9393cv6Gli8Zfw5r6qn\n",
|
||||
"JflfmcSNC5K8rLtvXuRxs1yLPC+q6ieS/GiS30vyX5J8NpMrnv3VJB/o7n+68C+ApVjg/OJfJnlN\n",
|
||||
"JufCe6ZH+b3pAAAGc0lEQVTbq5K8IJObV760u39xGV8Di1VVH0/yDUm+mOQzSZ6R5D9290sPOc5y\n",
|
||||
"55zdPchHktuSPJbkFbue/8np8z+zynE8hvFYxPdz+ofnO/Z4/tIkn5yO8+J1f60eqz0vdn1cJflA\n",
|
||||
"knuS/MR0jO9f99fpsZ7zIskPTF9/c5Jje+w/5zmP4T4W9HPkqUn+NJP485Rd+66djvO76/5aPQ58\n",
|
||||
"Tlyb5MrpP3/r9Pv3C0cYZ6lzzkGW9ulvKvckOd3dV+7ad0GSBzK5S+ql3f3wssdhGFbx/ayqH0vy\n",
|
||||
"xiQ3dff1cx4yK7CM86Kqrk/yU5n85f1tSX48SvuoLPDnyNdkUtf/OMlV3f0nyztqlm2B58XVSX4z\n",
|
||||
"yXu7+0V77P9Cku7uixZ5/CxfVV2b5IM5ZGlfxRxlqGvaT063t+/e0d1fSvKRJE9Mst+asUWNwzCs\n",
|
||||
"4vv5J7u2DN9Cz4uq+vokb07ylu7+jUUdJCu3qPPiuiRPSfIrSXq6hvm1VXW9dcujtKjz4u4kX0ly\n",
|
||||
"dVU9+cwdVfUtmSyp+8DcR8uYLH2OMtRJ+4np9u4Z+++Zbq9a0TgMw1K/n1V1LMnOb9XvO8oYrMXC\n",
|
||||
"zovpOXBLkvuSvG7uI2OdFnVefPN0++UkH0/yn5O8KcmNSe6oql+vqqfMc6Cs1ELOi57cpf21mSyr\n",
|
||||
"/J2qekdVvamqfimTJRK3J/kHCzhexmPpc85jR/3AJdv530mz7oS68/x+78Rd1DgMw7K/n29O8qwk\n",
|
||||
"v9rd7z/iGKzeIs+LH0/yjZm8WejL8x4Ya7Wo8+KS6fY1Sf53kr+eyeT9eJJ/neR5Sd6dxysbw7aw\n",
|
||||
"vy+6+99U1SeT/PskLztj1yeS/Hx3f/bIR8kYLX3OOdTSDitVVT+U5EeS/J8k37fmw2ENpmtUfyzJ\n",
|
||||
"v+ru/7bu42Ewdn5O/r8kL+juO7r74e7+7SQvSvLpJN9qqcz2qap/kskvbD+XyS9xT0jyTUnuTfKL\n",
|
||||
"06vLwMIMddK+89vIrDdw7Dz/4IrGYRiW8v2sqn+c5C2ZVLST3e18GJe5z4vpsphfyOSSXK+f9bIj\n",
|
||||
"HR3rsqi/L3b2f6y7P3Xmju5+JJOlEMnjy2gYtoWcF9M3K745kzei/mh339fdj3b3xzL5Ze4zSV5d\n",
|
||||
"VVcs4JgZh6XPOYc6ab9zuj0xY//OeqBZ64YWPQ7DsPDvZ1W9KslbM7ke98nu/sOjHx5rsojz4oLp\n",
|
||||
"656Z5NEzbqj0WCZLZpLk302fu3HuI2YVFv1zZNYP2p3n/9wBj4v1WtR58Z3T7Yd275j+Mvc/Mplj\n",
|
||||
"feNhD5DRWvqcc6hr2nf+EFxXVdVnXJeyqi5Mck0ml9/66IrGYRgW+v2sqtdm8oayjyW5rt1ka6wW\n",
|
||||
"cV48msm61L2ugftNSf5Kkg9nUuLvWMRBs3SL+vvi1zI5L565e5ypvzzdnl7AMbN8izovvma6vWTG\n",
|
||||
"/oun268c9UAZnaXPOQdZ2rv73kzeeX1Fklfs2n1DJuvGbpn+NpuqOlZVz6iq4/OMw7At6ryY7vsX\n",
|
||||
"mUzYfyvJc03Yx2sR58X0f2v/QHe/fPcjk6uFJJM3lr28u9+9/K+KeS3w58inMjkH/lKSs+7dUFXP\n",
|
||||
"S/I3k3w+rjg1Cgv8OfJfp9uXV9VfOHNHVX17JhO0R+KX/I2zzjnnIG+ulCTT/xh3ZPJb7Hvz+K1g\n",
|
||||
"r82kdj1nesmlVNXlmbzx45PdfcVRx2H4FnFeVNXfyeSNQ3+a5KYkX9jjU53u7p9f1tfBYi3q74sZ\n",
|
||||
"Y5+KmyuN0gJ/jvzF6ThPy6S8fzyTH8wvzOTvke/p7luX/gWxEAv6OVKZvJ/h25J8McmtSf4gyddn\n",
|
||||
"snSmk7yqu29axdfEfKrqhZn8eU4md7t9Xibf9517dfxRd79m+trLs64552Fun7rqR5LLMrlt9P2Z\n",
|
||||
"XCP3dCZ3Kbxo1+suz+T2sPfOM47HOB7znheZvNHwsUx+2D424/HBdX+dHqs9L84z7uun58r3r/tr\n",
|
||||
"9FjfeZHJDZbemsk1/L+c5A+T/HKSv7bur9FjPedFJkuMr8/kzqgPZXKFoQeS/Kck37bur9HjUOfD\n",
|
||||
"mfOCMx+P7f7+r3POOdjSDgAATAxyTTsAAPA4k3YAABg4k3YAABg4k3YAABg4k3YAABg4k3YAABg4\n",
|
||||
"k3YAABg4k3YAABg4k3YAABg4k3YAABg4k3YAABg4k3YAABg4k3YAABg4k3YAABg4k3YAABg4k3YA\n",
|
||||
"ABg4k3YAABi4/w/Y3UZ5IHmVbAAAAABJRU5ErkJggg==\n"
|
||||
],
|
||||
"text/plain": [
|
||||
"<matplotlib.figure.Figure at 0x10d0da080>"
|
||||
]
|
||||
},
|
||||
"metadata": {
|
||||
"image/png": {
|
||||
"height": 255,
|
||||
"width": 374
|
||||
}
|
||||
},
|
||||
"output_type": "display_data"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"%matplotlib inline\n",
|
||||
"%config InlineBackend.figure_formats = set(['retina'])\n",
|
||||
"import matplotlib.pyplot as plt\n",
|
||||
"plt.plot([0,1],[1,0])"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "IPython mydev (Python 3)",
|
||||
"name": "python3_mydev"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.4.2"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 0
|
||||
}
|
|
@@ -0,0 +1,90 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import numpy as np"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"(100,)"
|
||||
]
|
||||
},
|
||||
"execution_count": 10,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"evs = np.zeros(100)\n",
|
||||
"evs.shape"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
" "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
" "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 0,
|
||||
"metadata": {
|
||||
"collapsed": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
" "
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.6.1"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 1
|
||||
}
|
|
@@ -0,0 +1,77 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "raw",
|
||||
"metadata": {
|
||||
"raw_mimetype": "text/html"
|
||||
},
|
||||
"source": [
|
||||
"<b>raw html</b>"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "raw",
|
||||
"metadata": {
|
||||
"raw_mimetype": "text/markdown"
|
||||
},
|
||||
"source": [
|
||||
"* raw markdown\n",
|
||||
"* bullet\n",
|
||||
"* list"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "raw",
|
||||
"metadata": {
|
||||
"raw_mimetype": "text/restructuredtext"
|
||||
},
|
||||
"source": [
|
||||
"``raw rst``\n",
|
||||
"\n",
|
||||
".. sourcecode:: python\n",
|
||||
"\n",
|
||||
" def foo(): pass\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "raw",
|
||||
"metadata": {
|
||||
"raw_mimetype": "text/x-python"
|
||||
},
|
||||
"source": [
|
||||
"def bar():\n",
|
||||
" \"\"\"raw python\"\"\"\n",
|
||||
" pass"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "raw",
|
||||
"metadata": {
|
||||
"raw_mimetype": "text/latex"
|
||||
},
|
||||
"source": [
|
||||
"\\LaTeX\n",
|
||||
"% raw latex"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "raw",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# no raw_mimetype metadata, should be included by default"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "raw",
|
||||
"metadata": {
|
||||
"raw_mimetype": "doesnotexist"
|
||||
},
|
||||
"source": [
|
||||
"garbage format defined, should never be included"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 0
|
||||
}
|
504
venv/Lib/site-packages/nbconvert/exporters/tests/files/svg.ipynb
Normal file
504
venv/Lib/site-packages/nbconvert/exporters/tests/files/svg.ipynb
Normal file
|
@@ -0,0 +1,504 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"[<matplotlib.lines.Line2D at 0x7f4c63ec5518>]"
|
||||
]
|
||||
},
|
||||
"execution_count": 1,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"image/svg+xml": [
|
||||
"<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>\n",
|
||||
"<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"\n",
|
||||
" \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\n",
|
||||
"<!-- Created with matplotlib (http://matplotlib.org/) -->\n",
|
||||
"<svg height=\"252.018125pt\" version=\"1.1\" viewBox=\"0 0 375.603125 252.018125\" width=\"375.603125pt\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\">\n",
|
||||
" <defs>\n",
|
||||
" <style type=\"text/css\">\n",
|
||||
"*{stroke-linecap:butt;stroke-linejoin:round;}\n",
|
||||
" </style>\n",
|
||||
" </defs>\n",
|
||||
" <g id=\"figure_1\">\n",
|
||||
" <g id=\"patch_1\">\n",
|
||||
" <path d=\"M 0 252.018125 \n",
|
||||
"L 375.603125 252.018125 \n",
|
||||
"L 375.603125 0 \n",
|
||||
"L 0 0 \n",
|
||||
"z\n",
|
||||
"\" style=\"fill:none;\"/>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"axes_1\">\n",
|
||||
" <g id=\"patch_2\">\n",
|
||||
" <path d=\"M 30.103125 228.14 \n",
|
||||
"L 364.903125 228.14 \n",
|
||||
"L 364.903125 10.7 \n",
|
||||
"L 30.103125 10.7 \n",
|
||||
"z\n",
|
||||
"\" style=\"fill:#ffffff;\"/>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"matplotlib.axis_1\">\n",
|
||||
" <g id=\"xtick_1\">\n",
|
||||
" <g id=\"line2d_1\">\n",
|
||||
" <defs>\n",
|
||||
" <path d=\"M 0 0 \n",
|
||||
"L 0 3.5 \n",
|
||||
"\" id=\"mbaa5d3ac27\" style=\"stroke:#000000;stroke-width:0.8;\"/>\n",
|
||||
" </defs>\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"45.321307\" xlink:href=\"#mbaa5d3ac27\" y=\"228.14\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_1\">\n",
|
||||
" <!-- 0 -->\n",
|
||||
" <defs>\n",
|
||||
" <path d=\"M 31.78125 66.40625 \n",
|
||||
"Q 24.171875 66.40625 20.328125 58.90625 \n",
|
||||
"Q 16.5 51.421875 16.5 36.375 \n",
|
||||
"Q 16.5 21.390625 20.328125 13.890625 \n",
|
||||
"Q 24.171875 6.390625 31.78125 6.390625 \n",
|
||||
"Q 39.453125 6.390625 43.28125 13.890625 \n",
|
||||
"Q 47.125 21.390625 47.125 36.375 \n",
|
||||
"Q 47.125 51.421875 43.28125 58.90625 \n",
|
||||
"Q 39.453125 66.40625 31.78125 66.40625 \n",
|
||||
"z\n",
|
||||
"M 31.78125 74.21875 \n",
|
||||
"Q 44.046875 74.21875 50.515625 64.515625 \n",
|
||||
"Q 56.984375 54.828125 56.984375 36.375 \n",
|
||||
"Q 56.984375 17.96875 50.515625 8.265625 \n",
|
||||
"Q 44.046875 -1.421875 31.78125 -1.421875 \n",
|
||||
"Q 19.53125 -1.421875 13.0625 8.265625 \n",
|
||||
"Q 6.59375 17.96875 6.59375 36.375 \n",
|
||||
"Q 6.59375 54.828125 13.0625 64.515625 \n",
|
||||
"Q 19.53125 74.21875 31.78125 74.21875 \n",
|
||||
"z\n",
|
||||
"\" id=\"DejaVuSans-30\"/>\n",
|
||||
" </defs>\n",
|
||||
" <g transform=\"translate(42.140057 242.738437)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-30\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"xtick_2\">\n",
|
||||
" <g id=\"line2d_2\">\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"106.194034\" xlink:href=\"#mbaa5d3ac27\" y=\"228.14\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_2\">\n",
|
||||
" <!-- 1 -->\n",
|
||||
" <defs>\n",
|
||||
" <path d=\"M 12.40625 8.296875 \n",
|
||||
"L 28.515625 8.296875 \n",
|
||||
"L 28.515625 63.921875 \n",
|
||||
"L 10.984375 60.40625 \n",
|
||||
"L 10.984375 69.390625 \n",
|
||||
"L 28.421875 72.90625 \n",
|
||||
"L 38.28125 72.90625 \n",
|
||||
"L 38.28125 8.296875 \n",
|
||||
"L 54.390625 8.296875 \n",
|
||||
"L 54.390625 0 \n",
|
||||
"L 12.40625 0 \n",
|
||||
"z\n",
|
||||
"\" id=\"DejaVuSans-31\"/>\n",
|
||||
" </defs>\n",
|
||||
" <g transform=\"translate(103.012784 242.738437)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-31\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"xtick_3\">\n",
|
||||
" <g id=\"line2d_3\">\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"167.066761\" xlink:href=\"#mbaa5d3ac27\" y=\"228.14\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_3\">\n",
|
||||
" <!-- 2 -->\n",
|
||||
" <defs>\n",
|
||||
" <path d=\"M 19.1875 8.296875 \n",
|
||||
"L 53.609375 8.296875 \n",
|
||||
"L 53.609375 0 \n",
|
||||
"L 7.328125 0 \n",
|
||||
"L 7.328125 8.296875 \n",
|
||||
"Q 12.9375 14.109375 22.625 23.890625 \n",
|
||||
"Q 32.328125 33.6875 34.8125 36.53125 \n",
|
||||
"Q 39.546875 41.84375 41.421875 45.53125 \n",
|
||||
"Q 43.3125 49.21875 43.3125 52.78125 \n",
|
||||
"Q 43.3125 58.59375 39.234375 62.25 \n",
|
||||
"Q 35.15625 65.921875 28.609375 65.921875 \n",
|
||||
"Q 23.96875 65.921875 18.8125 64.3125 \n",
|
||||
"Q 13.671875 62.703125 7.8125 59.421875 \n",
|
||||
"L 7.8125 69.390625 \n",
|
||||
"Q 13.765625 71.78125 18.9375 73 \n",
|
||||
"Q 24.125 74.21875 28.421875 74.21875 \n",
|
||||
"Q 39.75 74.21875 46.484375 68.546875 \n",
|
||||
"Q 53.21875 62.890625 53.21875 53.421875 \n",
|
||||
"Q 53.21875 48.921875 51.53125 44.890625 \n",
|
||||
"Q 49.859375 40.875 45.40625 35.40625 \n",
|
||||
"Q 44.1875 33.984375 37.640625 27.21875 \n",
|
||||
"Q 31.109375 20.453125 19.1875 8.296875 \n",
|
||||
"z\n",
|
||||
"\" id=\"DejaVuSans-32\"/>\n",
|
||||
" </defs>\n",
|
||||
" <g transform=\"translate(163.885511 242.738437)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-32\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"xtick_4\">\n",
|
||||
" <g id=\"line2d_4\">\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"227.939489\" xlink:href=\"#mbaa5d3ac27\" y=\"228.14\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_4\">\n",
|
||||
" <!-- 3 -->\n",
|
||||
" <defs>\n",
|
||||
" <path d=\"M 40.578125 39.3125 \n",
|
||||
"Q 47.65625 37.796875 51.625 33 \n",
|
||||
"Q 55.609375 28.21875 55.609375 21.1875 \n",
|
||||
"Q 55.609375 10.40625 48.1875 4.484375 \n",
|
||||
"Q 40.765625 -1.421875 27.09375 -1.421875 \n",
|
||||
"Q 22.515625 -1.421875 17.65625 -0.515625 \n",
|
||||
"Q 12.796875 0.390625 7.625 2.203125 \n",
|
||||
"L 7.625 11.71875 \n",
|
||||
"Q 11.71875 9.328125 16.59375 8.109375 \n",
|
||||
"Q 21.484375 6.890625 26.8125 6.890625 \n",
|
||||
"Q 36.078125 6.890625 40.9375 10.546875 \n",
|
||||
"Q 45.796875 14.203125 45.796875 21.1875 \n",
|
||||
"Q 45.796875 27.640625 41.28125 31.265625 \n",
|
||||
"Q 36.765625 34.90625 28.71875 34.90625 \n",
|
||||
"L 20.21875 34.90625 \n",
|
||||
"L 20.21875 43.015625 \n",
|
||||
"L 29.109375 43.015625 \n",
|
||||
"Q 36.375 43.015625 40.234375 45.921875 \n",
|
||||
"Q 44.09375 48.828125 44.09375 54.296875 \n",
|
||||
"Q 44.09375 59.90625 40.109375 62.90625 \n",
|
||||
"Q 36.140625 65.921875 28.71875 65.921875 \n",
|
||||
"Q 24.65625 65.921875 20.015625 65.03125 \n",
|
||||
"Q 15.375 64.15625 9.8125 62.3125 \n",
|
||||
"L 9.8125 71.09375 \n",
|
||||
"Q 15.4375 72.65625 20.34375 73.4375 \n",
|
||||
"Q 25.25 74.21875 29.59375 74.21875 \n",
|
||||
"Q 40.828125 74.21875 47.359375 69.109375 \n",
|
||||
"Q 53.90625 64.015625 53.90625 55.328125 \n",
|
||||
"Q 53.90625 49.265625 50.4375 45.09375 \n",
|
||||
"Q 46.96875 40.921875 40.578125 39.3125 \n",
|
||||
"z\n",
|
||||
"\" id=\"DejaVuSans-33\"/>\n",
|
||||
" </defs>\n",
|
||||
" <g transform=\"translate(224.758239 242.738437)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-33\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"xtick_5\">\n",
|
||||
" <g id=\"line2d_5\">\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"288.812216\" xlink:href=\"#mbaa5d3ac27\" y=\"228.14\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_5\">\n",
|
||||
" <!-- 4 -->\n",
|
||||
" <defs>\n",
|
||||
" <path d=\"M 37.796875 64.3125 \n",
|
||||
"L 12.890625 25.390625 \n",
|
||||
"L 37.796875 25.390625 \n",
|
||||
"z\n",
|
||||
"M 35.203125 72.90625 \n",
|
||||
"L 47.609375 72.90625 \n",
|
||||
"L 47.609375 25.390625 \n",
|
||||
"L 58.015625 25.390625 \n",
|
||||
"L 58.015625 17.1875 \n",
|
||||
"L 47.609375 17.1875 \n",
|
||||
"L 47.609375 0 \n",
|
||||
"L 37.796875 0 \n",
|
||||
"L 37.796875 17.1875 \n",
|
||||
"L 4.890625 17.1875 \n",
|
||||
"L 4.890625 26.703125 \n",
|
||||
"z\n",
|
||||
"\" id=\"DejaVuSans-34\"/>\n",
|
||||
" </defs>\n",
|
||||
" <g transform=\"translate(285.630966 242.738437)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-34\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"xtick_6\">\n",
|
||||
" <g id=\"line2d_6\">\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"349.684943\" xlink:href=\"#mbaa5d3ac27\" y=\"228.14\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_6\">\n",
|
||||
" <!-- 5 -->\n",
|
||||
" <defs>\n",
|
||||
" <path d=\"M 10.796875 72.90625 \n",
|
||||
"L 49.515625 72.90625 \n",
|
||||
"L 49.515625 64.59375 \n",
|
||||
"L 19.828125 64.59375 \n",
|
||||
"L 19.828125 46.734375 \n",
|
||||
"Q 21.96875 47.46875 24.109375 47.828125 \n",
|
||||
"Q 26.265625 48.1875 28.421875 48.1875 \n",
|
||||
"Q 40.625 48.1875 47.75 41.5 \n",
|
||||
"Q 54.890625 34.8125 54.890625 23.390625 \n",
|
||||
"Q 54.890625 11.625 47.5625 5.09375 \n",
|
||||
"Q 40.234375 -1.421875 26.90625 -1.421875 \n",
|
||||
"Q 22.3125 -1.421875 17.546875 -0.640625 \n",
|
||||
"Q 12.796875 0.140625 7.71875 1.703125 \n",
|
||||
"L 7.71875 11.625 \n",
|
||||
"Q 12.109375 9.234375 16.796875 8.0625 \n",
|
||||
"Q 21.484375 6.890625 26.703125 6.890625 \n",
|
||||
"Q 35.15625 6.890625 40.078125 11.328125 \n",
|
||||
"Q 45.015625 15.765625 45.015625 23.390625 \n",
|
||||
"Q 45.015625 31 40.078125 35.4375 \n",
|
||||
"Q 35.15625 39.890625 26.703125 39.890625 \n",
|
||||
"Q 22.75 39.890625 18.8125 39.015625 \n",
|
||||
"Q 14.890625 38.140625 10.796875 36.28125 \n",
|
||||
"z\n",
|
||||
"\" id=\"DejaVuSans-35\"/>\n",
|
||||
" </defs>\n",
|
||||
" <g transform=\"translate(346.503693 242.738437)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-35\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"matplotlib.axis_2\">\n",
|
||||
" <g id=\"ytick_1\">\n",
|
||||
" <g id=\"line2d_7\">\n",
|
||||
" <defs>\n",
|
||||
" <path d=\"M 0 0 \n",
|
||||
"L -3.5 0 \n",
|
||||
"\" id=\"m7fb83757f4\" style=\"stroke:#000000;stroke-width:0.8;\"/>\n",
|
||||
" </defs>\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"30.103125\" xlink:href=\"#m7fb83757f4\" y=\"218.256364\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_7\">\n",
|
||||
" <!-- 0.0 -->\n",
|
||||
" <defs>\n",
|
||||
" <path d=\"M 10.6875 12.40625 \n",
|
||||
"L 21 12.40625 \n",
|
||||
"L 21 0 \n",
|
||||
"L 10.6875 0 \n",
|
||||
"z\n",
|
||||
"\" id=\"DejaVuSans-2e\"/>\n",
|
||||
" </defs>\n",
|
||||
" <g transform=\"translate(7.2 222.055582)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-30\"/>\n",
|
||||
" <use x=\"63.623047\" xlink:href=\"#DejaVuSans-2e\"/>\n",
|
||||
" <use x=\"95.410156\" xlink:href=\"#DejaVuSans-30\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"ytick_2\">\n",
|
||||
" <g id=\"line2d_8\">\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"30.103125\" xlink:href=\"#m7fb83757f4\" y=\"193.547273\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_8\">\n",
|
||||
" <!-- 0.5 -->\n",
|
||||
" <g transform=\"translate(7.2 197.346491)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-30\"/>\n",
|
||||
" <use x=\"63.623047\" xlink:href=\"#DejaVuSans-2e\"/>\n",
|
||||
" <use x=\"95.410156\" xlink:href=\"#DejaVuSans-35\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"ytick_3\">\n",
|
||||
" <g id=\"line2d_9\">\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"30.103125\" xlink:href=\"#m7fb83757f4\" y=\"168.838182\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_9\">\n",
|
||||
" <!-- 1.0 -->\n",
|
||||
" <g transform=\"translate(7.2 172.637401)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-31\"/>\n",
|
||||
" <use x=\"63.623047\" xlink:href=\"#DejaVuSans-2e\"/>\n",
|
||||
" <use x=\"95.410156\" xlink:href=\"#DejaVuSans-30\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"ytick_4\">\n",
|
||||
" <g id=\"line2d_10\">\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"30.103125\" xlink:href=\"#m7fb83757f4\" y=\"144.129091\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_10\">\n",
|
||||
" <!-- 1.5 -->\n",
|
||||
" <g transform=\"translate(7.2 147.92831)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-31\"/>\n",
|
||||
" <use x=\"63.623047\" xlink:href=\"#DejaVuSans-2e\"/>\n",
|
||||
" <use x=\"95.410156\" xlink:href=\"#DejaVuSans-35\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"ytick_5\">\n",
|
||||
" <g id=\"line2d_11\">\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"30.103125\" xlink:href=\"#m7fb83757f4\" y=\"119.42\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_11\">\n",
|
||||
" <!-- 2.0 -->\n",
|
||||
" <g transform=\"translate(7.2 123.219219)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-32\"/>\n",
|
||||
" <use x=\"63.623047\" xlink:href=\"#DejaVuSans-2e\"/>\n",
|
||||
" <use x=\"95.410156\" xlink:href=\"#DejaVuSans-30\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"ytick_6\">\n",
|
||||
" <g id=\"line2d_12\">\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"30.103125\" xlink:href=\"#m7fb83757f4\" y=\"94.710909\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_12\">\n",
|
||||
" <!-- 2.5 -->\n",
|
||||
" <g transform=\"translate(7.2 98.510128)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-32\"/>\n",
|
||||
" <use x=\"63.623047\" xlink:href=\"#DejaVuSans-2e\"/>\n",
|
||||
" <use x=\"95.410156\" xlink:href=\"#DejaVuSans-35\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"ytick_7\">\n",
|
||||
" <g id=\"line2d_13\">\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"30.103125\" xlink:href=\"#m7fb83757f4\" y=\"70.001818\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_13\">\n",
|
||||
" <!-- 3.0 -->\n",
|
||||
" <g transform=\"translate(7.2 73.801037)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-33\"/>\n",
|
||||
" <use x=\"63.623047\" xlink:href=\"#DejaVuSans-2e\"/>\n",
|
||||
" <use x=\"95.410156\" xlink:href=\"#DejaVuSans-30\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"ytick_8\">\n",
|
||||
" <g id=\"line2d_14\">\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"30.103125\" xlink:href=\"#m7fb83757f4\" y=\"45.292727\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_14\">\n",
|
||||
" <!-- 3.5 -->\n",
|
||||
" <g transform=\"translate(7.2 49.091946)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-33\"/>\n",
|
||||
" <use x=\"63.623047\" xlink:href=\"#DejaVuSans-2e\"/>\n",
|
||||
" <use x=\"95.410156\" xlink:href=\"#DejaVuSans-35\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"ytick_9\">\n",
|
||||
" <g id=\"line2d_15\">\n",
|
||||
" <g>\n",
|
||||
" <use style=\"stroke:#000000;stroke-width:0.8;\" x=\"30.103125\" xlink:href=\"#m7fb83757f4\" y=\"20.583636\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"text_15\">\n",
|
||||
" <!-- 4.0 -->\n",
|
||||
" <g transform=\"translate(7.2 24.382855)scale(0.1 -0.1)\">\n",
|
||||
" <use xlink:href=\"#DejaVuSans-34\"/>\n",
|
||||
" <use x=\"63.623047\" xlink:href=\"#DejaVuSans-2e\"/>\n",
|
||||
" <use x=\"95.410156\" xlink:href=\"#DejaVuSans-30\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"line2d_16\">\n",
|
||||
" <path clip-path=\"url(#pf878579141)\" d=\"M 45.321307 218.256364 \n",
|
||||
"L 106.194034 70.001818 \n",
|
||||
"L 167.066761 20.583636 \n",
|
||||
"L 227.939489 20.583636 \n",
|
||||
"L 288.812216 70.001818 \n",
|
||||
"L 349.684943 218.256364 \n",
|
||||
"\" style=\"fill:none;stroke:#1f77b4;stroke-linecap:square;stroke-width:1.5;\"/>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"patch_3\">\n",
|
||||
" <path d=\"M 30.103125 228.14 \n",
|
||||
"L 30.103125 10.7 \n",
|
||||
"\" style=\"fill:none;stroke:#000000;stroke-linecap:square;stroke-linejoin:miter;stroke-width:0.8;\"/>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"patch_4\">\n",
|
||||
" <path d=\"M 364.903125 228.14 \n",
|
||||
"L 364.903125 10.7 \n",
|
||||
"\" style=\"fill:none;stroke:#000000;stroke-linecap:square;stroke-linejoin:miter;stroke-width:0.8;\"/>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"patch_5\">\n",
|
||||
" <path d=\"M 30.103125 228.14 \n",
|
||||
"L 364.903125 228.14 \n",
|
||||
"\" style=\"fill:none;stroke:#000000;stroke-linecap:square;stroke-linejoin:miter;stroke-width:0.8;\"/>\n",
|
||||
" </g>\n",
|
||||
" <g id=\"patch_6\">\n",
|
||||
" <path d=\"M 30.103125 10.7 \n",
|
||||
"L 364.903125 10.7 \n",
|
||||
"\" style=\"fill:none;stroke:#000000;stroke-linecap:square;stroke-linejoin:miter;stroke-width:0.8;\"/>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" </g>\n",
|
||||
" <defs>\n",
|
||||
" <clipPath id=\"pf878579141\">\n",
|
||||
" <rect height=\"217.44\" width=\"334.8\" x=\"30.103125\" y=\"10.7\"/>\n",
|
||||
" </clipPath>\n",
|
||||
" </defs>\n",
|
||||
"</svg>\n"
|
||||
],
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"%matplotlib inline\n",
"%config InlineBackend.figure_formats = ['svg'] \n",
"import matplotlib.pyplot as plt\n",
"plt.plot((0,1,2,3,4,5),(0,3,4,4,3,0))"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
|
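The SVG above is simply the inline output of the notebook's single matplotlib cell. A minimal sketch (not part of the committed files) of how such a fixture could be regenerated, assuming nbformat, nbconvert's ExecutePreprocessor, a python3 kernel and matplotlib are available; the output file name is illustrative:

import nbformat
from nbformat import v4
from nbconvert.preprocessors import ExecutePreprocessor

# Rebuild the notebook shown above: one code cell that switches the inline
# backend to SVG and draws a simple line plot.
nb = v4.new_notebook(cells=[
    v4.new_code_cell(
        "%matplotlib inline\n"
        "%config InlineBackend.figure_formats = ['svg']\n"
        "import matplotlib.pyplot as plt\n"
        "plt.plot((0,1,2,3,4,5),(0,3,4,4,3,0))"
    )
])

# Executing the cell populates a display_data output whose image/svg+xml entry
# is the kind of <svg> markup embedded in the JSON above.
ExecutePreprocessor(timeout=60).preprocess(nb, {'metadata': {'path': '.'}})
nbformat.write(nb, 'svg.ipynb')  # assumed output path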
@ -0,0 +1,69 @@
|
|||
"""Tests for ASCIIDocExporter`"""
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (c) 2016, the IPython Development Team.
|
||||
#
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Imports
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
import re
|
||||
|
||||
from traitlets.config import Config
|
||||
|
||||
from .base import ExportersTestsBase
|
||||
from ..asciidoc import ASCIIDocExporter
|
||||
from ...tests.utils import onlyif_cmds_exist
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Class
|
||||
#-----------------------------------------------------------------------------
|
||||
in_regex = r"In\[(.*)\]:"
|
||||
out_regex = r"Out\[(.*)\]:"
|
||||
|
||||
class TestASCIIDocExporter(ExportersTestsBase):
|
||||
"""Tests for ASCIIDocExporter"""
|
||||
|
||||
exporter_class = ASCIIDocExporter
|
||||
|
||||
def test_constructor(self):
|
||||
"""
|
||||
Can an ASCIIDocExporter be constructed?
|
||||
"""
|
||||
ASCIIDocExporter()
|
||||
|
||||
|
||||
@onlyif_cmds_exist('pandoc')
|
||||
def test_export(self):
|
||||
"""
|
||||
Can an ASCIIDocExporter export something?
|
||||
"""
|
||||
(output, resources) = ASCIIDocExporter().from_filename(self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
assert re.findall(in_regex, output)
|
||||
assert re.findall(out_regex, output)
|
||||
|
||||
@onlyif_cmds_exist('pandoc')
|
||||
def test_export_no_prompt(self):
|
||||
"""
|
||||
Can an ASCIIDocExporter export something without prompts?
|
||||
"""
|
||||
no_prompt = {
|
||||
"TemplateExporter":{
|
||||
"exclude_input_prompt": True,
|
||||
"exclude_output_prompt": True,
|
||||
}
|
||||
}
|
||||
c_no_prompt = Config(no_prompt)
|
||||
exporter = ASCIIDocExporter(config=c_no_prompt)
|
||||
(output, resources) = exporter.from_filename(
|
||||
self._get_notebook(nb_name="prompt_numbers.ipynb"))
|
||||
|
||||
assert not re.findall(in_regex, output)
|
||||
assert not re.findall(out_regex, output)
|
118
venv/Lib/site-packages/nbconvert/exporters/tests/test_export.py
Normal file
|
@ -0,0 +1,118 @@
|
|||
"""
|
||||
Module with tests for export.py
|
||||
"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import nbformat
|
||||
import nbconvert.tests
|
||||
import pytest
|
||||
|
||||
from traitlets.config import Config
|
||||
|
||||
from .base import ExportersTestsBase
|
||||
from ..base import get_exporter, export, ExporterNameError, ExporterDisabledError, get_export_names
|
||||
from ..exporter import Exporter
|
||||
from ..python import PythonExporter
|
||||
|
||||
|
||||
class TestExport(ExportersTestsBase):
|
||||
"""Contains test functions for export.py"""
|
||||
|
||||
|
||||
def test_export_wrong_name(self):
    """
    Is the right error thrown when a bad exporter name is used?
    """
    with pytest.raises(ExporterNameError):
        exporter = get_exporter('not_a_name')
        export(exporter, self._get_notebook())
|
||||
|
||||
|
||||
def test_export_disabled(self):
|
||||
"""
|
||||
Trying to use a disabled exporter should raise ExporterDisabledError
|
||||
"""
|
||||
config = Config({'NotebookExporter': {'enabled': False}})
|
||||
with pytest.raises(ExporterDisabledError):
|
||||
get_exporter('notebook', config=config)
|
||||
|
||||
|
||||
def test_export_filename(self):
|
||||
"""
|
||||
Can a notebook be exported by filename?
|
||||
"""
|
||||
exporter = get_exporter('python')
|
||||
(output, resources) = export(exporter, self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
|
||||
def test_export_nbnode(self):
|
||||
"""
|
||||
Can a notebook be exported by a notebook node handle?
|
||||
"""
|
||||
with open(self._get_notebook(), 'r') as f:
|
||||
notebook = nbformat.read(f, 4)
|
||||
exporter = get_exporter('python')
|
||||
(output, resources) = export(exporter, notebook)
|
||||
assert len(output) > 0
|
||||
|
||||
|
||||
def test_export_filestream(self):
|
||||
"""
|
||||
Can a notebook be exported by a file stream?
|
||||
"""
|
||||
with open(self._get_notebook(), 'r') as f:
|
||||
exporter = get_exporter('python')
|
||||
(output, resources) = export(exporter, f)
|
||||
assert len(output) > 0
|
||||
|
||||
|
||||
def test_export_using_exporter(self):
|
||||
"""
|
||||
Can a notebook be exported using an instantiated exporter?
|
||||
"""
|
||||
(output, resources) = export(PythonExporter(), self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
|
||||
def test_export_using_exporter_class(self):
|
||||
"""
|
||||
Can a notebook be exported using an exporter class type?
|
||||
"""
|
||||
(output, resources) = export(PythonExporter, self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
|
||||
def test_export_resources(self):
|
||||
"""
|
||||
Can a notebook be exported along with a custom resources dict?
|
||||
"""
|
||||
(output, resources) = export(PythonExporter, self._get_notebook(), resources={})
|
||||
assert len(output) > 0
|
||||
|
||||
|
||||
def test_no_exporter(self):
    """
    Is the right error thrown if no exporter is provided?
    """
    with pytest.raises(TypeError):
        export(None, self._get_notebook())
||||
|
||||
def test_get_exporter_entrypoint():
|
||||
p = os.path.join(os.path.dirname(nbconvert.tests.__file__), 'exporter_entrypoint')
|
||||
sys.path.insert(0, p)
|
||||
assert 'entrypoint_test' in get_export_names()
|
||||
try:
|
||||
cls = get_exporter('entrypoint_test')
|
||||
assert issubclass(cls, Exporter), cls
|
||||
finally:
|
||||
del sys.path[0]
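A minimal usage sketch (not from the committed files) of the export API these tests exercise; the notebook path is an assumed example, not a fixture name taken from this commit.

from nbconvert.exporters import get_exporter, export

# Look up an exporter class by its registered name, then convert a notebook
# file; export() also accepts notebook nodes and open file handles, as the
# tests above demonstrate.
exporter_cls = get_exporter('python')
output, resources = export(exporter_cls, 'example.ipynb')  # assumed file name
print(output[:200])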
|
|
@ -0,0 +1,75 @@
|
|||
"""
|
||||
Module with tests for exporter.py
|
||||
"""
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (c) 2013, the IPython Development Team.
|
||||
#
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Imports
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
from traitlets.config import Config
|
||||
|
||||
from .base import ExportersTestsBase
|
||||
from ...preprocessors.base import Preprocessor
|
||||
from ..exporter import Exporter
|
||||
from ..base import get_export_names, ExporterDisabledError
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Class
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
class PizzaPreprocessor(Preprocessor):
|
||||
"""Simple preprocessor that adds a 'pizza' entry to the NotebookNode. Used
|
||||
to test Exporter.
|
||||
"""
|
||||
|
||||
def preprocess(self, nb, resources):
|
||||
nb['pizza'] = 'cheese'
|
||||
return nb, resources
|
||||
|
||||
|
||||
class TestExporter(ExportersTestsBase):
|
||||
"""Contains test functions for exporter.py"""
|
||||
|
||||
|
||||
def test_constructor(self):
|
||||
"""Can an Exporter be constructed?"""
|
||||
Exporter()
|
||||
|
||||
|
||||
def test_export(self):
|
||||
"""Can an Exporter export something?"""
|
||||
exporter = Exporter()
|
||||
(notebook, resources) = exporter.from_filename(self._get_notebook())
|
||||
assert isinstance(notebook, dict)
|
||||
|
||||
|
||||
def test_preprocessor(self):
|
||||
"""Do preprocessors work?"""
|
||||
config = Config({'Exporter': {'preprocessors': [PizzaPreprocessor()]}})
|
||||
exporter = Exporter(config=config)
|
||||
(notebook, resources) = exporter.from_filename(self._get_notebook())
|
||||
self.assertEqual(notebook['pizza'], 'cheese')
|
||||
|
||||
def test_get_export_names_disable(self):
|
||||
"""Can we disable a specific importer?"""
|
||||
config = Config({'Exporter': {'enabled': False}})
|
||||
export_names = get_export_names(config=config)
|
||||
self.assertFalse('Exporter' in export_names)
|
||||
|
||||
def test_get_export_names_disable_all_but_one(self):
|
||||
"""Can we disable all exporters then enable a single one"""
|
||||
config = Config({
|
||||
'Exporter': {'enabled': False},
|
||||
'NotebookExporter': {'enabled': True}
|
||||
})
|
||||
export_names = get_export_names(config=config)
|
||||
self.assertEqual(export_names, ['notebook'])
|
148
venv/Lib/site-packages/nbconvert/exporters/tests/test_html.py
Normal file
|
@ -0,0 +1,148 @@
|
|||
"""Tests for HTMLExporter"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import re
|
||||
|
||||
from .base import ExportersTestsBase
|
||||
from ..html import HTMLExporter
|
||||
|
||||
from traitlets.config import Config
|
||||
from nbformat import v4
|
||||
|
||||
|
||||
class TestHTMLExporter(ExportersTestsBase):
|
||||
"""Tests for HTMLExporter"""
|
||||
|
||||
exporter_class = HTMLExporter
|
||||
should_include_raw = ['html']
|
||||
|
||||
def test_constructor(self):
|
||||
"""
|
||||
Can an HTMLExporter be constructed?
|
||||
"""
|
||||
HTMLExporter()
|
||||
|
||||
|
||||
def test_export(self):
|
||||
"""
|
||||
Can an HTMLExporter export something?
|
||||
"""
|
||||
(output, resources) = HTMLExporter().from_filename(self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
|
||||
def test_export_classic(self):
|
||||
"""
|
||||
Can an HTMLExporter export using the 'classic' template?
|
||||
"""
|
||||
(output, resources) = HTMLExporter(template_name='classic').from_filename(self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
|
||||
def test_export_notebook(self):
|
||||
"""
|
||||
Can an HTMLExporter export using the 'lab' template?
|
||||
"""
|
||||
(output, resources) = HTMLExporter(template_name='lab').from_filename(self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
def test_prompt_number(self):
|
||||
"""
|
||||
Does HTMLExporter properly format input and output prompts?
|
||||
"""
|
||||
(output, resources) = HTMLExporter(template_name='lab').from_filename(
|
||||
self._get_notebook(nb_name="prompt_numbers.ipynb"))
|
||||
in_regex = r"In \[(.*)\]:"
|
||||
out_regex = r"Out\[(.*)\]:"
|
||||
|
||||
ins = ["2", "10", " ", " ", "0"]
|
||||
outs = ["10"]
|
||||
|
||||
assert re.findall(in_regex, output) == ins
|
||||
assert re.findall(out_regex, output) == outs
|
||||
|
||||
def test_prompt_number_exclusion(self):
"""
Does HTMLExporter exclude input and output prompts when configured to?
"""
|
||||
no_prompt_conf = Config(
|
||||
{"TemplateExporter":{
|
||||
"exclude_input_prompt": True,
|
||||
"exclude_output_prompt": True,
|
||||
}
|
||||
}
|
||||
)
|
||||
exporter = HTMLExporter(config=no_prompt_conf, template_name='lab')
|
||||
(output, resources) = exporter.from_filename(
|
||||
self._get_notebook(nb_name="prompt_numbers.ipynb"))
|
||||
in_regex = r"In \[(.*)\]:"
|
||||
out_regex = r"Out\[(.*)\]:"
|
||||
|
||||
assert not re.findall(in_regex, output)
|
||||
assert not re.findall(out_regex, output)
|
||||
|
||||
def test_png_metadata(self):
|
||||
"""
|
||||
Does HTMLExporter with the 'classic' template treat pngs with width/height metadata correctly?
|
||||
"""
|
||||
(output, resources) = HTMLExporter(template_name='classic').from_filename(
|
||||
self._get_notebook(nb_name="pngmetadata.ipynb"))
|
||||
check_for_png = re.compile(r'<img src="[^"]*?"([^>]*?)>')
|
||||
result = check_for_png.search(output)
|
||||
attr_string = result.group(1)
|
||||
assert 'width' in attr_string
|
||||
assert 'height' in attr_string
|
||||
|
||||
def test_javascript_output(self):
|
||||
nb = v4.new_notebook(
|
||||
cells=[
|
||||
v4.new_code_cell(
|
||||
outputs=[v4.new_output(
|
||||
output_type='display_data',
|
||||
data={
|
||||
'application/javascript': "javascript_output();"
|
||||
}
|
||||
)]
|
||||
)
|
||||
]
|
||||
)
|
||||
(output, resources) = HTMLExporter(template_name='classic').from_notebook_node(nb)
|
||||
self.assertIn('javascript_output', output)
|
||||
|
||||
def test_attachments(self):
|
||||
(output, resources) = HTMLExporter(template_name='classic').from_file(
|
||||
self._get_notebook(nb_name='attachment.ipynb')
|
||||
)
|
||||
check_for_png = re.compile(r'<img src="[^"]*?"([^>]*?)>')
|
||||
result = check_for_png.search(output)
|
||||
self.assertTrue(result.group(0).strip().startswith('<img src="data:image/png;base64,iVBOR'))
|
||||
self.assertTrue(result.group(1).strip().startswith('alt="image.png"'))
|
||||
|
||||
check_for_data = re.compile(r'<img src="(?P<url>[^"]*?)"')
|
||||
results = check_for_data.findall(output)
|
||||
assert results[0] != results[1], 'attachments only need to be unique within a cell'
|
||||
assert 'image/svg' in results[1], 'second image should use svg'
|
||||
|
||||
|
||||
def test_custom_filter_highlight_code(self):
|
||||
# Overwriting filters takes place at: Exporter.from_notebook_node
|
||||
nb = v4.new_notebook()
|
||||
nb.cells.append(v4.new_code_cell("some_text"))
|
||||
|
||||
def custom_highlight_code(source, language="python", metadata=None):
|
||||
return source + " ADDED_TEXT"
|
||||
|
||||
filters = {
|
||||
"highlight_code": custom_highlight_code
|
||||
}
|
||||
(output, resources) = HTMLExporter(template_name='classic', filters=filters).from_notebook_node(nb)
|
||||
self.assertTrue("ADDED_TEXT" in output)
|
||||
|
||||
def test_basic_name(self):
|
||||
"""
|
||||
Can an HTMLExporter export using the 'basic' template?
|
||||
"""
|
||||
(output, resources) = HTMLExporter(template_name='basic').from_filename(self._get_notebook())
|
||||
assert len(output) > 0
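The no-prompt configuration tested above, written out as a small conversion sketch (not from the committed files); 'example.ipynb' is an assumed input file.

from traitlets.config import Config
from nbconvert import HTMLExporter

# Exclude both prompt columns, as in the exclude_input_prompt /
# exclude_output_prompt test above, and render with the 'lab' template.
c = Config()
c.TemplateExporter.exclude_input_prompt = True
c.TemplateExporter.exclude_output_prompt = True

exporter = HTMLExporter(config=c, template_name='lab')
body, resources = exporter.from_filename('example.ipynb')  # assumed file name
with open('example.html', 'w') as f:
    f.write(body)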
|
191
venv/Lib/site-packages/nbconvert/exporters/tests/test_latex.py
Normal file
|
@ -0,0 +1,191 @@
|
|||
"""Tests for Latex exporter"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import os.path
|
||||
import textwrap
|
||||
import re
|
||||
|
||||
from .base import ExportersTestsBase
|
||||
from ..latex import LatexExporter
|
||||
from ...tests.utils import onlyif_cmds_exist
|
||||
|
||||
from traitlets.config import Config
|
||||
from nbformat import write
|
||||
from nbformat import v4
|
||||
from testpath.tempdir import TemporaryDirectory
|
||||
|
||||
from jinja2 import DictLoader
|
||||
|
||||
|
||||
current_dir = os.path.dirname(__file__)
|
||||
|
||||
|
||||
class TestLatexExporter(ExportersTestsBase):
|
||||
"""Contains test functions for latex.py"""
|
||||
|
||||
exporter_class = LatexExporter
|
||||
should_include_raw = ['latex']
|
||||
|
||||
def test_constructor(self):
|
||||
"""
|
||||
Can a LatexExporter be constructed?
|
||||
"""
|
||||
LatexExporter()
|
||||
|
||||
|
||||
@onlyif_cmds_exist('pandoc')
|
||||
def test_export(self):
|
||||
"""
|
||||
Can a LatexExporter export something?
|
||||
"""
|
||||
(output, resources) = LatexExporter().from_filename(self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
|
||||
@onlyif_cmds_exist('pandoc')
|
||||
def test_export_book(self):
|
||||
"""
|
||||
Can a LatexExporter export using the 'report' template?
|
||||
"""
|
||||
(output, resources) = LatexExporter(template_file='report').from_filename(self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
|
||||
@onlyif_cmds_exist('pandoc')
|
||||
def test_very_long_cells(self):
|
||||
"""
|
||||
Torture test that long cells do not cause issues
|
||||
"""
|
||||
lorem_ipsum_text = textwrap.dedent("""\
|
||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec
|
||||
dignissim, ipsum non facilisis tempus, dui felis tincidunt metus,
|
||||
nec pulvinar neque odio eget risus. Nulla nisi lectus, cursus
|
||||
suscipit interdum at, ultrices sit amet orci. Mauris facilisis
|
||||
imperdiet elit, vitae scelerisque ipsum dignissim non. Integer
|
||||
consequat malesuada neque sit amet pulvinar. Curabitur pretium
|
||||
ut turpis eget aliquet. Maecenas sagittis lacus sed lectus
|
||||
volutpat, eu adipiscing purus pulvinar. Maecenas consequat
|
||||
luctus urna, eget cursus quam mollis a. Aliquam vitae ornare
|
||||
erat, non hendrerit urna. Sed eu diam nec massa egestas pharetra
|
||||
at nec tellus. Fusce feugiat lacus quis urna sollicitudin volutpat.
|
||||
Quisque at sapien non nibh feugiat tempus ac ultricies purus.
|
||||
""")
|
||||
lorem_ipsum_text = lorem_ipsum_text.replace("\n"," ") + "\n\n"
|
||||
large_lorem_ipsum_text = "".join([lorem_ipsum_text]*3000)
|
||||
|
||||
notebook_name = "lorem_ipsum_long.ipynb"
|
||||
nb = v4.new_notebook(
|
||||
cells=[
|
||||
v4.new_markdown_cell(source=large_lorem_ipsum_text)
|
||||
]
|
||||
)
|
||||
|
||||
with TemporaryDirectory() as td:
|
||||
nbfile = os.path.join(td, notebook_name)
|
||||
with open(nbfile, 'w') as f:
|
||||
write(nb, f, 4)
|
||||
|
||||
(output, resources) = LatexExporter().from_filename(nbfile)
|
||||
assert len(output) > 0
|
||||
|
||||
@onlyif_cmds_exist('pandoc')
|
||||
def test_prompt_number_color(self):
|
||||
"""
|
||||
Does LatexExporter properly format input and output prompts in color?
|
||||
"""
|
||||
(output, resources) = LatexExporter().from_filename(
|
||||
self._get_notebook(nb_name="prompt_numbers.ipynb"))
|
||||
|
||||
in_regex = r"\\prompt\{In\}\{incolor\}\{(\d+|\s*)\}"
|
||||
out_regex = r"\\prompt\{Out\}\{outcolor\}\{(\d+|\s*)\}"
|
||||
|
||||
ins = ["2", "10", " ", " ", "0"]
|
||||
outs = ["10"]
|
||||
|
||||
assert re.findall(in_regex, output) == ins
|
||||
assert re.findall(out_regex, output) == outs
|
||||
|
||||
@onlyif_cmds_exist('pandoc')
|
||||
def test_prompt_number_color_ipython(self):
|
||||
"""
|
||||
Does LatexExporter properly format input and output prompts in color?
|
||||
|
||||
Uses an in-memory LaTeX template to load style_ipython as the cell style.
|
||||
"""
|
||||
my_loader_tplx = DictLoader({'my_template':
|
||||
r"""
|
||||
((* extends 'style_ipython.tex.j2' *))
|
||||
|
||||
((* block docclass *))
|
||||
\documentclass[11pt]{article}
|
||||
((* endblock docclass *))
|
||||
"""})
|
||||
|
||||
class MyExporter(LatexExporter):
|
||||
template_file = 'my_template'
|
||||
|
||||
(output, resources) = MyExporter(extra_loaders=[my_loader_tplx]).from_filename(
|
||||
self._get_notebook(nb_name="prompt_numbers.ipynb"))
|
||||
|
||||
in_regex = r"In \[\{\\color\{incolor\}(.*)\}\]:"
|
||||
out_regex = r"Out\[\{\\color\{outcolor\}(.*)\}\]:"
|
||||
|
||||
ins = ["2", "10", " ", " ", "0"]
|
||||
outs = ["10"]
|
||||
|
||||
assert re.findall(in_regex, output) == ins
|
||||
assert re.findall(out_regex, output) == outs
|
||||
|
||||
@onlyif_cmds_exist('pandoc')
|
||||
def test_no_prompt_yes_input(self):
|
||||
no_prompt = {
|
||||
"TemplateExporter":{
|
||||
"exclude_input_prompt": True,
|
||||
"exclude_output_prompt": True,
|
||||
}
|
||||
}
|
||||
c_no_prompt = Config(no_prompt)
|
||||
|
||||
exporter = LatexExporter(config=c_no_prompt)
|
||||
(output, resources) = exporter.from_filename(
|
||||
self._get_notebook(nb_name="prompt_numbers.ipynb"))
|
||||
assert "shape" in output
|
||||
assert "evs" in output
|
||||
|
||||
@onlyif_cmds_exist('pandoc', 'inkscape')
|
||||
def test_svg(self):
|
||||
"""
|
||||
Can a LatexExporter export when it receives raw binary strings from svg?
|
||||
"""
|
||||
filename = os.path.join(current_dir, 'files', 'svg.ipynb')
|
||||
(output, resources) = LatexExporter().from_filename(filename)
|
||||
assert len(output) > 0
|
||||
|
||||
def test_in_memory_template_tplx(self):
|
||||
# Loads an in-memory LaTeX template (.tplx) using jinja2.DictLoader
|
||||
# creates a class that uses this template with the template_file argument
|
||||
# converts an empty notebook using this mechanism
|
||||
my_loader_tplx = DictLoader({'my_template': "{%- extends 'index' -%}"})
|
||||
|
||||
class MyExporter(LatexExporter):
|
||||
template_file = 'my_template'
|
||||
|
||||
exporter = MyExporter(extra_loaders=[my_loader_tplx])
|
||||
nb = v4.new_notebook()
|
||||
out, resources = exporter.from_notebook_node(nb)
|
||||
|
||||
def test_custom_filter_highlight_code(self):
|
||||
# Overwriting filters takes place at: Exporter.from_notebook_node
|
||||
nb = v4.new_notebook()
|
||||
nb.cells.append(v4.new_code_cell("some_text"))
|
||||
|
||||
def custom_highlight_code(source, language="python", metadata=None, strip_verbatim=False):
|
||||
return source + " ADDED_TEXT"
|
||||
|
||||
filters = {
|
||||
"highlight_code": custom_highlight_code
|
||||
}
|
||||
(output, resources) = LatexExporter(filters=filters).from_notebook_node(nb)
|
||||
self.assertTrue("ADDED_TEXT" in output)
|
|
@ -0,0 +1,40 @@
|
|||
"""Tests for MarkdownExporter"""
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (c) 2013, the IPython Development Team.
|
||||
#
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Imports
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
from .base import ExportersTestsBase
|
||||
from ..markdown import MarkdownExporter
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Class
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
class TestMarkdownExporter(ExportersTestsBase):
|
||||
"""Tests for MarkdownExporter"""
|
||||
|
||||
exporter_class = MarkdownExporter
|
||||
should_include_raw = ['markdown', 'html']
|
||||
|
||||
def test_constructor(self):
|
||||
"""
|
||||
Can a MarkdownExporter be constructed?
|
||||
"""
|
||||
MarkdownExporter()
|
||||
|
||||
|
||||
def test_export(self):
|
||||
"""
|
||||
Can a MarkdownExporter export something?
|
||||
"""
|
||||
(output, resources) = MarkdownExporter().from_filename(self._get_notebook())
|
||||
assert len(output) > 0
|
|
@ -0,0 +1,39 @@
|
|||
"""Tests for notebook.py"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import json
|
||||
|
||||
from .base import ExportersTestsBase
|
||||
from ..notebook import NotebookExporter
|
||||
|
||||
from nbformat import validate
|
||||
from nbconvert.tests.base import assert_big_text_equal
|
||||
|
||||
class TestNotebookExporter(ExportersTestsBase):
|
||||
"""Contains test functions for notebook.py"""
|
||||
|
||||
exporter_class = NotebookExporter
|
||||
|
||||
def test_export(self):
|
||||
"""
|
||||
Does the NotebookExporter return the file unchanged?
|
||||
"""
|
||||
with open(self._get_notebook()) as f:
|
||||
file_contents = f.read()
|
||||
(output, resources) = self.exporter_class().from_filename(self._get_notebook())
|
||||
assert len(output) > 0
|
||||
assert_big_text_equal(output, file_contents)
|
||||
|
||||
def test_downgrade_3(self):
|
||||
exporter = self.exporter_class(nbformat_version=3)
|
||||
(output, resources) = exporter.from_filename(self._get_notebook())
|
||||
nb = json.loads(output)
|
||||
validate(nb)
|
||||
|
||||
def test_downgrade_2(self):
|
||||
exporter = self.exporter_class(nbformat_version=2)
|
||||
(output, resources) = exporter.from_filename(self._get_notebook())
|
||||
nb = json.loads(output)
|
||||
self.assertEqual(nb['nbformat'], 2)
|
42
venv/Lib/site-packages/nbconvert/exporters/tests/test_pdf.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
"""Tests for PDF export"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from testpath import tempdir
|
||||
|
||||
from .base import ExportersTestsBase
|
||||
from ..pdf import PDFExporter
|
||||
from ...tests.utils import onlyif_cmds_exist
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Class
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
class TestPDF(ExportersTestsBase):
|
||||
"""Test PDF export"""
|
||||
|
||||
exporter_class = PDFExporter
|
||||
|
||||
def test_constructor(self):
|
||||
"""Can a PDFExporter be constructed?"""
|
||||
self.exporter_class()
|
||||
|
||||
|
||||
@onlyif_cmds_exist('xelatex', 'pandoc')
|
||||
def test_export(self):
|
||||
"""Smoke test PDFExporter"""
|
||||
with tempdir.TemporaryDirectory() as td:
|
||||
file_name = os.path.basename(self._get_notebook())
|
||||
newpath = os.path.join(td, file_name)
|
||||
shutil.copy(self._get_notebook(), newpath)
|
||||
(output, resources) = self.exporter_class(latex_count=1).from_filename(newpath)
|
||||
self.assertIsInstance(output, bytes)
|
||||
assert len(output) > 0
|
||||
# all temporary files should be cleaned up
|
||||
assert {file_name} == set(os.listdir(td))
|
|
@ -0,0 +1,24 @@
|
|||
"""Tests for PythonExporter"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from .base import ExportersTestsBase
|
||||
from ..python import PythonExporter
|
||||
|
||||
|
||||
class TestPythonExporter(ExportersTestsBase):
|
||||
"""Tests for PythonExporter"""
|
||||
|
||||
exporter_class = PythonExporter
|
||||
should_include_raw = ['python']
|
||||
|
||||
def test_constructor(self):
|
||||
"""Can a PythonExporter be constructed?"""
|
||||
self.exporter_class()
|
||||
|
||||
def test_export(self):
|
||||
"""Can a PythonExporter export something?"""
|
||||
(output, resources) = self.exporter_class().from_filename(self._get_notebook())
|
||||
self.assertIn("coding: utf-8", output)
|
||||
self.assertIn("#!/usr/bin/env python", output)
|
73
venv/Lib/site-packages/nbconvert/exporters/tests/test_rst.py
Normal file
|
@ -0,0 +1,73 @@
|
|||
"""Tests for RSTExporter"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import io
|
||||
import pytest
|
||||
import re
|
||||
|
||||
import nbformat
|
||||
from nbformat import v4
|
||||
|
||||
from .base import ExportersTestsBase
|
||||
from ..rst import RSTExporter
|
||||
from ...tests.utils import onlyif_cmds_exist
|
||||
|
||||
|
||||
class TestRSTExporter(ExportersTestsBase):
|
||||
"""Tests for RSTExporter"""
|
||||
|
||||
exporter_class = RSTExporter
|
||||
should_include_raw = ['rst']
|
||||
|
||||
def test_constructor(self):
|
||||
"""
|
||||
Can an RSTExporter be constructed?
|
||||
"""
|
||||
RSTExporter()
|
||||
|
||||
|
||||
@onlyif_cmds_exist('pandoc')
|
||||
def test_export(self):
|
||||
"""
|
||||
Can an RSTExporter export something?
|
||||
"""
|
||||
(output, resources) = RSTExporter().from_filename(self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
@onlyif_cmds_exist('pandoc')
|
||||
def test_empty_code_cell(self):
|
||||
"""No empty code cells in rst"""
|
||||
nbname = self._get_notebook()
|
||||
with io.open(nbname, encoding='utf8') as f:
|
||||
nb = nbformat.read(f, 4)
|
||||
|
||||
exporter = self.exporter_class()
|
||||
|
||||
(output, resources) = exporter.from_notebook_node(nb)
|
||||
# add an empty code cell
|
||||
nb.cells.append(
|
||||
v4.new_code_cell(source="")
|
||||
)
|
||||
(output2, resources) = exporter.from_notebook_node(nb)
|
||||
# adding an empty code cell shouldn't change output
|
||||
self.assertEqual(output.strip(), output2.strip())
|
||||
|
||||
@onlyif_cmds_exist('pandoc')
|
||||
def test_png_metadata(self):
|
||||
"""
|
||||
Does RSTExporter treat pngs with width/height metadata correctly?
|
||||
"""
|
||||
(output, resources) = RSTExporter().from_filename(
|
||||
self._get_notebook(nb_name="pngmetadata.ipynb"))
|
||||
assert len(output) > 0
|
||||
check_for_png = re.compile(
|
||||
r'.. image::.*?\n\s+(.*?)\n\s*\n',
|
||||
re.DOTALL)
|
||||
result = check_for_png.search(output)
|
||||
assert result is not None
|
||||
attr_string = result.group(1)
|
||||
assert ':width:' in attr_string
|
||||
assert ':height:' in attr_string
|
||||
assert 'px' in attr_string
|
|
@ -0,0 +1,73 @@
|
|||
"""Tests for ScriptExporter"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from nbformat import v4
|
||||
import nbconvert
|
||||
|
||||
from .base import ExportersTestsBase
|
||||
from ..script import ScriptExporter
|
||||
|
||||
|
||||
class TestScriptExporter(ExportersTestsBase):
|
||||
"""Tests for ScriptExporter"""
|
||||
|
||||
exporter_class = ScriptExporter
|
||||
|
||||
def test_constructor(self):
|
||||
"""Construct ScriptExporter"""
|
||||
e = self.exporter_class()
|
||||
|
||||
def test_export(self):
|
||||
"""ScriptExporter can export something"""
|
||||
(output, resources) = self.exporter_class().from_filename(self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
def test_export_python(self):
|
||||
"""delegate to custom exporter from language_info"""
|
||||
exporter = self.exporter_class()
|
||||
|
||||
pynb = v4.new_notebook()
|
||||
(output, resources) = self.exporter_class().from_notebook_node(pynb)
|
||||
self.assertNotIn('# coding: utf-8', output)
|
||||
|
||||
pynb.metadata.language_info = {
|
||||
'name': 'python',
|
||||
'mimetype': 'text/x-python',
|
||||
'nbconvert_exporter': 'python',
|
||||
}
|
||||
(output, resources) = self.exporter_class().from_notebook_node(pynb)
|
||||
self.assertIn('# coding: utf-8', output)
|
||||
|
||||
def test_export_config_transfer(self):
|
||||
"""delegate config to custom exporter from language_info"""
|
||||
nb = v4.new_notebook()
|
||||
nb.metadata.language_info = {
|
||||
'name': 'python',
|
||||
'mimetype': 'text/x-python',
|
||||
'nbconvert_exporter': 'python',
|
||||
}
|
||||
|
||||
exporter = self.exporter_class()
|
||||
exporter.from_notebook_node(nb)
|
||||
assert exporter._exporters['python'] != exporter
|
||||
assert exporter._exporters['python'].config == exporter.config
|
||||
|
||||
def test_script_exporter_entrypoint():
|
||||
nb = v4.new_notebook()
|
||||
nb.metadata.language_info = {
|
||||
'name': 'dummy',
|
||||
'mimetype': 'text/x-dummy',
|
||||
}
|
||||
|
||||
p = os.path.join(os.path.dirname(nbconvert.tests.__file__), 'exporter_entrypoint')
|
||||
sys.path.insert(0, p)
|
||||
try:
|
||||
output, _ = ScriptExporter().from_notebook_node(nb)
|
||||
assert output == 'dummy-script-exported'
|
||||
finally:
|
||||
sys.path.remove(p)
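The language_info delegation covered by test_export_python above, as a standalone sketch (not from the committed files).

from nbformat import v4
from nbconvert.exporters.script import ScriptExporter

nb = v4.new_notebook()
# ScriptExporter delegates to the exporter named in nbconvert_exporter,
# here the PythonExporter, which prepends the coding header checked below.
nb.metadata.language_info = {
    'name': 'python',
    'mimetype': 'text/x-python',
    'nbconvert_exporter': 'python',
}
output, _ = ScriptExporter().from_notebook_node(nb)
assert '# coding: utf-8' in output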
|
|
@ -0,0 +1,81 @@
|
|||
"""Tests for SlidesExporter"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from nbformat import v4 as nbformat
|
||||
from .base import ExportersTestsBase
|
||||
from ..slides import SlidesExporter, _RevealMetadataPreprocessor
|
||||
|
||||
|
||||
class TestSlidesExporter(ExportersTestsBase):
|
||||
"""Tests for SlidesExporter"""
|
||||
|
||||
exporter_class = SlidesExporter
|
||||
should_include_raw = ['html']
|
||||
|
||||
def test_constructor(self):
|
||||
"""
|
||||
Can a SlidesExporter be constructed?
|
||||
"""
|
||||
SlidesExporter()
|
||||
|
||||
|
||||
def test_export(self):
|
||||
"""
|
||||
Can a SlidesExporter export something?
|
||||
"""
|
||||
(output, resources) = SlidesExporter().from_filename(self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
|
||||
def test_export_reveal(self):
|
||||
"""
|
||||
Can a SlidesExporter export using the 'reveal' template?
|
||||
"""
|
||||
(output, resources) = SlidesExporter().from_filename(self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
def build_notebook(self):
|
||||
"""Build a reveal slides notebook in memory for use with tests."""
|
||||
outputs = [nbformat.new_output(output_type="stream", name="stdout", text="a")]
|
||||
|
||||
slide_metadata = {'slideshow' : {'slide_type': 'slide'}}
|
||||
subslide_metadata = {'slideshow' : {'slide_type': 'subslide'}}
|
||||
fragment_metadata = {'slideshow' : {'slide_type': 'fragment'}}
|
||||
|
||||
cells=[nbformat.new_code_cell(source="", execution_count=1, outputs=outputs),
|
||||
nbformat.new_markdown_cell(source="", metadata=slide_metadata),
|
||||
nbformat.new_code_cell(source="", execution_count=2, outputs=outputs),
|
||||
nbformat.new_markdown_cell(source="", metadata=slide_metadata),
|
||||
nbformat.new_markdown_cell(source="", metadata=subslide_metadata),
|
||||
nbformat.new_markdown_cell(source="", metadata=fragment_metadata),
|
||||
nbformat.new_code_cell(source="", execution_count=1, outputs=outputs)]
|
||||
|
||||
return nbformat.new_notebook(cells=cells)
|
||||
|
||||
def test_metadata_preprocessor(self):
|
||||
preprocessor = _RevealMetadataPreprocessor()
|
||||
nb = self.build_notebook()
|
||||
nb, resources = preprocessor.preprocess(nb)
|
||||
cells = nb.cells
|
||||
|
||||
# Make sure correct metadata tags are available on every cell.
|
||||
for cell in cells:
|
||||
assert 'slide_type' in cell.metadata
|
||||
|
||||
# Make sure slide end is only applied to the cells preceding slide
|
||||
# cells.
|
||||
assert not cells[1].metadata.get('slide_end', False)
|
||||
|
||||
# Verify 'slide-end'
|
||||
assert cells[0].metadata['slide_end']
|
||||
assert cells[2].metadata['slide_end']
|
||||
assert cells[2].metadata['subslide_end']
|
||||
|
||||
assert not cells[3].metadata.get('slide_end', False)
|
||||
assert cells[3].metadata['subslide_end']
|
||||
|
||||
assert cells[-1].metadata['fragment_end']
|
||||
assert cells[-1].metadata['subslide_end']
|
||||
assert cells[-1].metadata['slide_end']
|
|
@ -0,0 +1,591 @@
|
|||
"""
|
||||
Module with tests for templateexporter.py
|
||||
"""
|
||||
|
||||
# Copyright (c) Jupyter Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import os
|
||||
import json
|
||||
|
||||
from traitlets import default
|
||||
from traitlets.config import Config
|
||||
from jinja2 import DictLoader, TemplateNotFound
|
||||
from nbformat import v4
|
||||
from unittest.mock import patch
|
||||
from concurrent.futures import ProcessPoolExecutor
|
||||
|
||||
from .base import ExportersTestsBase
|
||||
from .cheese import CheesePreprocessor
|
||||
from ..templateexporter import TemplateExporter
|
||||
from ..rst import RSTExporter
|
||||
from ..html import HTMLExporter
|
||||
from ..markdown import MarkdownExporter
|
||||
from testpath import tempdir
|
||||
|
||||
import pytest
|
||||
|
||||
raw_template = """{%- extends 'index.rst.j2' -%}
|
||||
{%- block in_prompt -%}
|
||||
blah
|
||||
{%- endblock in_prompt -%}
|
||||
"""
|
||||
|
||||
class SampleExporter(TemplateExporter):
|
||||
"""
|
||||
Exports a Python code file.
|
||||
"""
|
||||
@default('file_extension')
|
||||
def _file_extension_default(self):
|
||||
return '.py'
|
||||
|
||||
@default('template_name')
|
||||
def _template_name_default(self):
|
||||
return 'python'
|
||||
|
||||
class TestExporter(ExportersTestsBase):
|
||||
"""Contains test functions for exporter.py"""
|
||||
|
||||
|
||||
def test_constructor(self):
|
||||
"""
|
||||
Can a TemplateExporter be constructed?
|
||||
"""
|
||||
TemplateExporter()
|
||||
|
||||
|
||||
def test_export(self):
|
||||
"""
|
||||
Can a TemplateExporter export something?
|
||||
"""
|
||||
exporter = self._make_exporter()
|
||||
(output, resources) = exporter.from_filename(self._get_notebook())
|
||||
assert len(output) > 0
|
||||
|
||||
|
||||
def test_extract_outputs(self):
|
||||
"""
|
||||
If the ExtractOutputPreprocessor is enabled, are outputs extracted?
|
||||
"""
|
||||
config = Config({'ExtractOutputPreprocessor': {'enabled': True}})
|
||||
exporter = self._make_exporter(config=config)
|
||||
(output, resources) = exporter.from_filename(self._get_notebook())
|
||||
assert resources is not None
|
||||
assert isinstance(resources['outputs'], dict)
|
||||
assert len(resources['outputs']) > 0
|
||||
|
||||
|
||||
def test_preprocessor_class(self):
|
||||
"""
|
||||
Can a preprocessor be added to the preprocessors list by class type?
|
||||
"""
|
||||
config = Config({'Exporter': {'preprocessors': [CheesePreprocessor]}})
|
||||
exporter = self._make_exporter(config=config)
|
||||
(output, resources) = exporter.from_filename(self._get_notebook())
|
||||
assert resources is not None
|
||||
assert resources['cheese'] == 'real'
|
||||
|
||||
|
||||
def test_preprocessor_instance(self):
|
||||
"""
|
||||
Can a preprocessor be added to the preprocessors list by instance?
|
||||
"""
|
||||
config = Config({'Exporter': {'preprocessors': [CheesePreprocessor()]}})
|
||||
exporter = self._make_exporter(config=config)
|
||||
(output, resources) = exporter.from_filename(self._get_notebook())
|
||||
assert resources is not None
|
||||
assert resources['cheese'] == 'real'
|
||||
|
||||
|
||||
def test_preprocessor_dottedobjectname(self):
|
||||
"""
|
||||
Can a preprocessor be added to the preprocessors list by dotted object name?
|
||||
"""
|
||||
config = Config({'Exporter': {'preprocessors': ['nbconvert.exporters.tests.cheese.CheesePreprocessor']}})
|
||||
exporter = self._make_exporter(config=config)
|
||||
(output, resources) = exporter.from_filename(self._get_notebook())
|
||||
assert resources is not None
|
||||
assert resources['cheese'] == 'real'
|
||||
|
||||
|
||||
def test_preprocessor_via_method(self):
|
||||
"""
|
||||
Can a preprocessor be added via the Exporter convenience method?
|
||||
"""
|
||||
exporter = self._make_exporter()
|
||||
exporter.register_preprocessor(CheesePreprocessor, enabled=True)
|
||||
(output, resources) = exporter.from_filename(self._get_notebook())
|
||||
assert resources is not None
|
||||
assert resources['cheese'] == 'real'
|
||||
|
||||
def test_pickle(self):
|
||||
"""
|
||||
Can exporters be pickled & called across processes?
|
||||
"""
|
||||
exporter = self._make_exporter()
|
||||
executor = ProcessPoolExecutor()
|
||||
(output, resources) = executor.submit(exporter.from_filename, self._get_notebook()).result()
|
||||
assert len(output) > 0
|
||||
|
||||
def test_absolute_template_file(self):
|
||||
with tempdir.TemporaryDirectory() as td:
|
||||
template = os.path.join(td, 'abstemplate.ext.j2')
|
||||
test_output = 'absolute!'
|
||||
with open(template, 'w') as f:
|
||||
f.write(test_output)
|
||||
config = Config()
|
||||
config.TemplateExporter.template_file = template
|
||||
exporter = self._make_exporter(config=config)
|
||||
assert exporter.template.filename == template
|
||||
assert os.path.dirname(template) in exporter.template_paths
|
||||
|
||||
def test_relative_template_file(self):
|
||||
with tempdir.TemporaryWorkingDirectory() as td:
|
||||
with patch('os.getcwd', return_value=os.path.abspath(td)):
|
||||
template = os.path.join('relative', 'relative_template.ext.j2')
|
||||
template_abs = os.path.abspath(os.path.join(td, template))
|
||||
os.mkdir(os.path.dirname(template_abs))
|
||||
test_output = 'relative!'
|
||||
with open(template_abs, 'w') as f:
|
||||
f.write(test_output)
|
||||
config = Config()
|
||||
config.TemplateExporter.template_file = template
|
||||
exporter = self._make_exporter(config=config)
|
||||
assert os.path.abspath(exporter.template.filename) == template_abs
|
||||
assert os.path.dirname(template_abs) in [os.path.abspath(d) for d in exporter.template_paths]
|
||||
|
||||
def test_absolute_template_file_compatibility(self):
|
||||
with tempdir.TemporaryDirectory() as td:
|
||||
template = os.path.join(td, 'abstemplate.tpl')
|
||||
test_output = 'absolute!'
|
||||
with open(template, 'w') as f:
|
||||
f.write(test_output)
|
||||
config = Config()
|
||||
config.TemplateExporter.template_file = template
|
||||
with pytest.warns(DeprecationWarning):
|
||||
exporter = self._make_exporter(config=config)
|
||||
assert exporter.template.filename == template
|
||||
assert os.path.dirname(template) in exporter.template_paths
|
||||
|
||||
def test_relative_template_file_compatibility(self):
|
||||
with tempdir.TemporaryWorkingDirectory() as td:
|
||||
with patch('os.getcwd', return_value=os.path.abspath(td)):
|
||||
template = os.path.join('relative', 'relative_template.tpl')
|
||||
template_abs = os.path.abspath(os.path.join(td, template))
|
||||
os.mkdir(os.path.dirname(template_abs))
|
||||
test_output = 'relative!'
|
||||
with open(template_abs, 'w') as f:
|
||||
f.write(test_output)
|
||||
config = Config()
|
||||
config.TemplateExporter.template_file = template
|
||||
with pytest.warns(DeprecationWarning):
|
||||
exporter = self._make_exporter(config=config)
|
||||
assert os.path.abspath(exporter.template.filename) == template_abs
|
||||
assert os.path.dirname(template_abs) in [os.path.abspath(d) for d in exporter.template_paths]
|
||||
|
||||
def test_absolute_template_name_tpl_compatibility(self):
|
||||
with tempdir.TemporaryDirectory() as td:
|
||||
template = os.path.join(td, 'abstemplate.tpl')
|
||||
test_output = 'absolute!'
|
||||
with open(template, 'w') as f:
|
||||
f.write(test_output)
|
||||
config = Config()
|
||||
# We're setting the template_name instead of the template_file
|
||||
config.TemplateExporter.template_name = template
|
||||
with pytest.warns(DeprecationWarning):
|
||||
exporter = self._make_exporter(config=config)
|
||||
assert exporter.template.filename == template
|
||||
assert os.path.dirname(template) in exporter.template_paths
|
||||
|
||||
# Can't use @pytest.mark.parametrize without removing all self.assert calls in all tests... repeating some here
|
||||
def absolute_template_name_5x_compatibility_test(self, template, mimetype=None):
|
||||
config = Config()
|
||||
# We're setting the template_name instead of the template_file
|
||||
config.TemplateExporter.template_name = template
|
||||
with pytest.warns(DeprecationWarning):
|
||||
exporter = self._make_exporter(config=config)
|
||||
template_dir, template_file = os.path.split(exporter.template.filename)
|
||||
_, compat_dir = os.path.split(template_dir)
|
||||
assert compat_dir == 'compatibility'
|
||||
assert template_file == template + '.tpl'
|
||||
assert template_dir in exporter.template_paths
|
||||
|
||||
def test_absolute_template_name_5x_compatibility_full(self):
|
||||
self.absolute_template_name_5x_compatibility_test('full', 'text/html')
|
||||
|
||||
def test_absolute_template_name_5x_compatibility_display_priority(self):
|
||||
self.absolute_template_name_5x_compatibility_test('display_priority')
|
||||
|
||||
# Can't use @pytest.mark.parametrize without removing all self.assert calls in all tests... repeating some here
|
||||
def relative_template_test(self, template):
|
||||
with tempdir.TemporaryWorkingDirectory() as td:
|
||||
with patch('os.getcwd', return_value=os.path.abspath(td)):
|
||||
template_abs = os.path.abspath(os.path.join(td, template))
|
||||
dirname = os.path.dirname(template_abs)
|
||||
if not os.path.exists(dirname):
|
||||
os.mkdir(dirname)
|
||||
test_output = 'relative!'
|
||||
with open(template_abs, 'w') as f:
|
||||
f.write(test_output)
|
||||
config = Config()
|
||||
# We're setting the template_name instead of the template_file
|
||||
config.TemplateExporter.template_name = template
|
||||
with pytest.warns(DeprecationWarning):
|
||||
exporter = self._make_exporter(config=config)
|
||||
assert os.path.abspath(exporter.template.filename) == template_abs
|
||||
assert os.path.dirname(template_abs) in [os.path.abspath(d) for d in exporter.template_paths]
|
||||
|
||||
def test_relative_template_name_tpl_compatibility_local(self):
|
||||
self.relative_template_test('relative_template.tpl')
|
||||
|
||||
def test_relative_template_name_tpl_compatibility_nested(self):
|
||||
self.relative_template_test(os.path.join('relative', 'relative_template.tpl'))
|
||||
|
||||
def test_relative_template_name_tpl_compatibility_dot(self):
|
||||
self.relative_template_test(os.path.join('.', 'relative_template.tpl'))
|
||||
|
||||
def test_relative_template_name_tpl_compatibility_dot_nested(self):
|
||||
self.relative_template_test(os.path.join('.', 'relative', 'relative_template.tpl'))
|
||||
|
||||
def test_absolute_template_dir(self):
|
||||
with tempdir.TemporaryDirectory() as td:
|
||||
template = 'mytemplate'
|
||||
template_file = os.path.join(td, template, 'index.py.j2')
|
||||
template_dir = os.path.dirname(template_file)
|
||||
os.mkdir(template_dir)
|
||||
test_output = 'absolute!'
|
||||
with open(template_file, 'w') as f:
|
||||
f.write(test_output)
|
||||
config = Config()
|
||||
config.TemplateExporter.template_name = template
|
||||
config.TemplateExporter.extra_template_basedirs = [td]
|
||||
exporter = self._make_exporter(config=config)
|
||||
assert exporter.template.filename == template_file
|
||||
assert exporter.template_name == template
|
||||
assert os.path.join(td, template) in exporter.template_paths
|
||||
|
||||
def test_local_template_dir(self):
|
||||
with tempdir.TemporaryWorkingDirectory() as td:
|
||||
with patch('os.getcwd', return_value=os.path.abspath(td)):
|
||||
template = 'mytemplate'
|
||||
template_file = os.path.join(template, 'index.py.j2')
|
||||
template_abs = os.path.abspath(os.path.join(td, template_file))
|
||||
template_conf = os.path.abspath(os.path.join(td, template, 'conf.json'))
|
||||
os.mkdir(os.path.dirname(template_abs))
|
||||
test_output = 'local!'
|
||||
with open(template_abs, 'w') as f:
|
||||
f.write(test_output)
|
||||
with open(template_conf, 'w') as f:
|
||||
# Mimic having a superset of accepted mimetypes
|
||||
f.write(json.dumps(Config(mimetypes={
|
||||
"text/x-python": True,
|
||||
"text/html": True,
|
||||
}
|
||||
)))
|
||||
config = Config()
|
||||
config.TemplateExporter.template_name = template
|
||||
exporter = self._make_exporter(config=config)
|
||||
assert os.path.abspath(exporter.template.filename) == template_abs
|
||||
assert exporter.template_name == template
|
||||
assert os.path.join(td, template) in exporter.template_paths
|
||||
|
||||
def test_local_template_file_extending_lab(self):
|
||||
template_file = os.path.join(self._get_files_path(), 'lablike.html.j2')
|
||||
exporter = HTMLExporter(template_file=template_file, template_name='lab')
|
||||
nb = v4.new_notebook()
|
||||
nb.cells.append(v4.new_code_cell("some_text"))
|
||||
output, resources = exporter.from_notebook_node(nb)
|
||||
assert "UNIQUE" in output
|
||||
|
||||
|
||||
def test_raw_template_attr(self):
|
||||
"""
|
||||
Verify that you can assign an in-memory template string by overwriting
`raw_template` as a simple (non-traitlet) attribute
|
||||
"""
|
||||
nb = v4.new_notebook()
|
||||
nb.cells.append(v4.new_code_cell("some_text"))
|
||||
|
||||
class AttrExporter(TemplateExporter):
|
||||
raw_template = raw_template
|
||||
|
||||
exporter_attr = AttrExporter(template_name='rst')
|
||||
output_attr, _ = exporter_attr.from_notebook_node(nb)
|
||||
assert "blah" in output_attr
|
||||
|
||||
def test_raw_template_init(self):
|
||||
"""
|
||||
Test that template_file and raw_template traitlets play nicely together.
|
||||
- source assigns template_file default first, then raw_template
|
||||
- checks that the raw_template overrules template_file if set
|
||||
- checks that once raw_template is set to '', template_file returns
|
||||
"""
|
||||
nb = v4.new_notebook()
|
||||
nb.cells.append(v4.new_code_cell("some_text"))
|
||||
|
||||
class AttrExporter(RSTExporter):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.raw_template = raw_template
|
||||
|
||||
exporter_init = AttrExporter()
|
||||
output_init, _ = exporter_init.from_notebook_node(nb)
|
||||
assert "blah" in output_init
|
||||
exporter_init.raw_template = ''
|
||||
assert exporter_init.template_file == "index.rst.j2"
|
||||
output_init, _ = exporter_init.from_notebook_node(nb)
|
||||
assert "blah" not in output_init
|
||||
|
||||
def test_raw_template_dynamic_attr(self):
|
||||
"""
|
||||
Test that template_file and raw_template traitlets play nicely together.
|
||||
- source assigns template_file default first, then raw_template
|
||||
- checks that the raw_template overrules template_file if set
|
||||
- checks that once raw_template is set to '', template_file returns
|
||||
"""
|
||||
nb = v4.new_notebook()
|
||||
nb.cells.append(v4.new_code_cell("some_text"))
|
||||
|
||||
class AttrDynamicExporter(TemplateExporter):
|
||||
@default('default_template_file')
|
||||
def _template_file_default(self):
|
||||
return "index.rst.j2"
|
||||
|
||||
@default('raw_template')
|
||||
def _raw_template_default(self):
|
||||
return raw_template
|
||||
|
||||
exporter_attr_dynamic = AttrDynamicExporter(template_name='rst')
|
||||
output_attr_dynamic, _ = exporter_attr_dynamic.from_notebook_node(nb)
|
||||
assert "blah" in output_attr_dynamic
|
||||
exporter_attr_dynamic.raw_template = ''
|
||||
assert exporter_attr_dynamic.template_file == "index.rst.j2"
|
||||
output_attr_dynamic, _ = exporter_attr_dynamic.from_notebook_node(nb)
|
||||
assert "blah" not in output_attr_dynamic
|
||||
|
||||
def test_raw_template_dynamic_attr_reversed(self):
|
||||
"""
|
||||
Test that template_file and raw_template traitlets play nicely together.
|
||||
- source assigns raw_template default first, then template_file
|
||||
- checks that the raw_template overrules template_file if set
|
||||
- checks that once raw_template is set to '', template_file returns
|
||||
"""
|
||||
nb = v4.new_notebook()
|
||||
nb.cells.append(v4.new_code_cell("some_text"))
|
||||
|
||||
class AttrDynamicExporter(TemplateExporter):
|
||||
@default('raw_template')
|
||||
def _raw_template_default(self):
|
||||
return raw_template
|
||||
|
||||
@default('default_template_file')
|
||||
def _template_file_default(self):
|
||||
return 'index.rst.j2'
|
||||
|
||||
exporter_attr_dynamic = AttrDynamicExporter(template_name='rst')
|
||||
output_attr_dynamic, _ = exporter_attr_dynamic.from_notebook_node(nb)
|
||||
assert "blah" in output_attr_dynamic
|
||||
exporter_attr_dynamic.raw_template = ''
|
||||
assert exporter_attr_dynamic.template_file == 'index.rst.j2'
|
||||
output_attr_dynamic, _ = exporter_attr_dynamic.from_notebook_node(nb)
|
||||
assert "blah" not in output_attr_dynamic
|
||||
|
||||
|
||||
def test_raw_template_constructor(self):
|
||||
"""
|
||||
Test `raw_template` as a keyword argument in the exporter constructor.
|
||||
"""
|
||||
nb = v4.new_notebook()
|
||||
nb.cells.append(v4.new_code_cell("some_text"))
|
||||
|
||||
output_constructor, _ = TemplateExporter(template_name='rst',
|
||||
raw_template=raw_template).from_notebook_node(nb)
|
||||
assert "blah" in output_constructor
|
||||
|
||||
def test_raw_template_assignment(self):
|
||||
"""
|
||||
Test `raw_template` assigned after the fact on non-custom Exporter.
|
||||
"""
|
||||
nb = v4.new_notebook()
|
||||
nb.cells.append(v4.new_code_cell("some_text"))
|
||||
exporter_assign = TemplateExporter(template_name='rst')
|
||||
exporter_assign.raw_template = raw_template
|
||||
output_assign, _ = exporter_assign.from_notebook_node(nb)
|
||||
assert "blah" in output_assign
|
||||
|
||||
def test_raw_template_reassignment(self):
|
||||
"""
|
||||
Test `raw_template` reassigned after the fact on non-custom Exporter.
|
||||
"""
|
||||
nb = v4.new_notebook()
|
||||
nb.cells.append(v4.new_code_cell("some_text"))
|
||||
exporter_reassign = TemplateExporter(template_name='rst')
|
||||
exporter_reassign.raw_template = raw_template
|
||||
output_reassign, _ = exporter_reassign.from_notebook_node(nb)
|
||||
assert "blah" in output_reassign
|
||||
exporter_reassign.raw_template = raw_template.replace("blah", "baz")
|
||||
output_reassign, _ = exporter_reassign.from_notebook_node(nb)
|
||||
assert "baz" in output_reassign
|
||||
|
||||
def test_raw_template_deassignment(self):
|
||||
"""
|
||||
Test `raw_template` does not overwrite template_file if deassigned after
|
||||
being assigned to a non-custom Exporter.
|
||||
"""
|
||||
nb = v4.new_notebook()
|
||||
nb.cells.append(v4.new_code_cell("some_text"))
|
||||
exporter_deassign = RSTExporter()
|
||||
exporter_deassign.raw_template = raw_template
|
||||
output_deassign, _ = exporter_deassign.from_notebook_node(nb)
|
||||
assert "blah" in output_deassign
|
||||
exporter_deassign.raw_template = ''
|
||||
assert exporter_deassign.template_file == 'index.rst.j2'
|
||||
output_deassign, _ = exporter_deassign.from_notebook_node(nb)
|
||||
assert "blah" not in output_deassign
|
||||
|
||||
def test_raw_template_dereassignment(self):
|
||||
"""
|
||||
Test `raw_template` does not overwrite template_file if deassigned after
|
||||
being assigned to a non-custom Exporter.
|
||||
"""
|
||||
nb = v4.new_notebook()
|
||||
nb.cells.append(v4.new_code_cell("some_text"))
|
||||
exporter_dereassign = RSTExporter()
|
||||
exporter_dereassign.raw_template = raw_template
|
||||
output_dereassign, _ = exporter_dereassign.from_notebook_node(nb)
|
||||
assert "blah" in output_dereassign
|
||||
exporter_dereassign.raw_template = raw_template.replace("blah", "baz")
|
||||
output_dereassign, _ = exporter_dereassign.from_notebook_node(nb)
|
||||
assert "baz" in output_dereassign
|
||||
exporter_dereassign.raw_template = ''
|
||||
assert exporter_dereassign.template_file == 'index.rst.j2'
|
||||
output_dereassign, _ = exporter_dereassign.from_notebook_node(nb)
|
||||
assert "blah" not in output_dereassign
|
||||
|
||||
def test_fail_to_find_template_file(self):
|
||||
# Create exporter with invalid template file, check that it doesn't
|
||||
# exist in the environment, try to convert empty notebook. Failure is
|
||||
# expected due to nonexistant template file.
|
||||
|
||||
template = 'does_not_exist.tpl'
|
||||
exporter = TemplateExporter(template_file=template)
|
||||
assert template not in exporter.environment.list_templates(extensions=['tpl'])
|
||||
nb = v4.new_notebook()
|
||||
with pytest.raises(TemplateNotFound):
|
||||
out, resources = exporter.from_notebook_node(nb)
|
||||
|
||||
def test_exclude_code_cell(self):
|
||||
no_io = {
|
||||
"TemplateExporter":{
|
||||
"exclude_output": True,
|
||||
"exclude_input": True,
|
||||
"exclude_input_prompt": False,
|
||||
"exclude_output_prompt": False,
|
||||
"exclude_markdown": False,
|
||||
"exclude_code_cell": False,
|
||||
}
|
||||
}
|
||||
c_no_io = Config(no_io)
|
||||
exporter_no_io = TemplateExporter(config=c_no_io, template_name='markdown')
|
||||
exporter_no_io.template_file = 'index.md.j2'
|
||||
nb_no_io, resources_no_io = exporter_no_io.from_filename(self._get_notebook())
|
||||
|
||||
assert not resources_no_io['global_content_filter']['include_input']
|
||||
assert not resources_no_io['global_content_filter']['include_output']
|
||||
|
||||
no_code = {
|
||||
"TemplateExporter":{
|
||||
"exclude_output": False,
|
||||
"exclude_input": False,
|
||||
"exclude_input_prompt": False,
|
||||
"exclude_output_prompt": False,
|
||||
"exclude_markdown": False,
|
||||
"exclude_code_cell": True,
|
||||
}
|
||||
}
|
||||
c_no_code = Config(no_code)
|
||||
exporter_no_code = TemplateExporter(config=c_no_code, template_name='markdown')
|
||||
exporter_no_code.template_file = 'index.md.j2'
|
||||
nb_no_code, resources_no_code = exporter_no_code.from_filename(self._get_notebook())
|
||||
|
||||
assert not resources_no_code['global_content_filter']['include_code']
|
||||
assert nb_no_io == nb_no_code
|
||||
|
||||
|
||||
def test_exclude_input_prompt(self):
|
||||
no_input_prompt = {
|
||||
"TemplateExporter":{
|
||||
"exclude_output": False,
|
||||
"exclude_input": False,
|
||||
"exclude_input_prompt": True,
|
||||
"exclude_output_prompt": False,
|
||||
"exclude_markdown": False,
|
||||
"exclude_code_cell": False,
|
||||
}
|
||||
}
|
||||
c_no_input_prompt = Config(no_input_prompt)
|
||||
exporter_no_input_prompt = MarkdownExporter(config=c_no_input_prompt)
|
||||
nb_no_input_prompt, resources_no_input_prompt = exporter_no_input_prompt.from_filename(self._get_notebook())
|
||||
|
||||
assert not resources_no_input_prompt['global_content_filter']['include_input_prompt']
|
||||
assert "# In[" not in nb_no_input_prompt
|
||||
|
||||
def test_exclude_markdown(self):
|
||||
|
||||
no_md= {
|
||||
"TemplateExporter":{
|
||||
"exclude_output": False,
|
||||
"exclude_input": False,
|
||||
"exclude_input_prompt": False,
|
||||
"exclude_output_prompt": False,
|
||||
"exclude_markdown": True,
|
||||
"exclude_code_cell": False,
|
||||
}
|
||||
}
|
||||
|
||||
c_no_md = Config(no_md)
|
||||
exporter_no_md = TemplateExporter(config=c_no_md, template_name='python')
|
||||
exporter_no_md.template_file = 'index.py.j2'
|
||||
nb_no_md, resources_no_md = exporter_no_md.from_filename(self._get_notebook())
|
||||
|
||||
assert not resources_no_md['global_content_filter']['include_markdown']
|
||||
assert "First import NumPy and Matplotlib" not in nb_no_md
|
||||
|
||||
def test_exclude_output_prompt(self):
|
||||
no_output_prompt = {
|
||||
"TemplateExporter":{
|
||||
"exclude_output": False,
|
||||
"exclude_input": False,
|
||||
"exclude_input_prompt": False,
|
||||
"exclude_output_prompt": True,
|
||||
"exclude_markdown": False,
|
||||
"exclude_code_cell": False,
|
||||
}
|
||||
}
|
||||
c_no_output_prompt = Config(no_output_prompt)
|
||||
exporter_no_output_prompt = HTMLExporter(config=c_no_output_prompt)
|
||||
nb_no_output_prompt, resources_no_output_prompt = exporter_no_output_prompt.from_filename(self._get_notebook())
|
||||
|
||||
assert not resources_no_output_prompt['global_content_filter']['include_output_prompt']
|
||||
assert "Out[1]" not in nb_no_output_prompt
|
||||
|
||||
def test_remove_elements_with_tags(self):
|
||||
|
||||
conf = Config({
|
||||
"TagRemovePreprocessor": {
|
||||
"remove_cell_tags": ["remove_cell"],
|
||||
"remove_all_outputs_tags": ["remove_output"],
|
||||
"remove_input_tags": ["remove_input"]
|
||||
},
|
||||
})
|
||||
|
||||
exporter = MarkdownExporter(config=conf)
|
||||
nb, resources = exporter.from_filename(self._get_notebook())
|
||||
|
||||
assert "hist(evs.real)" not in nb
|
||||
assert "cell is just markdown testing whether" not in nb
|
||||
assert "(100,)" not in nb
|
||||
|
||||
def _make_exporter(self, config=None):
|
||||
exporter = SampleExporter(config=config)
|
||||
return exporter
|
|
@@ -0,0 +1,44 @@
"""Tests for the WebPDF exporter."""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

import io
import pytest

from unittest.mock import patch

from .base import ExportersTestsBase
from ..webpdf import WebPDFExporter


class TestWebPDFExporter(ExportersTestsBase):
    """Contains test functions for webpdf.py"""

    exporter_class = WebPDFExporter

    def test_export(self):
        """
        Can a TemplateExporter export something?
        """
        (output, resources) = WebPDFExporter(allow_chromium_download=True).from_filename(self._get_notebook())
        assert len(output) > 0

    @patch('pyppeteer.util.check_chromium', return_value=False)
    def test_webpdf_without_chromium(self, mock_check_chromium):
        """
        Generate PDFs if chromium not present?
        """
        with pytest.raises(RuntimeError):
            WebPDFExporter(allow_chromium_download=False).from_filename(self._get_notebook())

    def test_webpdf_without_pyppeteer(self):
        """
        Generate PDFs if pyppeteer is not installed?
        """
        with pytest.raises(RuntimeError):
            exporter = WebPDFExporter()
            with io.open(self._get_notebook(), encoding='utf-8') as f:
                nb = exporter.from_file(f, resources={})
            # Have to do this as the very last action as traitlets do dynamic importing often
            with patch('builtins.__import__', side_effect=ModuleNotFoundError("Fake missing")):
                exporter.from_notebook_node(nb)
venv/Lib/site-packages/nbconvert/exporters/webpdf.py (Normal file, +100 lines)
@@ -0,0 +1,100 @@
"""Export to PDF via a headless browser"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

import asyncio

from traitlets import Bool
import concurrent.futures

from .html import HTMLExporter


class WebPDFExporter(HTMLExporter):
    """Writer designed to write to PDF files.

    This inherits from :class:`HTMLExporter`. It creates the HTML using the
    template machinery, and then runs pyppeteer to create a PDF.
    """
    export_from_notebook = "PDF via pyppeteer"

    allow_chromium_download = Bool(False,
        help='Whether to allow downloading Chromium if no suitable version is found on the system.'
    ).tag(config=True)

    def _check_launch_reqs(self):
        try:
            from pyppeteer import launch
            from pyppeteer.util import check_chromium
        except ModuleNotFoundError as e:
            raise RuntimeError("Pyppeteer is not installed to support Web PDF conversion. "
                               "Please install `nbconvert[webpdf]` to enable.") from e
        if not self.allow_chromium_download and not check_chromium():
            raise RuntimeError("No suitable chromium executable found on the system. "
                               "Please use '--allow-chromium-download' to allow downloading one.")
        return launch

    def run_pyppeteer(self, html):
        """Run pyppeteer."""

        async def main():
            browser = await self._check_launch_reqs()(
                handleSIGINT=False,
                handleSIGTERM=False,
                handleSIGHUP=False,
            )
            page = await browser.newPage()
            await page.waitFor(100)
            await page.goto('data:text/html,'+html, waitUntil='networkidle0')
            await page.waitFor(100)

            # Floating point precision errors can cause the printed PDF
            # to spill over onto a new page by a pixel fraction.
            dimensions = await page.evaluate(
                """() => {
                    const rect = document.body.getBoundingClientRect();
                    return {
                        width: Math.ceil(rect.width) + 1,
                        height: Math.ceil(rect.height) + 1,
                    }
                }"""
            )
            width = dimensions['width']
            height = dimensions['height']
            # 200 inches is the maximum size for Adobe Acrobat Reader.
            pdf_data = await page.pdf(
                {
                    'width': min(width, 200 * 72),
                    'height': min(height, 200 * 72),
                    'printBackground': True,
                }
            )
            await browser.close()
            return pdf_data

        pool = concurrent.futures.ThreadPoolExecutor()
        # TODO: when dropping Python 3.6, use
        #     pdf_data = pool.submit(asyncio.run, main()).result()
        def run_coroutine(coro):
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            return loop.run_until_complete(coro)
        pdf_data = pool.submit(run_coroutine, main()).result()
        return pdf_data

    def from_notebook_node(self, nb, resources=None, **kw):
        self._check_launch_reqs()
        html, resources = super().from_notebook_node(
            nb, resources=resources, **kw
        )

        self.log.info('Building PDF')
        pdf_data = self.run_pyppeteer(html)
        self.log.info('PDF successfully created')

        # convert output extension to pdf
        # the writer above required it to be html
        resources['output_extension'] = '.pdf'

        return pdf_data, resources
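A minimal usage sketch for the exporter above; it assumes the webpdf extra is installed (pip install nbconvert[webpdf]) and uses example.ipynb as a placeholder notebook path:

from nbconvert.exporters.webpdf import WebPDFExporter

# Allow pyppeteer to download Chromium if no local copy is found.
exporter = WebPDFExporter(allow_chromium_download=True)
# from_filename() returns the PDF bytes plus a resources dict whose
# output_extension is switched to '.pdf' by from_notebook_node().
pdf_data, resources = exporter.from_filename('example.ipynb')  # placeholder path
with open('example' + resources['output_extension'], 'wb') as f:
    f.write(pdf_data)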
venv/Lib/site-packages/nbconvert/filters/__init__.py (Normal file, +11 lines)
@@ -0,0 +1,11 @@
from .ansi import *
from .citation import *
from .datatypefilter import *
from .highlight import *
from .latex import *
from .markdown import *
from .strings import *
from .metadata import *
from .pandoc import *

from ipython_genutils.text import indent
venv/Lib/site-packages/nbconvert/filters/ansi.py (Normal file, +296 lines)
@@ -0,0 +1,296 @@
"""Filters for processing ANSI colors within Jinja templates."""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
import re
|
||||
import jinja2
|
||||
|
||||
__all__ = [
|
||||
'strip_ansi',
|
||||
'ansi2html',
|
||||
'ansi2latex'
|
||||
]
|
||||
|
||||
_ANSI_RE = re.compile('\x1b\\[(.*?)([@-~])')
|
||||
|
||||
_ANSI_COLORS = (
|
||||
'ansi-black',
|
||||
'ansi-red',
|
||||
'ansi-green',
|
||||
'ansi-yellow',
|
||||
'ansi-blue',
|
||||
'ansi-magenta',
|
||||
'ansi-cyan',
|
||||
'ansi-white',
|
||||
'ansi-black-intense',
|
||||
'ansi-red-intense',
|
||||
'ansi-green-intense',
|
||||
'ansi-yellow-intense',
|
||||
'ansi-blue-intense',
|
||||
'ansi-magenta-intense',
|
||||
'ansi-cyan-intense',
|
||||
'ansi-white-intense',
|
||||
)
|
||||
|
||||
|
||||
def strip_ansi(source):
|
||||
"""
|
||||
Remove ANSI escape codes from text.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
source : str
|
||||
Source to remove the ANSI from
|
||||
|
||||
"""
|
||||
return _ANSI_RE.sub('', source)
|
||||
|
||||
|
||||
def ansi2html(text):
|
||||
"""
|
||||
Convert ANSI colors to HTML colors.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
text : unicode
|
||||
Text containing ANSI colors to convert to HTML
|
||||
|
||||
"""
|
||||
text = jinja2.utils.escape(text)
|
||||
return _ansi2anything(text, _htmlconverter)
|
||||
|
||||
|
||||
def ansi2latex(text):
|
||||
"""
|
||||
Convert ANSI colors to LaTeX colors.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
text : unicode
|
||||
Text containing ANSI colors to convert to LaTeX
|
||||
|
||||
"""
|
||||
return _ansi2anything(text, _latexconverter)
|
||||
|
||||
|
||||
def _htmlconverter(fg, bg, bold, underline, inverse):
|
||||
"""
|
||||
Return start and end tags for given foreground/background/bold/underline.
|
||||
|
||||
"""
|
||||
if (fg, bg, bold, underline, inverse) == (None, None, False, False, False):
|
||||
return '', ''
|
||||
|
||||
classes = []
|
||||
styles = []
|
||||
|
||||
if inverse:
|
||||
fg, bg = bg, fg
|
||||
|
||||
if isinstance(fg, int):
|
||||
classes.append(_ANSI_COLORS[fg] + '-fg')
|
||||
elif fg:
|
||||
styles.append('color: rgb({},{},{})'.format(*fg))
|
||||
elif inverse:
|
||||
classes.append('ansi-default-inverse-fg')
|
||||
|
||||
if isinstance(bg, int):
|
||||
classes.append(_ANSI_COLORS[bg] + '-bg')
|
||||
elif bg:
|
||||
styles.append('background-color: rgb({},{},{})'.format(*bg))
|
||||
elif inverse:
|
||||
classes.append('ansi-default-inverse-bg')
|
||||
|
||||
if bold:
|
||||
classes.append('ansi-bold')
|
||||
|
||||
if underline:
|
||||
classes.append('ansi-underline')
|
||||
|
||||
starttag = '<span'
|
||||
if classes:
|
||||
starttag += ' class="' + ' '.join(classes) + '"'
|
||||
if styles:
|
||||
starttag += ' style="' + '; '.join(styles) + '"'
|
||||
starttag += '>'
|
||||
return starttag, '</span>'
|
||||
|
||||
|
||||
def _latexconverter(fg, bg, bold, underline, inverse):
|
||||
"""
|
||||
Return start and end markup given foreground/background/bold/underline.
|
||||
|
||||
"""
|
||||
if (fg, bg, bold, underline, inverse) == (None, None, False, False, False):
|
||||
return '', ''
|
||||
|
||||
starttag, endtag = '', ''
|
||||
|
||||
if inverse:
|
||||
fg, bg = bg, fg
|
||||
|
||||
if isinstance(fg, int):
|
||||
starttag += r'\textcolor{' + _ANSI_COLORS[fg] + '}{'
|
||||
endtag = '}' + endtag
|
||||
elif fg:
|
||||
# See http://tex.stackexchange.com/a/291102/13684
|
||||
starttag += r'\def\tcRGB{\textcolor[RGB]}\expandafter'
|
||||
starttag += r'\tcRGB\expandafter{\detokenize{%s,%s,%s}}{' % fg
|
||||
endtag = '}' + endtag
|
||||
elif inverse:
|
||||
starttag += r'\textcolor{ansi-default-inverse-fg}{'
|
||||
endtag = '}' + endtag
|
||||
|
||||
if isinstance(bg, int):
|
||||
starttag += r'\setlength{\fboxsep}{0pt}'
|
||||
starttag += r'\colorbox{' + _ANSI_COLORS[bg] + '}{'
|
||||
endtag = r'\strut}' + endtag
|
||||
elif bg:
|
||||
starttag += r'\setlength{\fboxsep}{0pt}'
|
||||
# See http://tex.stackexchange.com/a/291102/13684
|
||||
starttag += r'\def\cbRGB{\colorbox[RGB]}\expandafter'
|
||||
starttag += r'\cbRGB\expandafter{\detokenize{%s,%s,%s}}{' % bg
|
||||
endtag = r'\strut}' + endtag
|
||||
elif inverse:
|
||||
starttag += r'\setlength{\fboxsep}{0pt}'
|
||||
starttag += r'\colorbox{ansi-default-inverse-bg}{'
|
||||
endtag = r'\strut}' + endtag
|
||||
|
||||
if bold:
|
||||
starttag += r'\textbf{'
|
||||
endtag = '}' + endtag
|
||||
|
||||
if underline:
|
||||
starttag += r'\underline{'
|
||||
endtag = '}' + endtag
|
||||
|
||||
return starttag, endtag
|
||||
|
||||
|
||||
def _ansi2anything(text, converter):
|
||||
r"""
|
||||
Convert ANSI colors to HTML or LaTeX.
|
||||
|
||||
See https://en.wikipedia.org/wiki/ANSI_escape_code
|
||||
|
||||
Accepts codes like '\x1b[32m' (red) and '\x1b[1;32m' (bold, red).
|
||||
|
||||
Non-color escape sequences (not ending with 'm') are filtered out.
|
||||
|
||||
Ideally, this should have the same behavior as the function
|
||||
fixConsole() in notebook/notebook/static/base/js/utils.js.
|
||||
|
||||
"""
|
||||
fg, bg = None, None
|
||||
bold = False
|
||||
underline = False
|
||||
inverse = False
|
||||
numbers = []
|
||||
out = []
|
||||
|
||||
while text:
|
||||
m = _ANSI_RE.search(text)
|
||||
if m:
|
||||
if m.group(2) == 'm':
|
||||
try:
|
||||
# Empty code is same as code 0
|
||||
numbers = [int(n) if n else 0
|
||||
for n in m.group(1).split(';')]
|
||||
except ValueError:
|
||||
pass # Invalid color specification
|
||||
else:
|
||||
pass # Not a color code
|
||||
chunk, text = text[:m.start()], text[m.end():]
|
||||
else:
|
||||
chunk, text = text, ''
|
||||
|
||||
if chunk:
|
||||
starttag, endtag = converter(
|
||||
fg + 8 if bold and fg in range(8) else fg,
|
||||
bg, bold, underline, inverse)
|
||||
out.append(starttag)
|
||||
out.append(chunk)
|
||||
out.append(endtag)
|
||||
|
||||
while numbers:
|
||||
n = numbers.pop(0)
|
||||
if n == 0:
|
||||
# Code 0 (same as empty code): reset everything
|
||||
fg = bg = None
|
||||
bold = underline = inverse = False
|
||||
elif n == 1:
|
||||
bold = True
|
||||
elif n == 4:
|
||||
underline = True
|
||||
elif n == 5:
|
||||
# Code 5: blinking
|
||||
bold = True
|
||||
elif n == 7:
|
||||
inverse = True
|
||||
elif n in (21, 22):
|
||||
bold = False
|
||||
elif n == 24:
|
||||
underline = False
|
||||
elif n == 27:
|
||||
inverse = False
|
||||
elif 30 <= n <= 37:
|
||||
fg = n - 30
|
||||
elif n == 38:
|
||||
try:
|
||||
fg = _get_extended_color(numbers)
|
||||
except ValueError:
|
||||
numbers.clear()
|
||||
elif n == 39:
|
||||
fg = None
|
||||
elif 40 <= n <= 47:
|
||||
bg = n - 40
|
||||
elif n == 48:
|
||||
try:
|
||||
bg = _get_extended_color(numbers)
|
||||
except ValueError:
|
||||
numbers.clear()
|
||||
elif n == 49:
|
||||
bg = None
|
||||
elif 90 <= n <= 97:
|
||||
fg = n - 90 + 8
|
||||
elif 100 <= n <= 107:
|
||||
bg = n - 100 + 8
|
||||
else:
|
||||
pass # Unknown codes are ignored
|
||||
return ''.join(out)
|
||||
|
||||
|
||||
def _get_extended_color(numbers):
|
||||
n = numbers.pop(0)
|
||||
if n == 2 and len(numbers) >= 3:
|
||||
# 24-bit RGB
|
||||
r = numbers.pop(0)
|
||||
g = numbers.pop(0)
|
||||
b = numbers.pop(0)
|
||||
if not all(0 <= c <= 255 for c in (r, g, b)):
|
||||
raise ValueError()
|
||||
elif n == 5 and len(numbers) >= 1:
|
||||
# 256 colors
|
||||
idx = numbers.pop(0)
|
||||
if idx < 0:
|
||||
raise ValueError()
|
||||
elif idx < 16:
|
||||
# 16 default terminal colors
|
||||
return idx
|
||||
elif idx < 232:
|
||||
# 6x6x6 color cube, see http://stackoverflow.com/a/27165165/500098
|
||||
r = (idx - 16) // 36
|
||||
r = 55 + r * 40 if r > 0 else 0
|
||||
g = ((idx - 16) % 36) // 6
|
||||
g = 55 + g * 40 if g > 0 else 0
|
||||
b = (idx - 16) % 6
|
||||
b = 55 + b * 40 if b > 0 else 0
|
||||
elif idx < 256:
|
||||
# grayscale, see http://stackoverflow.com/a/27165165/500098
|
||||
r = g = b = (idx - 232) * 10 + 8
|
||||
else:
|
||||
raise ValueError()
|
||||
else:
|
||||
raise ValueError()
|
||||
return r, g, b
|
venv/Lib/site-packages/nbconvert/filters/citation.py (Normal file, +107 lines)
@@ -0,0 +1,107 @@
"""Citation handling for LaTeX output."""

#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from html.parser import HTMLParser

#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------

__all__ = ['citation2latex']


def citation2latex(s):
    """Parse citations in Markdown cells.

    This looks for HTML tags having a data attribute named ``data-cite``
    and replaces them with a call to the LaTeX ``\\cite`` command. The
    transformation looks like this::

        <cite data-cite="granger">(Granger, 2013)</cite>

    Becomes::

        \\cite{granger}

    Any HTML tag can be used, which allows the citations to be formatted
    in HTML in any manner.
    """
    parser = CitationParser()
    parser.feed(s)
    parser.close()
    outtext = u''
    startpos = 0
    for citation in parser.citelist:
        outtext += s[startpos:citation[1]]
        outtext += '\\cite{%s}'%citation[0]
        startpos = citation[2] if len(citation)==3 else -1
    outtext += s[startpos:] if startpos != -1 else ''
    return outtext

#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class CitationParser(HTMLParser):
    """Citation Parser

    Replaces HTML tags with a data-cite attribute with the corresponding LaTeX \\cite.

    Inherits from HTMLParser, overrides:
    - handle_starttag
    - handle_endtag
    """
    # number of open tags
    opentags = None
    # list of found citations
    citelist = None
    # active citation tag
    citetag = None

    def __init__(self):
        self.citelist = []
        self.opentags = 0
        HTMLParser.__init__(self)

    def get_offset(self):
        # Compute start position in source
        lin, offset = self.getpos()
        pos = 0
        for i in range(lin-1):
            pos = self.data.find('\n',pos) + 1
        return pos + offset

    def handle_starttag(self, tag, attrs):
        # for each tag check if attributes are present and if no citation is active
        if self.opentags == 0 and len(attrs)>0:
            for atr, data in attrs:
                if atr.lower() == 'data-cite':
                    self.citetag = tag
                    self.opentags = 1
                    self.citelist.append([data, self.get_offset()])
                    return

        if tag == self.citetag:
            # found an open citation tag but not the starting one
            self.opentags += 1

    def handle_endtag(self, tag):
        if tag == self.citetag:
            # found citation tag, check if it is the starting one
            if self.opentags == 1:
                pos = self.get_offset()
                self.citelist[-1].append(pos+len(tag)+3)
            self.opentags -= 1

    def feed(self, data):
        self.data = data
        HTMLParser.feed(self, data)
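A short illustration of the transformation described in the citation2latex docstring, using a made-up markdown string:

from nbconvert.filters.citation import citation2latex

md = 'As shown by <cite data-cite="granger">(Granger, 2013)</cite>, this works.'
print(citation2latex(md))
# Expected output: As shown by \cite{granger}, this works.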
venv/Lib/site-packages/nbconvert/filters/datatypefilter.py (Normal file, +44 lines)
@@ -0,0 +1,44 @@
"""Filter used to select the first preferred output format available.

The filter contained in the file allows the converter templates to select
the output format that is most valuable to the active export format. The
value of the different formats is set via
NbConvertBase.display_data_priority
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------

from ..utils.base import NbConvertBase
from warnings import warn

__all__ = ['DataTypeFilter']

class DataTypeFilter(NbConvertBase):
    """ Returns the preferred display format """

    def __call__(self, output):
        """ Return the first available format in the priority.

        Produces a UserWarning if no compatible mimetype is found.

        `output` is dict with structure {mimetype-of-element: value-of-element}

        """
        for fmt in self.display_data_priority:
            if fmt in output:
                return [fmt]
        warn("Your element with mimetype(s) {mimetypes}"
             " is not able to be represented.".format(
                 mimetypes=output.keys())
             )

        return []
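A small sketch of how the filter is used; the exact result depends on NbConvertBase.display_data_priority (defined elsewhere, not in this diff), whose default ranks text/html above text/plain:

from nbconvert.filters.datatypefilter import DataTypeFilter

bundle = {'text/plain': 'x', 'text/html': '<b>x</b>'}  # hypothetical output bundle
print(DataTypeFilter()(bundle))  # expected ['text/html'] with the default priority list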
venv/Lib/site-packages/nbconvert/filters/filter_links.py (Normal file, +39 lines)
@@ -0,0 +1,39 @@
#!/usr/bin/env python3
"""A pandoc filter used in converting notebooks to Latex.
Converts links between notebooks to Latex cross-references.
"""
import re

from pandocfilters import RawInline, applyJSONFilters, stringify

def resolve_references(source):
    """
    This applies the resolve_one_reference to the text passed in via the source argument.

    This expects content in the form of a string encoded JSON object as represented
    internally in ``pandoc``.
    """
    return applyJSONFilters([resolve_one_reference], source)

def resolve_one_reference(key, val, fmt, meta):
    """
    This takes a tuple of arguments that are compatible with ``pandocfilters.walk()`` that
    allows identifying hyperlinks in the document and transforms them into valid LaTeX
    \\hyperref{} calls so that linking to headers between cells is possible.

    See the documentation in ``pandocfilters.walk()`` for further information on the meaning
    and specification of ``key``, ``val``, ``fmt``, and ``meta``.
    """

    if key == 'Link':
        text = stringify(val[1])
        target = val[2][0]
        m = re.match(r'#(.+)$', target)
        if m:
            # pandoc automatically makes labels for headings.
            label = m.group(1).lower()
            label = re.sub(r'[^\w-]+', '', label)  # Strip HTML entities
            return RawInline('tex', r'\hyperref[{label}]{{{text}}}'.format(label=label, text=text))

    # Other elements will be returned unchanged.
venv/Lib/site-packages/nbconvert/filters/highlight.py (Normal file, +152 lines)
@@ -0,0 +1,152 @@
"""
|
||||
Module containing filter functions that allow code to be highlighted
|
||||
from within Jinja templates.
|
||||
"""
|
||||
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
# pygments must not be imported at the module level
|
||||
# because errors should be raised at runtime if it's actually needed,
|
||||
# not import time, when it may not be needed.
|
||||
|
||||
from nbconvert.utils.base import NbConvertBase
|
||||
from warnings import warn
|
||||
|
||||
from traitlets import observe
|
||||
|
||||
MULTILINE_OUTPUTS = ['text', 'html', 'svg', 'latex', 'javascript', 'json']
|
||||
|
||||
__all__ = [
|
||||
'Highlight2HTML',
|
||||
'Highlight2Latex'
|
||||
]
|
||||
|
||||
class Highlight2HTML(NbConvertBase):
|
||||
def __init__(self, pygments_lexer=None, **kwargs):
|
||||
self.pygments_lexer = pygments_lexer or 'ipython3'
|
||||
super().__init__(**kwargs)
|
||||
|
||||
@observe('default_language')
|
||||
def _default_language_changed(self, change):
|
||||
warn('Setting default_language in config is deprecated as of 5.0, '
|
||||
'please use language_info metadata instead.')
|
||||
self.pygments_lexer = change['new']
|
||||
|
||||
def __call__(self, source, language=None, metadata=None):
|
||||
"""
|
||||
Return a syntax-highlighted version of the input source as html output.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
source : str
|
||||
source of the cell to highlight
|
||||
language : str
|
||||
language to highlight the syntax of
|
||||
metadata : NotebookNode cell metadata
|
||||
metadata of the cell to highlight
|
||||
"""
|
||||
from pygments.formatters import HtmlFormatter
|
||||
|
||||
if not language:
|
||||
language=self.pygments_lexer
|
||||
|
||||
return _pygments_highlight(source if len(source) > 0 else ' ',
|
||||
# needed to help post processors:
|
||||
HtmlFormatter(cssclass=" highlight hl-"+language),
|
||||
language, metadata)
|
||||
|
||||
|
||||
class Highlight2Latex(NbConvertBase):
|
||||
def __init__(self, pygments_lexer=None, **kwargs):
|
||||
self.pygments_lexer = pygments_lexer or 'ipython3'
|
||||
super().__init__(**kwargs)
|
||||
|
||||
@observe('default_language')
|
||||
def _default_language_changed(self, change):
|
||||
warn('Setting default_language in config is deprecated as of 5.0, '
|
||||
'please use language_info metadata instead.')
|
||||
self.pygments_lexer = change['new']
|
||||
|
||||
def __call__(self, source, language=None, metadata=None, strip_verbatim=False):
|
||||
"""
|
||||
Return a syntax-highlighted version of the input source as latex output.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
source : str
|
||||
source of the cell to highlight
|
||||
language : str
|
||||
language to highlight the syntax of
|
||||
metadata : NotebookNode cell metadata
|
||||
metadata of the cell to highlight
|
||||
strip_verbatim : bool
|
||||
remove the Verbatim environment that pygments provides by default
|
||||
"""
|
||||
from pygments.formatters import LatexFormatter
|
||||
if not language:
|
||||
language=self.pygments_lexer
|
||||
|
||||
latex = _pygments_highlight(source, LatexFormatter(), language, metadata)
|
||||
if strip_verbatim:
|
||||
latex = latex.replace(r'\begin{Verbatim}[commandchars=\\\{\}]' + '\n', '')
|
||||
return latex.replace('\n\\end{Verbatim}\n', '')
|
||||
else:
|
||||
return latex
|
||||
|
||||
|
||||
|
||||
def _pygments_highlight(source, output_formatter, language='ipython', metadata=None):
|
||||
"""
|
||||
Return a syntax-highlighted version of the input source
|
||||
|
||||
Parameters
|
||||
----------
|
||||
source : str
|
||||
source of the cell to highlight
|
||||
output_formatter : Pygments formatter
|
||||
language : str
|
||||
language to highlight the syntax of
|
||||
metadata : NotebookNode cell metadata
|
||||
metadata of the cell to highlight
|
||||
"""
|
||||
from pygments import highlight
|
||||
from pygments.lexers import get_lexer_by_name
|
||||
from pygments.util import ClassNotFound
|
||||
|
||||
# If the cell uses a magic extension language,
|
||||
# use the magic language instead.
|
||||
if language.startswith('ipython') \
|
||||
and metadata \
|
||||
and 'magics_language' in metadata:
|
||||
|
||||
language = metadata['magics_language']
|
||||
|
||||
lexer = None
|
||||
if language == 'ipython2':
|
||||
try:
|
||||
from IPython.lib.lexers import IPythonLexer
|
||||
except ImportError:
|
||||
warn("IPython lexer unavailable, falling back on Python")
|
||||
language = 'python'
|
||||
else:
|
||||
lexer = IPythonLexer()
|
||||
elif language == 'ipython3':
|
||||
try:
|
||||
from IPython.lib.lexers import IPython3Lexer
|
||||
except ImportError:
|
||||
warn("IPython3 lexer unavailable, falling back on Python 3")
|
||||
language = 'python3'
|
||||
else:
|
||||
lexer = IPython3Lexer()
|
||||
|
||||
if lexer is None:
|
||||
try:
|
||||
lexer = get_lexer_by_name(language, stripall=True)
|
||||
except ClassNotFound:
|
||||
warn("No lexer found for language %r. Treating as plain text." % language)
|
||||
from pygments.lexers.special import TextLexer
|
||||
lexer = TextLexer()
|
||||
|
||||
|
||||
return highlight(source, lexer, output_formatter)
|
venv/Lib/site-packages/nbconvert/filters/latex.py (Normal file, +64 lines)
@@ -0,0 +1,64 @@
"""Latex filters.

Module of useful filters for processing Latex within Jinja latex templates.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import re
from nbconvert.utils.pandoc import pandoc

#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------

LATEX_RE_SUBS = (
    (re.compile(r'\.\.\.+'), r'{\\ldots}'),
)

# Latex substitutions for escaping latex.
# see: http://stackoverflow.com/questions/16259923/how-can-i-escape-latex-special-characters-inside-django-templates

LATEX_SUBS = {
    '&': r'\&',
    '%': r'\%',
    '$': r'\$',
    '#': r'\#',
    '_': r'\_',
    '{': r'\{',
    '}': r'\}',
    '~': r'\textasciitilde{}',
    '^': r'\^{}',
    '\\': r'\textbackslash{}',
}


#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------

__all__ = ['escape_latex']


def escape_latex(text):
    """
    Escape characters that may conflict with latex.

    Parameters
    ----------
    text : str
        Text containing characters that may conflict with Latex
    """
    text = ''.join(LATEX_SUBS.get(c, c) for c in text)
    for pattern, replacement in LATEX_RE_SUBS:
        text = pattern.sub(replacement, text)

    return text
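For example, escape_latex should rewrite reserved characters and ellipses like this:

from nbconvert.filters.latex import escape_latex

print(escape_latex('50% of $cost_total & more ...'))
# Expected output: 50\% of \$cost\_total \& more {\ldots}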
venv/Lib/site-packages/nbconvert/filters/markdown.py (Normal file, +103 lines)
@@ -0,0 +1,103 @@
"""Markdown filters

This file contains a collection of utility filters for dealing with
markdown within Jinja templates.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

from __future__ import print_function
import re

try:
    from .markdown_mistune import markdown2html_mistune
except ImportError as e:
    # store in variable for Python 3
    _mistune_import_error = e

    def markdown2html_mistune(source):
        """mistune is unavailable, raise ImportError"""
        raise ImportError("markdown2html requires mistune: %s"
                          % _mistune_import_error)

from .pandoc import convert_pandoc


__all__ = [
    'markdown2html',
    'markdown2html_pandoc',
    'markdown2html_mistune',
    'markdown2latex',
    'markdown2rst',
    'markdown2asciidoc',
]


def markdown2latex(source, markup='markdown', extra_args=None):
    """
    Convert a markdown string to LaTeX via pandoc.

    This function will raise an error if pandoc is not installed.
    Any error messages generated by pandoc are printed to stderr.

    Parameters
    ----------
    source : string
        Input string, assumed to be valid markdown.
    markup : string
        Markup used by pandoc's reader
        default : pandoc extended markdown
        (see https://pandoc.org/README.html#pandocs-markdown)

    Returns
    -------
    out : string
        Output as returned by pandoc.
    """
    return convert_pandoc(source, markup, 'latex', extra_args=extra_args)


def markdown2html_pandoc(source, extra_args=None):
    """
    Convert a markdown string to HTML via pandoc.
    """
    extra_args = extra_args or ['--mathjax']
    return convert_pandoc(source, 'markdown', 'html', extra_args=extra_args)


def markdown2asciidoc(source, extra_args=None):
    """Convert a markdown string to asciidoc via pandoc"""
    extra_args = extra_args or ['--atx-headers']
    asciidoc = convert_pandoc(source, 'markdown', 'asciidoc',
                              extra_args=extra_args)
    # workaround for https://github.com/jgm/pandoc/issues/3068
    if "__" in asciidoc:
        asciidoc = re.sub(r'\b__([\w \n-]+)__([:,.\n\)])', r'_\1_\2', asciidoc)
        # urls / links:
        asciidoc = re.sub(r'\(__([\w\/-:\.]+)__\)', r'(_\1_)', asciidoc)

    return asciidoc


# The mistune renderer is the default, because it's simple to depend on it
markdown2html = markdown2html_mistune


def markdown2rst(source, extra_args=None):
    """
    Convert a markdown string to ReST via pandoc.

    This function will raise an error if pandoc is not installed.
    Any error messages generated by pandoc are printed to stderr.

    Parameters
    ----------
    source : string
        Input string, assumed to be valid markdown.

    Returns
    -------
    out : string
        Output as returned by pandoc.
    """
    return convert_pandoc(source, 'markdown', 'rst', extra_args=extra_args)
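A quick sketch of the default converter; markdown2html is the mistune-backed variant, so it only needs mistune installed, while the pandoc-backed variants require pandoc on the PATH:

from nbconvert.filters.markdown import markdown2html

html = markdown2html('# Heading\n\nSome *emphasis* and inline math $x^2$.')
print(html)  # an <h1> with an anchor link; the math is passed through as $x^2$ for MathJax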
venv/Lib/site-packages/nbconvert/filters/markdown_mistune.py (Normal file, +181 lines)
@@ -0,0 +1,181 @@
# -*- coding: utf-8 -*-
|
||||
"""Markdown filters with mistune
|
||||
|
||||
Used from markdown.py
|
||||
"""
|
||||
# Copyright (c) IPython Development Team.
|
||||
# Distributed under the terms of the Modified BSD License.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import re
|
||||
from functools import partial
|
||||
|
||||
try:
|
||||
from html import escape
|
||||
html_escape = partial(escape, quote=False)
|
||||
except ImportError:
|
||||
# Python 2
|
||||
from cgi import escape as html_escape
|
||||
|
||||
import mistune
|
||||
|
||||
from pygments import highlight
|
||||
from pygments.lexers import get_lexer_by_name
|
||||
from pygments.formatters import HtmlFormatter
|
||||
from pygments.util import ClassNotFound
|
||||
|
||||
from nbconvert.filters.strings import add_anchor
|
||||
|
||||
|
||||
class MathBlockGrammar(mistune.BlockGrammar):
|
||||
"""This defines a single regex comprised of the different patterns that
|
||||
identify math content spanning multiple lines. These are used by the
|
||||
MathBlockLexer.
|
||||
"""
|
||||
multi_math_str = "|".join([r"^\$\$.*?\$\$",
|
||||
r"^\\\\\[.*?\\\\\]",
|
||||
r"^\\begin\{([a-z]*\*?)\}(.*?)\\end\{\1\}"])
|
||||
multiline_math = re.compile(multi_math_str, re.DOTALL)
|
||||
|
||||
|
||||
class MathBlockLexer(mistune.BlockLexer):
|
||||
""" This acts as a pass-through to the MathInlineLexer. It is needed in
|
||||
order to avoid other block level rules splitting math sections apart.
|
||||
"""
|
||||
|
||||
default_rules = (['multiline_math']
|
||||
+ mistune.BlockLexer.default_rules)
|
||||
|
||||
def __init__(self, rules=None, **kwargs):
|
||||
if rules is None:
|
||||
rules = MathBlockGrammar()
|
||||
super().__init__(rules, **kwargs)
|
||||
|
||||
def parse_multiline_math(self, m):
|
||||
"""Add token to pass through mutiline math."""
|
||||
self.tokens.append({
|
||||
"type": "multiline_math",
|
||||
"text": m.group(0)
|
||||
})
|
||||
|
||||
|
||||
class MathInlineGrammar(mistune.InlineGrammar):
|
||||
"""This defines different ways of declaring math objects that should be
|
||||
passed through to mathjax unaffected. These are used by the MathInlineLexer.
|
||||
"""
|
||||
inline_math = re.compile(r"^\$(.+?)\$|^\\\\\((.+?)\\\\\)", re.DOTALL)
|
||||
block_math = re.compile(r"^\$\$(.*?)\$\$|^\\\\\[(.*?)\\\\\]", re.DOTALL)
|
||||
latex_environment = re.compile(r"^\\begin\{([a-z]*\*?)\}(.*?)\\end\{\1\}",
|
||||
re.DOTALL)
|
||||
text = re.compile(r'^[\s\S]+?(?=[\\<!\[_*`~$]|https?://| {2,}\n|$)')
|
||||
|
||||
|
||||
class MathInlineLexer(mistune.InlineLexer):
|
||||
r"""This interprets the content of LaTeX style math objects using the rules
|
||||
defined by the MathInlineGrammar.
|
||||
|
||||
In particular this grabs ``$$...$$``, ``\\[...\\]``, ``\\(...\\)``, ``$...$``,
|
||||
and ``\begin{foo}...\end{foo}`` styles for declaring mathematics. It strips
|
||||
delimiters from all these varieties, and extracts the type of environment
|
||||
in the last case (``foo`` in this example).
|
||||
"""
|
||||
default_rules = (['block_math', 'inline_math', 'latex_environment']
|
||||
+ mistune.InlineLexer.default_rules)
|
||||
|
||||
def __init__(self, renderer, rules=None, **kwargs):
|
||||
if rules is None:
|
||||
rules = MathInlineGrammar()
|
||||
super().__init__(renderer, rules, **kwargs)
|
||||
|
||||
def output_inline_math(self, m):
|
||||
return self.renderer.inline_math(m.group(1) or m.group(2))
|
||||
|
||||
def output_block_math(self, m):
|
||||
return self.renderer.block_math(m.group(1) or m.group(2) or "")
|
||||
|
||||
def output_latex_environment(self, m):
|
||||
return self.renderer.latex_environment(m.group(1),
|
||||
m.group(2))
|
||||
|
||||
|
||||
class MarkdownWithMath(mistune.Markdown):
|
||||
def __init__(self, renderer, **kwargs):
|
||||
if 'inline' not in kwargs:
|
||||
kwargs['inline'] = MathInlineLexer
|
||||
if 'block' not in kwargs:
|
||||
kwargs['block'] = MathBlockLexer
|
||||
super().__init__(renderer, **kwargs)
|
||||
|
||||
|
||||
def output_multiline_math(self):
|
||||
return self.inline(self.token["text"])
|
||||
|
||||
|
||||
class IPythonRenderer(mistune.Renderer):
|
||||
def block_code(self, code, lang):
|
||||
if lang:
|
||||
try:
|
||||
lexer = get_lexer_by_name(lang, stripall=True)
|
||||
except ClassNotFound:
|
||||
code = lang + '\n' + code
|
||||
lang = None
|
||||
|
||||
if not lang:
|
||||
return '\n<pre><code>%s</code></pre>\n' % \
|
||||
mistune.escape(code)
|
||||
|
||||
formatter = HtmlFormatter()
|
||||
return highlight(code, lexer, formatter)
|
||||
|
||||
def header(self, text, level, raw=None):
|
||||
html = super().header(text, level, raw=raw)
|
||||
if self.options.get("exclude_anchor_links"):
|
||||
return html
|
||||
anchor_link_text = self.options.get('anchor_link_text', u'¶')
|
||||
return add_anchor(html, anchor_link_text=anchor_link_text)
|
||||
|
||||
def escape_html(self, text):
|
||||
return html_escape(text)
|
||||
|
||||
def block_math(self, text):
|
||||
return '$$%s$$' % self.escape_html(text)
|
||||
|
||||
def latex_environment(self, name, text):
|
||||
name = self.escape_html(name)
|
||||
text = self.escape_html(text)
|
||||
return r'\begin{%s}%s\end{%s}' % (name, text, name)
|
||||
|
||||
def inline_math(self, text):
|
||||
return '$%s$' % self.escape_html(text)
|
||||
|
||||
def image(self, src, title, text):
|
||||
"""Rendering a image with title and text.
|
||||
|
||||
:param src: source link of the image.
|
||||
:param title: title text of the image.
|
||||
:param text: alt text of the image.
|
||||
"""
|
||||
attachments = self.options.get('attachments', {})
|
||||
attachment_prefix = 'attachment:'
|
||||
if src.startswith(attachment_prefix):
|
||||
name = src[len(attachment_prefix):]
|
||||
assert name in attachments, "missing attachment: {}".format(name)
|
||||
attachment = attachments[name]
|
||||
# we choose vector over raster, and lossless over lossy
|
||||
preferred_mime_types = ['image/svg+xml', 'image/png', 'image/jpeg']
|
||||
for preferred_mime_type in preferred_mime_types:
|
||||
if preferred_mime_type in attachment:
|
||||
break
|
||||
else: # otherwise we choose the first mimetype we can find
|
||||
preferred_mime_type = list(attachment.keys())[0]
|
||||
mime_type = preferred_mime_type
|
||||
data = attachment[mime_type]
|
||||
src = 'data:' + mime_type + ';base64,' + data
|
||||
return super().image(src, title, text)
|
||||
|
||||
|
||||
def markdown2html_mistune(source):
|
||||
"""Convert a markdown string to HTML using mistune"""
|
||||
return MarkdownWithMath(renderer=IPythonRenderer(
|
||||
escape=False)).render(source)
|
venv/Lib/site-packages/nbconvert/filters/metadata.py (Normal file, +16 lines)
@@ -0,0 +1,16 @@
"""filters for metadata"""

def get_metadata(output, key, mimetype=None):
    """Resolve an output metadata key

    If mimetype given, resolve at mimetype level first,
    then fallback to top-level.
    Otherwise, just resolve at top-level.
    Returns None if no data found.
    """
    md = output.get('metadata') or {}
    if mimetype and mimetype in md:
        value = md[mimetype].get(key)
        if value is not None:
            return value
    return md.get(key)
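For instance, with a hypothetical output node carrying both mimetype-level and top-level metadata:

from nbconvert.filters.metadata import get_metadata

output = {'metadata': {'image/png': {'width': 640}, 'width': 480}}
print(get_metadata(output, 'width', mimetype='image/png'))  # 640, mimetype level wins
print(get_metadata(output, 'width'))                        # 480, top level
print(get_metadata(output, 'height'))                       # None, key not present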
Some files were not shown because too many files have changed in this diff.