diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..62d76f9 --- /dev/null +++ b/.flake8 @@ -0,0 +1,6 @@ +[flake8] +max-line-length = 79 +exclude = **/__pycache__ +max-complexity = 20 +ignore = E203, W503 +jobs=4 \ No newline at end of file diff --git a/.jupyter/jupyter_notebook_config.json b/.jupyter/jupyter_notebook_config.json new file mode 100644 index 0000000..d2d2d3e --- /dev/null +++ b/.jupyter/jupyter_notebook_config.json @@ -0,0 +1,7 @@ +{ + "NotebookApp": { + "nbserver_extensions": { + "jupyter_nbextensions_configurator": true + } + } +} diff --git a/.jupyter/jupyter_notebook_config.py b/.jupyter/jupyter_notebook_config.py new file mode 100644 index 0000000..6c1040e --- /dev/null +++ b/.jupyter/jupyter_notebook_config.py @@ -0,0 +1,876 @@ +# Configuration file for jupyter-notebook. + +# ------------------------------------------------------------------------------ +# Application(SingletonConfigurable) configuration +# ------------------------------------------------------------------------------ + +## This is an application. + +## The date format used by logging formatters for %(asctime)s +# c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S' + +## The Logging format template +# c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s' + +## Set the log level by value or name. +# c.Application.log_level = 30 + +# ------------------------------------------------------------------------------ +# JupyterApp(Application) configuration +# ------------------------------------------------------------------------------ + +## Base class for Jupyter applications + +## Answer yes to any prompts. +# c.JupyterApp.answer_yes = False + +## Full path of a config file. +# c.JupyterApp.config_file = '' + +## Specify a config file to load. +# c.JupyterApp.config_file_name = '' + +## Generate default config file. +# c.JupyterApp.generate_config = False + +# ------------------------------------------------------------------------------ +# NotebookApp(JupyterApp) configuration +# ------------------------------------------------------------------------------ + +## Set the Access-Control-Allow-Credentials: true header +# c.NotebookApp.allow_credentials = False + +## Set the Access-Control-Allow-Origin header +# +# Use '*' to allow any origin to access your server. +# +# Takes precedence over allow_origin_pat. +# c.NotebookApp.allow_origin = '' + +## Use a regular expression for the Access-Control-Allow-Origin header +# +# Requests from an origin matching the expression will get replies with: +# +# Access-Control-Allow-Origin: origin +# +# where `origin` is the origin of the request. +# +# Ignored if allow_origin is set. +# c.NotebookApp.allow_origin_pat = '' + +## Allow password to be changed at login for the notebook server. +# +# While loggin in with a token, the notebook server UI will give the opportunity +# to the user to enter a new password at the same time that will replace the +# token login mechanism. +# +# This can be set to false to prevent changing password from the UI/API. +# c.NotebookApp.allow_password_change = True + +## Allow requests where the Host header doesn't point to a local server +# +# By default, requests get a 403 forbidden response if the 'Host' header shows +# that the browser thinks it's on a non-local domain. Setting this option to +# True disables this check. +# +# This protects against 'DNS rebinding' attacks, where a remote web server +# serves you a page and then changes its DNS to send later requests to a local +# IP, bypassing same-origin checks. 
+# +# Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, along +# with hostnames configured in local_hostnames. +# c.NotebookApp.allow_remote_access = False + +## Whether to allow the user to run the notebook as root. +# c.NotebookApp.allow_root = False + +## DEPRECATED use base_url +# c.NotebookApp.base_project_url = '/' + +## The base URL for the notebook server. +# +# Leading and trailing slashes can be omitted, and will automatically be added. +# c.NotebookApp.base_url = '/' + +## Specify what command to use to invoke a web browser when opening the notebook. +# If not specified, the default browser will be determined by the `webbrowser` +# standard library module, which allows setting of the BROWSER environment +# variable to override it. +# c.NotebookApp.browser = '' + +## The full path to an SSL/TLS certificate file. +# c.NotebookApp.certfile = '' + +## The full path to a certificate authority certificate for SSL/TLS client +# authentication. +# c.NotebookApp.client_ca = '' + +## The config manager class to use +# c.NotebookApp.config_manager_class = 'notebook.services.config.manager.ConfigManager' + +## The notebook manager class to use. +# c.NotebookApp.contents_manager_class = 'notebook.services.contents.largefilemanager.LargeFileManager' + +## Extra keyword arguments to pass to `set_secure_cookie`. See tornado's +# set_secure_cookie docs for details. +# c.NotebookApp.cookie_options = {} + +## The random bytes used to secure cookies. By default this is a new random +# number every time you start the Notebook. Set it to a value in a config file +# to enable logins to persist across server sessions. +# +# Note: Cookie secrets should be kept private, do not share config files with +# cookie_secret stored in plaintext (you can read the value from a file). +# c.NotebookApp.cookie_secret = b'' + +## The file where the cookie secret is stored. +# c.NotebookApp.cookie_secret_file = '' + +## Override URL shown to users. +# +# Replace actual URL, including protocol, address, port and base URL, with the +# given value when displaying URL to the users. Do not change the actual +# connection URL. If authentication token is enabled, the token is added to the +# custom URL automatically. +# +# This option is intended to be used when the URL to display to the user cannot +# be determined reliably by the Jupyter notebook server (proxified or +# containerized setups for example). +# c.NotebookApp.custom_display_url = '' + +## The default URL to redirect to from `/` +# c.NotebookApp.default_url = '/tree' + +## Disable cross-site-request-forgery protection +# +# Jupyter notebook 4.3.1 introduces protection from cross-site request +# forgeries, requiring API requests to either: +# +# - originate from pages served by this server (validated with XSRF cookie and +# token), or - authenticate with a token +# +# Some anonymous compute resources still desire the ability to run code, +# completely without authentication. These services can disable all +# authentication and security checks, with the full knowledge of what that +# implies. +# c.NotebookApp.disable_check_xsrf = False + +## Whether to enable MathJax for typesetting math/TeX +# +# MathJax is the javascript library Jupyter uses to render math/LaTeX. It is +# very large, so you may want to disable it if you have a slow internet +# connection, or for offline use of the notebook. +# +# When disabled, equations etc. will appear as their untransformed TeX source. 
+# c.NotebookApp.enable_mathjax = True + +## extra paths to look for Javascript notebook extensions +# c.NotebookApp.extra_nbextensions_path = [] + +## handlers that should be loaded at higher priority than the default services +# c.NotebookApp.extra_services = [] + +## Extra paths to search for serving static files. +# +# This allows adding javascript/css to be available from the notebook server +# machine, or overriding individual files in the IPython +# c.NotebookApp.extra_static_paths = [] + +## Extra paths to search for serving jinja templates. +# +# Can be used to override templates from notebook.templates. +# c.NotebookApp.extra_template_paths = [] + +## +# c.NotebookApp.file_to_run = '' + +## Extra keyword arguments to pass to `get_secure_cookie`. See tornado's +# get_secure_cookie docs for details. +# c.NotebookApp.get_secure_cookie_kwargs = {} + +## Deprecated: Use minified JS file or not, mainly use during dev to avoid JS +# recompilation +# c.NotebookApp.ignore_minified_js = False + +## (bytes/sec) Maximum rate at which stream output can be sent on iopub before +# they are limited. +# c.NotebookApp.iopub_data_rate_limit = 1000000 + +## (msgs/sec) Maximum rate at which messages can be sent on iopub before they are +# limited. +# c.NotebookApp.iopub_msg_rate_limit = 1000 + +## The IP address the notebook server will listen on. +# c.NotebookApp.ip = 'localhost' + +## Supply extra arguments that will be passed to Jinja environment. +# c.NotebookApp.jinja_environment_options = {} + +## Extra variables to supply to jinja templates when rendering. +# c.NotebookApp.jinja_template_vars = {} + +## The kernel manager class to use. +# c.NotebookApp.kernel_manager_class = 'notebook.services.kernels.kernelmanager.MappingKernelManager' + +## The kernel spec manager class to use. Should be a subclass of +# `jupyter_client.kernelspec.KernelSpecManager`. +# +# The Api of KernelSpecManager is provisional and might change without warning +# between this version of Jupyter and the next stable one. +# c.NotebookApp.kernel_spec_manager_class = 'jupyter_client.kernelspec.KernelSpecManager' + +## The full path to a private key file for usage with SSL/TLS. +# c.NotebookApp.keyfile = '' + +## Hostnames to allow as local when allow_remote_access is False. +# +# Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted as +# local as well. +# c.NotebookApp.local_hostnames = ['localhost'] + +## The login handler class to use. +# c.NotebookApp.login_handler_class = 'notebook.auth.login.LoginHandler' + +## The logout handler class to use. +# c.NotebookApp.logout_handler_class = 'notebook.auth.logout.LogoutHandler' + +## The MathJax.js configuration file that is to be used. +# c.NotebookApp.mathjax_config = 'TeX-AMS-MML_HTMLorMML-full,Safe' + +## A custom url for MathJax.js. Should be in the form of a case-sensitive url to +# MathJax, for example: /static/components/MathJax/MathJax.js +# c.NotebookApp.mathjax_url = '' + +## Sets the maximum allowed size of the client request body, specified in the +# Content-Length request header field. If the size in a request exceeds the +# configured value, a malformed HTTP message is returned to the client. +# +# Note: max_body_size is applied even in streaming mode. +# c.NotebookApp.max_body_size = 536870912 + +## Gets or sets the maximum amount of memory, in bytes, that is allocated for +# use by the buffer manager. +# c.NotebookApp.max_buffer_size = 536870912 + +## Gets or sets a lower bound on the open file handles process resource limit. 
+# This may need to be increased if you run into an OSError: [Errno 24] Too many +# open files. This is not applicable when running on Windows. +# c.NotebookApp.min_open_files_limit = 4096 + +## Dict of Python modules to load as notebook server extensions.Entry values can +# be used to enable and disable the loading ofthe extensions. The extensions +# will be loaded in alphabetical order. +# c.NotebookApp.nbserver_extensions = {} + +## The directory to use for notebooks and kernels. +# c.NotebookApp.notebook_dir = 'jupyter-notebooks' + +## Whether to open in a browser after starting. The specific browser used is +# platform dependent and determined by the python standard library `webbrowser` +# module, unless it is overridden using the --browser (NotebookApp.browser) +# configuration option. +# c.NotebookApp.open_browser = True + +## Hashed password to use for web authentication. +# +# To generate, type in a python/IPython shell: +# +# from notebook.auth import passwd; passwd() +# +# The string should be of the form type:salt:hashed-password. +c.NotebookApp.password = "" + +## Forces users to use a password for the Notebook server. This is useful in a +# multi user environment, for instance when everybody in the LAN can access each +# other's machine through ssh. +# +# In such a case, server the notebook server on localhost is not secure since +# any user can connect to the notebook server via ssh. +# c.NotebookApp.password_required = False + +## The port the notebook server will listen on. +# c.NotebookApp.port = 8888 + +## The number of additional ports to try if the specified port is not available. +# c.NotebookApp.port_retries = 50 + +## DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. +# c.NotebookApp.pylab = 'disabled' + +## If True, display a button in the dashboard to quit (shutdown the notebook +# server). +# c.NotebookApp.quit_button = True + +## (sec) Time window used to check the message and data rate limits. +# c.NotebookApp.rate_limit_window = 3 + +## Reraise exceptions encountered loading server extensions? +# c.NotebookApp.reraise_server_extension_failures = False + +## DEPRECATED use the nbserver_extensions dict instead +# c.NotebookApp.server_extensions = [] + +## The session manager class to use. +# c.NotebookApp.session_manager_class = 'notebook.services.sessions.sessionmanager.SessionManager' + +## Shut down the server after N seconds with no kernels or terminals running and +# no activity. This can be used together with culling idle kernels +# (MappingKernelManager.cull_idle_timeout) to shutdown the notebook server when +# it's not in use. This is not precisely timed: it may shut down up to a minute +# later. 0 (the default) disables this automatic shutdown. +# c.NotebookApp.shutdown_no_activity_timeout = 0 + +## Supply SSL options for the tornado HTTPServer. See the tornado docs for +# details. +# c.NotebookApp.ssl_options = {} + +## Supply overrides for terminado. Currently only supports "shell_command". +# c.NotebookApp.terminado_settings = {} + +## Set to False to disable terminals. +# +# This does *not* make the notebook server more secure by itself. Anything the +# user can in a terminal, they can also do in a notebook. +# +# Terminals may also be automatically disabled if the terminado package is not +# available. +# c.NotebookApp.terminals_enabled = True + +## Token used for authenticating first-time connections to the server. +# +# When no password is enabled, the default is to generate a new, random token. 
+# +# Setting to an empty string disables authentication altogether, which is NOT +# RECOMMENDED. +c.NotebookApp.token = "" # '' + +## Supply overrides for the tornado.web.Application that the Jupyter notebook +# uses. +# c.NotebookApp.tornado_settings = {} + +## Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- +# For headerssent by the upstream reverse proxy. Necessary if the proxy handles +# SSL +# c.NotebookApp.trust_xheaders = False + +## Disable launching browser by redirect file +# +# For versions of notebook > 5.7.2, a security feature measure was added that +# prevented the authentication token used to launch the browser from being +# visible. This feature makes it difficult for other users on a multi-user +# system from running code in your Jupyter session as you. +# +# However, some environments (like Windows Subsystem for Linux (WSL) and +# Chromebooks), launching a browser using a redirect file can lead the browser +# failing to load. This is because of the difference in file structures/paths +# between the runtime and the browser. +# +# Disabling this setting to False will disable this behavior, allowing the +# browser to launch by using a URL and visible token (as before). +# c.NotebookApp.use_redirect_file = True + +## DEPRECATED, use tornado_settings +# c.NotebookApp.webapp_settings = {} + +## Specify Where to open the notebook on startup. This is the `new` argument +# passed to the standard library method `webbrowser.open`. The behaviour is not +# guaranteed, but depends on browser support. Valid values are: +# +# - 2 opens a new tab, +# - 1 opens a new window, +# - 0 opens in an existing window. +# +# See the `webbrowser.open` documentation for details. +# c.NotebookApp.webbrowser_open_new = 2 + +## Set the tornado compression options for websocket connections. +# +# This value will be returned from +# :meth:`WebSocketHandler.get_compression_options`. None (default) will disable +# compression. A dict (even an empty one) will enable compression. +# +# See the tornado docs for WebSocketHandler.get_compression_options for details. +# c.NotebookApp.websocket_compression_options = None + +## The base URL for websockets, if it differs from the HTTP server (hint: it +# almost certainly doesn't). +# +# Should be in the form of an HTTP origin: ws[s]://hostname[:port] +# c.NotebookApp.websocket_url = '' + +# ------------------------------------------------------------------------------ +# ConnectionFileMixin(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ + +## Mixin for configurable classes that work with connection files + +## JSON file in which to store connection info [default: kernel-.json] +# +# This file will contain the IP, ports, and authentication key needed to connect +# clients to this kernel. By default, this file will be created in the security +# dir of the current profile, but can be specified by absolute path. +# c.ConnectionFileMixin.connection_file = '' + +## set the control (ROUTER) port [default: random] +# c.ConnectionFileMixin.control_port = 0 + +## set the heartbeat port [default: random] +# c.ConnectionFileMixin.hb_port = 0 + +## set the iopub (PUB) port [default: random] +# c.ConnectionFileMixin.iopub_port = 0 + +## Set the kernel's IP address [default localhost]. If the IP address is +# something other than localhost, then Consoles on other machines will be able +# to connect to the Kernel, so be careful! 
+# c.ConnectionFileMixin.ip = '' + +## set the shell (ROUTER) port [default: random] +# c.ConnectionFileMixin.shell_port = 0 + +## set the stdin (ROUTER) port [default: random] +# c.ConnectionFileMixin.stdin_port = 0 + +## +# c.ConnectionFileMixin.transport = 'tcp' + +# ------------------------------------------------------------------------------ +# KernelManager(ConnectionFileMixin) configuration +# ------------------------------------------------------------------------------ + +## Manages a single kernel in a subprocess on this host. +# +# This version starts kernels with Popen. + +## Should we autorestart the kernel if it dies. +# c.KernelManager.autorestart = True + +## DEPRECATED: Use kernel_name instead. +# +# The Popen Command to launch the kernel. Override this if you have a custom +# kernel. If kernel_cmd is specified in a configuration file, Jupyter does not +# pass any arguments to the kernel, because it cannot make any assumptions about +# the arguments that the kernel understands. In particular, this means that the +# kernel does not receive the option --debug if it given on the Jupyter command +# line. +# c.KernelManager.kernel_cmd = [] + +## Time to wait for a kernel to terminate before killing it, in seconds. +# c.KernelManager.shutdown_wait_time = 5.0 + +# ------------------------------------------------------------------------------ +# Session(Configurable) configuration +# ------------------------------------------------------------------------------ + +## Object for handling serialization and sending of messages. +# +# The Session object handles building messages and sending them with ZMQ sockets +# or ZMQStream objects. Objects can communicate with each other over the +# network via Session objects, and only need to work with the dict-based IPython +# message spec. The Session will handle serialization/deserialization, security, +# and metadata. +# +# Sessions support configurable serialization via packer/unpacker traits, and +# signing with HMAC digests via the key/keyfile traits. +# +# Parameters ---------- +# +# debug : bool +# whether to trigger extra debugging statements +# packer/unpacker : str : 'json', 'pickle' or import_string +# importstrings for methods to serialize message parts. If just +# 'json' or 'pickle', predefined JSON and pickle packers will be used. +# Otherwise, the entire importstring must be used. +# +# The functions must accept at least valid JSON input, and output *bytes*. +# +# For example, to use msgpack: +# packer = 'msgpack.packb', unpacker='msgpack.unpackb' +# pack/unpack : callables +# You can also set the pack/unpack callables for serialization directly. +# session : bytes +# the ID of this Session object. The default is to generate a new UUID. +# username : unicode +# username added to message headers. The default is to ask the OS. +# key : bytes +# The key used to initialize an HMAC signature. If unset, messages +# will not be signed or checked. +# keyfile : filepath +# The file containing a key. If this is set, `key` will be initialized +# to the contents of the file. + +## Threshold (in bytes) beyond which an object's buffer should be extracted to +# avoid pickling. +# c.Session.buffer_threshold = 1024 + +## Whether to check PID to protect against calls after fork. +# +# This check can be disabled if fork-safety is handled elsewhere. +# c.Session.check_pid = True + +## Threshold (in bytes) beyond which a buffer should be sent without copying. 
+# c.Session.copy_threshold = 65536 + +## Debug output in the Session +# c.Session.debug = False + +## The maximum number of digests to remember. +# +# The digest history will be culled when it exceeds this value. +# c.Session.digest_history_size = 65536 + +## The maximum number of items for a container to be introspected for custom +# serialization. Containers larger than this are pickled outright. +# c.Session.item_threshold = 64 + +## execution key, for signing messages. +# c.Session.key = b'' + +## path to file containing execution key. +# c.Session.keyfile = '' + +## Metadata dictionary, which serves as the default top-level metadata dict for +# each message. +# c.Session.metadata = {} + +## The name of the packer for serializing messages. Should be one of 'json', +# 'pickle', or an import name for a custom callable serializer. +# c.Session.packer = 'json' + +## The UUID identifying this session. +# c.Session.session = '' + +## The digest scheme used to construct the message signatures. Must have the form +# 'hmac-HASH'. +# c.Session.signature_scheme = 'hmac-sha256' + +## The name of the unpacker for unserializing messages. Only used with custom +# functions for `packer`. +# c.Session.unpacker = 'json' + +## Username for the Session. Default is your system username. +# c.Session.username = 'ddw' + +# ------------------------------------------------------------------------------ +# MultiKernelManager(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ + +## A class for managing multiple kernels. + +## The name of the default kernel to start +# c.MultiKernelManager.default_kernel_name = 'python3' + +## The kernel manager class. This is configurable to allow subclassing of the +# KernelManager for customized behavior. +# c.MultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.IOLoopKernelManager' + +# ------------------------------------------------------------------------------ +# MappingKernelManager(MultiKernelManager) configuration +# ------------------------------------------------------------------------------ + +## A KernelManager that handles notebook mapping and HTTP error handling + +## White list of allowed kernel message types. When the list is empty, all +# message types are allowed. +# c.MappingKernelManager.allowed_message_types = [] + +## Whether messages from kernels whose frontends have disconnected should be +# buffered in-memory. +# +# When True (default), messages are buffered and replayed on reconnect, avoiding +# lost messages due to interrupted connectivity. +# +# Disable if long-running kernels will produce too much output while no +# frontends are connected. +# c.MappingKernelManager.buffer_offline_messages = True + +## Whether to consider culling kernels which are busy. Only effective if +# cull_idle_timeout > 0. +# c.MappingKernelManager.cull_busy = False + +## Whether to consider culling kernels which have one or more connections. Only +# effective if cull_idle_timeout > 0. +# c.MappingKernelManager.cull_connected = False + +## Timeout (in seconds) after which a kernel is considered idle and ready to be +# culled. Values of 0 or lower disable culling. Very short timeouts may result +# in kernels being culled for users with poor network connections. +# c.MappingKernelManager.cull_idle_timeout = 0 + +## The interval (in seconds) on which to check for idle kernels exceeding the +# cull timeout value. 
+# c.MappingKernelManager.cull_interval = 300 + +## Timeout for giving up on a kernel (in seconds). +# +# On starting and restarting kernels, we check whether the kernel is running and +# responsive by sending kernel_info_requests. This sets the timeout in seconds +# for how long the kernel can take before being presumed dead. This affects the +# MappingKernelManager (which handles kernel restarts) and the +# ZMQChannelsHandler (which handles the startup). +# c.MappingKernelManager.kernel_info_timeout = 60 + +## +# c.MappingKernelManager.root_dir = '' + +# ------------------------------------------------------------------------------ +# KernelSpecManager(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ + +## If there is no Python kernelspec registered and the IPython kernel is +# available, ensure it is added to the spec list. +# c.KernelSpecManager.ensure_native_kernel = True + +## The kernel spec class. This is configurable to allow subclassing of the +# KernelSpecManager for customized behavior. +# c.KernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec' + +## Whitelist of allowed kernel names. +# +# By default, all installed kernels are allowed. +# c.KernelSpecManager.whitelist = set() + +# ------------------------------------------------------------------------------ +# ContentsManager(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ + +## Base class for serving files and directories. +# +# This serves any text or binary file, as well as directories, with special +# handling for JSON notebook documents. +# +# Most APIs take a path argument, which is always an API-style unicode path, and +# always refers to a directory. +# +# - unicode, not url-escaped +# - '/'-separated +# - leading and trailing '/' will be stripped +# - if unspecified, path defaults to '', +# indicating the root path. + +## Allow access to hidden files +# c.ContentsManager.allow_hidden = False + +## +# c.ContentsManager.checkpoints = None + +## +# c.ContentsManager.checkpoints_class = 'notebook.services.contents.checkpoints.Checkpoints' + +## +# c.ContentsManager.checkpoints_kwargs = {} + +## handler class to use when serving raw file requests. +# +# Default is a fallback that talks to the ContentsManager API, which may be +# inefficient, especially for large files. +# +# Local files-based ContentsManagers can use a StaticFileHandler subclass, which +# will be much more efficient. +# +# Access to these files should be Authenticated. +# c.ContentsManager.files_handler_class = 'notebook.files.handlers.FilesHandler' + +## Extra parameters to pass to files_handler_class. +# +# For example, StaticFileHandlers generally expect a `path` argument specifying +# the root directory from which to serve files. +# c.ContentsManager.files_handler_params = {} + +## Glob patterns to hide in file and directory listings. +# c.ContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~'] + +## Python callable or importstring thereof +# +# To be called on a contents model prior to save. +# +# This can be used to process the structure, such as removing notebook outputs +# or other side effects that should not be saved. +# +# It will be called as (all arguments passed by keyword):: +# +# hook(path=path, model=model, contents_manager=self) +# +# - model: the model to be saved. Includes file contents. 
+# Modifying this dict will affect the file that is stored. +# - path: the API path of the save destination +# - contents_manager: this ContentsManager instance +# c.ContentsManager.pre_save_hook = None + +## +# c.ContentsManager.root_dir = '/' + +## The base name used when creating untitled directories. +# c.ContentsManager.untitled_directory = 'Untitled Folder' + +## The base name used when creating untitled files. +# c.ContentsManager.untitled_file = 'untitled' + +## The base name used when creating untitled notebooks. +# c.ContentsManager.untitled_notebook = 'Untitled' + +# ------------------------------------------------------------------------------ +# FileManagerMixin(Configurable) configuration +# ------------------------------------------------------------------------------ + +## Mixin for ContentsAPI classes that interact with the filesystem. +# +# Provides facilities for reading, writing, and copying both notebooks and +# generic files. +# +# Shared by FileContentsManager and FileCheckpoints. +# +# Note ---- Classes using this mixin must provide the following attributes: +# +# root_dir : unicode +# A directory against against which API-style paths are to be resolved. +# +# log : logging.Logger + +## By default notebooks are saved on disk on a temporary file and then if +# successfully written, it replaces the old ones. This procedure, namely +# 'atomic_writing', causes some bugs on file system without operation order +# enforcement (like some networked fs). If set to False, the new notebook is +# written directly on the old one which could fail (eg: full filesystem or quota +# ) +# c.FileManagerMixin.use_atomic_writing = True + +# ------------------------------------------------------------------------------ +# FileContentsManager(FileManagerMixin,ContentsManager) configuration +# ------------------------------------------------------------------------------ + +## If True (default), deleting files will send them to the platform's +# trash/recycle bin, where they can be recovered. If False, deleting files +# really deletes them. +# c.FileContentsManager.delete_to_trash = True + +## Python callable or importstring thereof +# +# to be called on the path of a file just saved. +# +# This can be used to process the file on disk, such as converting the notebook +# to a script or HTML via nbconvert. +# +# It will be called as (all arguments passed by keyword):: +# +# hook(os_path=os_path, model=model, contents_manager=instance) +# +# - path: the filesystem path to the file just written - model: the model +# representing the file - contents_manager: this ContentsManager instance +# c.FileContentsManager.post_save_hook = None + +## +# c.FileContentsManager.root_dir = '' + +## DEPRECATED, use post_save_hook. Will be removed in Notebook 5.0 +# c.FileContentsManager.save_script = False + +# ------------------------------------------------------------------------------ +# NotebookNotary(LoggingConfigurable) configuration +# ------------------------------------------------------------------------------ + +## A class for computing and verifying notebook signatures. + +## The hashing algorithm used to sign notebooks. +# c.NotebookNotary.algorithm = 'sha256' + +## The sqlite file in which to store notebook signatures. By default, this will +# be in your Jupyter data directory. You can set it to ':memory:' to disable +# sqlite writing to the filesystem. +# c.NotebookNotary.db_file = '' + +## The secret key with which notebooks are signed. 
+# c.NotebookNotary.secret = b'' + +## The file where the secret key is stored. +# c.NotebookNotary.secret_file = '' + +## A callable returning the storage backend for notebook signatures. The default +# uses an SQLite database. +# c.NotebookNotary.store_factory = traitlets.Undefined + +# ------------------------------------------------------------------------------ +# GatewayKernelManager(MappingKernelManager) configuration +# ------------------------------------------------------------------------------ + +## Kernel manager that supports remote kernels hosted by Jupyter Kernel or +# Enterprise Gateway. + +# ------------------------------------------------------------------------------ +# GatewayKernelSpecManager(KernelSpecManager) configuration +# ------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ +# GatewayClient(SingletonConfigurable) configuration +# ------------------------------------------------------------------------------ + +## This class manages the configuration. It's its own singleton class so that we +# can share these values across all objects. It also contains some helper methods +# to build request arguments out of the various config options. + +## The authorization token used in the HTTP headers. (JUPYTER_GATEWAY_AUTH_TOKEN +# env var) +# c.GatewayClient.auth_token = None + +## The filename of CA certificates or None to use defaults. +# (JUPYTER_GATEWAY_CA_CERTS env var) +# c.GatewayClient.ca_certs = None + +## The filename for client SSL certificate, if any. (JUPYTER_GATEWAY_CLIENT_CERT +# env var) +# c.GatewayClient.client_cert = None + +## The filename for client SSL key, if any. (JUPYTER_GATEWAY_CLIENT_KEY env var) +# c.GatewayClient.client_key = None + +## The time allowed for HTTP connection establishment with the Gateway server. +# (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var) +# c.GatewayClient.connect_timeout = 60.0 + +## A comma-separated list of environment variable names that will be included, +# along with their values, in the kernel startup request. The corresponding +# `env_whitelist` configuration value must also be set on the Gateway server - +# since that configuration value indicates which environmental values to make +# available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var) +# c.GatewayClient.env_whitelist = '' + +## Additional HTTP headers to pass on the request. This value will be converted +# to a dict. (JUPYTER_GATEWAY_HEADERS env var) +# c.GatewayClient.headers = '{}' + +## The password for HTTP authentication. (JUPYTER_GATEWAY_HTTP_PWD env var) +# c.GatewayClient.http_pwd = None + +## The username for HTTP authentication. (JUPYTER_GATEWAY_HTTP_USER env var) +# c.GatewayClient.http_user = None + +## The gateway API endpoint for accessing kernel resources +# (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var) +# c.GatewayClient.kernels_endpoint = '/api/kernels' + +## The gateway API endpoint for accessing kernelspecs +# (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var) +# c.GatewayClient.kernelspecs_endpoint = '/api/kernelspecs' + +## The gateway endpoint for accessing kernelspecs resources +# (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var) +# c.GatewayClient.kernelspecs_resource_endpoint = '/kernelspecs' + +## The time allowed for HTTP request completion. 
(JUPYTER_GATEWAY_REQUEST_TIMEOUT +# env var) +# c.GatewayClient.request_timeout = 60.0 + +## The url of the Kernel or Enterprise Gateway server where kernel specifications +# are defined and kernel management takes place. If defined, this Notebook +# server acts as a proxy for all kernel management and kernel specification +# retrieval. (JUPYTER_GATEWAY_URL env var) +# c.GatewayClient.url = None + +## For HTTPS requests, determines if server's certificate should be validated or +# not. (JUPYTER_GATEWAY_VALIDATE_CERT env var) +# c.GatewayClient.validate_cert = True + +## The websocket url of the Kernel or Enterprise Gateway server. If not +# provided, this value will correspond to the value of the Gateway url with 'ws' +# in place of 'http'. (JUPYTER_GATEWAY_WS_URL env var) +# c.GatewayClient.ws_url = None diff --git a/.jupyter/migrated b/.jupyter/migrated new file mode 100644 index 0000000..9d1fe45 --- /dev/null +++ b/.jupyter/migrated @@ -0,0 +1 @@ +2022-03-09T10:34:55.281930 \ No newline at end of file diff --git a/.jupyter/nbconfig/common.json b/.jupyter/nbconfig/common.json new file mode 100644 index 0000000..f3be66a --- /dev/null +++ b/.jupyter/nbconfig/common.json @@ -0,0 +1,3 @@ +{ + "nbext_hide_incompat": false +} diff --git a/.jupyter/nbconfig/notebook.json b/.jupyter/nbconfig/notebook.json new file mode 100644 index 0000000..10a4a07 --- /dev/null +++ b/.jupyter/nbconfig/notebook.json @@ -0,0 +1,11 @@ +{ + "load_extensions": { + "nbextensions_configurator/config_menu/main": true, + "contrib_nbextensions_help_item/main": true, + "execute_time/ExecuteTime": true, + "codefolding/main": true + }, + "Notebook": { + "Header": true + } +} \ No newline at end of file diff --git a/.jupyter/nbconfig/tree.json b/.jupyter/nbconfig/tree.json new file mode 100644 index 0000000..8592283 --- /dev/null +++ b/.jupyter/nbconfig/tree.json @@ -0,0 +1,5 @@ +{ + "load_extensions": { + "nbextensions_configurator/tree_tab/main": true + } +} diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..ce1102a --- /dev/null +++ b/.pylintrc @@ -0,0 +1,492 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code +extension-pkg-whitelist=pydantic + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS,.git + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + .*requirements.*txt, + package-lock.json, + package.json + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +# init-hook='import sys; sys.path.append("lifetime")' + +# Use multiple processes to speed up Pylint. +jobs=4 + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=no + +# Specify a configuration file. +#rcfile= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. 
Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +#disable=missing-docstring,ungrouped-imports,no-else-return +disable= + protected-access, + missing-module-docstring, + logging-format-interpolation, + locally-disabled + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +msg-template={path}:{line}: [{msg_id}({symbol}), {obj}] {msg} + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio).You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=colorized + +# Tells whether to display a full report or only the messages +reports=no + +# Activate the evaluation score. +score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete alloed of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=optparse.Values,sys.exit + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. +spelling-store-unknown-words=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. 
+notes=FIXME, + XXX + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. 
This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=79 + +# Maximum number of lines in a module +max-module-lines=2000 + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +# no-space-check=trailing-comma, + # dict-separator + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=yes + +# Minimum lines number of a similarity. +min-similarity-lines=10 + + +[BASIC] + +# Naming style matching correct argument names +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style +argument-rgx=[a-z_][a-z0-9_]{1,30}$ + +# Naming style matching correct attribute names +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style +attr-rgx=[a-z_][a-z0-9_]{1,30}$ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Naming style matching correct class attribute names +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style +#class-attribute-rgx= + +# Naming style matching correct class names +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming-style +#class-rgx= + +# Naming style matching correct constant names +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma +good-names= + a, + b, + c, + g, + h, + i, + j, + k, + l, + m, + n, + t, + v, + x, + y, + ex, + NC, + Run, + Tmi, + Tpl, + _, + logger, + DType, + COV_PORT + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=yes + +# Naming style matching correct inline iteration names +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style +#inlinevar-rgx= + +# Naming style matching correct method names +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style +#method-rgx= + +# Naming style matching correct module names +module-naming-style=snake_case + +# Regular expression matching correct module names. 
Overrides module-naming- +# style +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +property-classes=abc.abstractproperty + +# Naming style matching correct variable names +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style +variable-rgx=[a-z_][a-z0-9_]{1,30}$ + + +[IMPORTS] + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=optparse,tkinter.tix + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=10 + +# Maximum number of attributes for a class (see R0902). +max-attributes=15 + +# Maximum number of boolean expressions in a if statement +max-bool-expr=5 + +# Maximum number of branch for function / method body +max-branches=15 + +# Maximum number of locals for function / method body +max-locals=30 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of statements in function / method body (def. 50) +max-statements=75 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=1 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. 
Defaults to +# "builtins.Exception" +overgeneral-exceptions=builtins.Exception diff --git a/invoke.yaml b/invoke.yaml new file mode 100644 index 0000000..d557f13 --- /dev/null +++ b/invoke.yaml @@ -0,0 +1,8 @@ +ado: + organization: "" + project: "" + repository: + name: "cobra" +project_name: "cobra" +project_slug: "cobra" +start_port: 9876 \ No newline at end of file diff --git a/jupyter-notebooks/0.0.0 - PD.Generic-development-guide.ipynb b/jupyter-notebooks/0.0.0 - PD.Generic-development-guide.ipynb new file mode 100644 index 0000000..ee91fe9 --- /dev/null +++ b/jupyter-notebooks/0.0.0 - PD.Generic-development-guide.ipynb @@ -0,0 +1,138 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "cb139d65", + "metadata": {}, + "source": [ + "## Development with the notebook\n", + "\n", + "This notebook server can be used as a scratchpad for work-in-progress code, algorithms, visualizations, tests, ...\n", + "\n", + "The ultimate goal here is to refactor these snippets into well-documented and tested Python classes and methods so they can be imported easily.\n", + "\n", + "The suggested workflow is as follows:\n", + "\n", + "1. Use the notebook to prototype and test-drive your development\n", + "2. Migrate your developed code into the project. Create a Python file, organize it into classes, ...\n", + "3. Document your migrated code (docstring) so its functionality is documented in the Sphinx docs.\n", + "4. Import the migrated code back into this notebook and develop tests around it.\n", + "5. Include these tests in the project.\n", + "6. Clean up your notebook, as the functionality is now in the Python project and can be easily imported in the future.\n", + "\n", + "### Import the source code of the project\n", + "\n", + "Updates to the project's source need to be reflected here, so that new functionality and bug fixes are directly \n", + "available in this notebook. Jupyter supports this behaviour with the autoreload magic.\n", + "\n", + "The two cells below set up this autoreload functionality (so the original import is not cached) and add the project path to the system path. The project source can then be imported (without installing it as a Python package)."
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "b7917704", + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2\n", + "# %matplotlib inline" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "d05ad465", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "PROJECT_PATH = /Users/pietro.dantuono/Python/Packages/cobra\n", + "Platform: macOS-13.2-x86_64-i386-64bit\n", + "Python version: 3.10.9 (main, Feb 16 2023, 11:50:01) [Clang 14.0.0 (clang-1400.0.29.202)]\n", + "numpy version: 1.24.2\n", + "pandas version: 1.5.3\n", + "scikit-learn version: 1.2.2\n" + ] + } + ], + "source": [ + "import datetime as dt\n", + "import os\n", + "import platform\n", + "import sys\n", + "import numpy as np\n", + "import pandas as pd\n", + "import matplotlib.pyplot as plt\n", + "import sklearn\n", + "\n", + "PROJECT_PATH = os.path.dirname(os.getcwd())\n", + "print(f'PROJECT_PATH = {PROJECT_PATH}')\n", + "\n", + "if not PROJECT_PATH in sys.path:\n", + " sys.path.append(PROJECT_PATH)\n", + " \n", + "print('Platform:', platform.platform())\n", + "print('Python version:', sys.version)\n", + "print('numpy version:', np.__version__)\n", + "print('pandas version:', pd.__version__)\n", + "print('scikit-learn version:', sklearn.__version__)\n", + "np.set_printoptions(threshold=sys.maxsize)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "64e9dc85", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n", + " Hi, welcome to Cobra!\n", + " You can find some tutorials that explain the functioning of cobra on the PythonPredictions GitHub:\n", + " https://github.com/PythonPredictions/cobra/tree/master/tutorials\n", + " \n" + ] + } + ], + "source": [ + "import cobra" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5e0e1c68", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/jupyter-notebooks/README.md b/jupyter-notebooks/README.md new file mode 100644 index 0000000..1416d8d --- /dev/null +++ b/jupyter-notebooks/README.md @@ -0,0 +1,8 @@ +Jupyter notebooks. +Naming convention is `X.Y.Z - NS.notebook-name` +where `X.Y.Z` is the version of the notebook, `NS` are the creators initials, +and `notebook-name` is a short description of the notebook. 
+ +**Example** + +`1.2.3 - JD.initial-data-exploration` diff --git a/requirements.dev.txt b/requirements.dev.txt index 0a7c2fe..5c74845 100644 --- a/requirements.dev.txt +++ b/requirements.dev.txt @@ -1,7 +1,10 @@ +flake8 >= 6.0.0 +invoke >= 2.0.0 mypy>=0.942 pycodestyle>=2.8.0 pydocstyle>=6.1.1 pylint>=2.13.7 pytest>=7.1.1 pytest-mock>=3.7.0 -pytest-cov \ No newline at end of file +pytest-cov +git-changelog \ No newline at end of file diff --git a/requirements.notebook.txt b/requirements.notebook.txt new file mode 100644 index 0000000..c1685d9 --- /dev/null +++ b/requirements.notebook.txt @@ -0,0 +1,2 @@ +notebook +nb-clean \ No newline at end of file diff --git a/tasks/__init__.py b/tasks/__init__.py new file mode 100644 index 0000000..48b6b24 --- /dev/null +++ b/tasks/__init__.py @@ -0,0 +1,27 @@ +"""Invoke tasks for the COBRA project.""" + +from invoke import Collection + +from . import notebook +from . import quality +# from . import docs +from . import test +# from . import ado +# from . import cruft +# from . import search +# from . import performance +# from . import export + + +ns = Collection() +ns.add_collection(quality, name="qa") +# ns.add_collection(docs) +ns.add_collection(test) +# ns.add_collection(ado) +# ns.add_collection(cruft) +# ns.add_collection(search) +# ns.add_collection(performance) +# ns.add_collection(export) + + +ns.add_collection(notebook, name="nb") diff --git a/tasks/colors.py b/tasks/colors.py new file mode 100644 index 0000000..9fe44d9 --- /dev/null +++ b/tasks/colors.py @@ -0,0 +1,26 @@ +from enum import Enum + +ENDC = "\033[0m" +BOLD = "\033[1m" +UNDERLINE = "\033[4m" + + +class Color(Enum): + """Color class.""" + HEADER = "\033[95m" + OKBLUE = "\033[94m" + OKGREEN = "\033[92m" + WARNING = "\033[93m" + ERROR = "\033[91m" + + +def colorize(message, color=Color.OKGREEN, underline=False, bold=False): + """Colorize a message.""" + msg = color.value + if underline: + msg += UNDERLINE + if bold: + msg += BOLD + msg += message + msg += ENDC + return msg diff --git a/tasks/notebook.py b/tasks/notebook.py new file mode 100644 index 0000000..363780d --- /dev/null +++ b/tasks/notebook.py @@ -0,0 +1,106 @@ +import glob +from invoke import task +from .colors import colorize, Color +from .system import ( + OperatingSystem, + get_current_system, +) + +SCREEN_NAME = "notebook" +SYSTEM = get_current_system() + + +@task +def run(c_r): + """Start notebook server on foreground.""" + with c_r.cd("./jupyter-notebooks"): + # check current directory + with c_r.prefix("export JUPYTER_CONFIG_DIR=../.jupyter"): + _command = f"jupyter notebook --port={c_r.start_port} --no-browser" + c_r.run(_command) + + +@task +def stop(c_r): + """Stop notebook server in background.""" + result = c_r.run(f"screen -ls {SCREEN_NAME}", warn=True, hide="both") + if "No Sockets" in result.stdout: + return + if SYSTEM in [OperatingSystem.LINUX, OperatingSystem.MAC]: + tmp_str = colorize( + "Stopping notebook server...", + color=Color.HEADER, + bold=True + ) + print(f"{tmp_str}") + _command = f"kill $(lsof -ti:{c_r.start_port})" + print(f">>> {colorize(_command, color=Color.OKBLUE)}\n") + c_r.run(_command) + elif SYSTEM == OperatingSystem.WINDOWS: + print( + "Stopping notebook server is not supported on Windows. " + "Please stop the server manually." 
+ ) + else: + raise ValueError(f"System {SYSTEM} is not supported") + + +@task(pre=[stop], default=True) +def start(c_r): + """Start notebook server in background.""" + + tmp_str = colorize( + "Starting notebook server...", + color=Color.HEADER, + bold=True + ) + with c_r.cd("./jupyter-notebooks"): + if SYSTEM in [OperatingSystem.LINUX, OperatingSystem.MAC]: + with c_r.prefix("export JUPYTER_CONFIG_DIR=../.jupyter"): + _command = ( + f"screen -d -S {SCREEN_NAME} -m " + f"jupyter notebook --port={c_r.start_port} --no-browser" + ) + print(f"{tmp_str}") + c_r.run(_command) + print(f">>> {colorize(_command, color=Color.OKBLUE)}\n") + elif SYSTEM == OperatingSystem.WINDOWS: + # Already inside ./jupyter-notebooks via the enclosing cd() above. + _command = ( + "wt -d . jupyter notebook " + f"--port={c_r.start_port}" + ) + print( + colorize( + "Notebook server is not attached to this terminal process." + " Close the Windows Terminal instance instead.", + color=Color.WARNING, + ) + ) + + print(f"{tmp_str}") + c_r.run(_command) + print(f">>> {colorize(_command, color=Color.OKBLUE)}\n") + else: + raise ValueError(f"System {SYSTEM} is not supported") + + url = f'http://localhost:{c_r.start_port}' + + print("Jupyter hosted in background:\n") + print(f"--> {colorize(url, underline=True)}\n") + print(f"Stop server: {colorize('inv nb.stop')}\n") + + +@task +def clean(c_r, check=False): + """Clean all notebooks in the jupyter-notebooks folder.""" + notebook_files = glob.glob("./jupyter-notebooks/*.ipynb") + command = ("clean", "Cleaning") + if check: + command = ("check", "Checking") + + for nb_file in notebook_files: + print(f"{command[1]} {nb_file}") + _command = f"nb-clean {command[0]} -e {nb_file}" + print(f"--> {_command}") + c_r.run(_command) diff --git a/tasks/quality.py b/tasks/quality.py new file mode 100644 index 0000000..06ce049 --- /dev/null +++ b/tasks/quality.py @@ -0,0 +1,51 @@ +"""Quality assessment tasks. Run all quality checks with `inv qa`.
+The quality checks are: +- black +- flake8 +- pylint +- mypy + +""" + +from invoke import task +from .colors import colorize, Color + +from .system import PTY + + +# @task +# def black(c_r): +# """Run code formatter: black.""" +# tmp_str = colorize("\nRunning black...\n", color=Color.HEADER, bold=True) +# print(f"{tmp_str}") +# c_r.run(f"black {c_r.project_slug}", pty=PTY) + + +@task +def flake(c_r): + """Run style guide enforcement: flake8.""" + tmp_str = colorize("\nRunning flake8...\n", color=Color.HEADER, bold=True) + print(f"{tmp_str}") + c_r.run(f"flake8 {c_r.project_slug}", warn=True, pty=PTY) + + +@task +def pylint(c_r): + """Run code analysis: pylint.""" + tmp_str = colorize("\nRunning pylint...\n", color=Color.HEADER, bold=True) + print(f"{tmp_str}") + c_r.run(f"pylint {c_r.project_slug}", warn=True, pty=PTY) + + +@task +def mypy(c_r): + """Run static type checking: mypy.""" + tmp_str = colorize("Running mypy...\n", color=Color.HEADER, bold=True) + print(f"{tmp_str}") + c_r.run(f"mypy {c_r.project_slug}", warn=True, pty=PTY) + + +# @task(post=[black, flake, pylint, mypy], default=True) +@task(post=[flake, pylint, mypy], default=True) +def all(c_r): # pylint: disable=W0622,W0613 # noqa: F811 + """Run all quality checks.""" diff --git a/tasks/system.py b/tasks/system.py new file mode 100644 index 0000000..932ead9 --- /dev/null +++ b/tasks/system.py @@ -0,0 +1,44 @@ +"""Identification of the current operating system.""" + +import platform +from enum import Enum + +COV_SCREEN_NAME = "coverage" +DOC_SCREEN_NAME = "sphinx-docs" + + +class OperatingSystem(Enum): + """Initializes the operating system.""" + WINDOWS = 'Windows' + LINUX = 'Linux' + MAC = 'Darwin' + + +def get_current_system(): + """Returns the current operating system.""" + system = platform.system() # pylint: disable=W0621 + + if system == 'Linux': + return OperatingSystem.LINUX + if system == 'Windows': + return OperatingSystem.WINDOWS + if system == 'Darwin': + return OperatingSystem.MAC + + raise ValueError(f'Invalid operating system: {system}') + + +system = get_current_system() + +if system in [OperatingSystem.LINUX, OperatingSystem.MAC]: + COV_DOC_BUILD_DIR = "_build/htmlcov" + DOCS_BUILD_DIR = "docs" +elif system == OperatingSystem.WINDOWS: + COV_DOC_BUILD_DIR = r"_build\htmlcov" + DOCS_BUILD_DIR = r"_build\docs" +else: + raise ValueError(f'System {system} is not supported') + +PTY = True +if get_current_system() == OperatingSystem.WINDOWS: + PTY = False diff --git a/tasks/test.py b/tasks/test.py new file mode 100644 index 0000000..b78a648 --- /dev/null +++ b/tasks/test.py @@ -0,0 +1,105 @@ +"""Test tasks.""" + +from invoke import task +from .colors import colorize, Color +from .system import ( + OperatingSystem, + get_current_system, + COV_DOC_BUILD_DIR, + COV_SCREEN_NAME, + PTY, +) + +SYSTEM = get_current_system() + + +@task(help={"verbose": "Run tests verbose."}) +def run(c_r, verbose=False): + """Run test suite.""" + if verbose: + c_r.run( + f"pytest -v -W ignore::UserWarning " + f"--cov={c_r.project_slug} --cov-report=term:skip-covered " + f"--cov-report=html --cov-report=html:{COV_DOC_BUILD_DIR}", + pty=PTY, + ) + else: + c_r.run( + f"pytest -W ignore::UserWarning " + f"--cov={c_r.project_slug} --cov-report=term:skip-covered " + f"--cov-report=html --cov-report=html:{COV_DOC_BUILD_DIR}", + pty=PTY, + ) + + +@task +def coverage(c_r): + """Start coverage report webserver.""" + COV_PORT = c_r.start_port + 2 + + if SYSTEM in [OperatingSystem.LINUX, OperatingSystem.MAC]: + _command = ( + f"screen -d -S 
{COV_SCREEN_NAME} " + "-m python -m http.server --bind localhost " + f"--directory {COV_DOC_BUILD_DIR} {COV_PORT}" + ) + elif SYSTEM == OperatingSystem.WINDOWS: + _command = ( + f"wt -d . python -m http.server --bind localhost " + f"--directory {COV_DOC_BUILD_DIR} {COV_PORT}" + ) + else: + raise ValueError(f"System {SYSTEM} is not supported") + tmp_str = colorize( + "Starting coverage server...", + color=Color.HEADER, + bold=True + ) + print(f"{tmp_str}") + c_r.run(_command) + print(f">>> {colorize(_command, color=Color.OKBLUE)}\n") + + url = f"http://localhost:{COV_PORT}" + + print("Coverage server hosted in background:\n") + print(f"--> {colorize(url, underline=True)}\n") + print(f"Stop server: {colorize('inv test.stop')}\n") + + +@task +def stop(c_r): + """Stop coverage report webserver.""" + COV_PORT = c_r.start_port + 2 + + if SYSTEM in [OperatingSystem.LINUX, OperatingSystem.MAC]: + result = c_r.run( + f"screen -ls {COV_SCREEN_NAME}", warn=True, hide="both" + ) + if "No Sockets" in result.stdout: + return + tmp_str = colorize( + "Stopping coverage server...", + color=Color.HEADER, + bold=True + ) + print(tmp_str) + _command = f"kill $(lsof -ti:{COV_PORT})" + print(f">>> {colorize(_command, color=Color.OKBLUE)}\n") + c_r.run(_command) + + elif SYSTEM == OperatingSystem.WINDOWS: + print( + colorize( + "Coverage server is not attached to this terminal process. " + "Close windows terminal instance instead.", + color=Color.WARNING, + ) + ) + return + else: + raise ValueError(f"System {SYSTEM} is not supported") + + +@task(post=[stop, run, coverage], default=True) +def all(c_r): # pylint: disable=W0622,W0613 # noqa: F811 + """Run all tests and start coverage report webserver."""
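Taken together, the files above wire everything up: invoke merges `invoke.yaml` from the project root into the runtime configuration, which is why the tasks can read `c_r.project_slug` and `c_r.start_port` straight from the task context, and `tasks/__init__.py` exposes the collections as `inv nb` (background notebook server), `inv qa` (flake8, pylint, mypy) and `inv test` (pytest with coverage plus the report server). A minimal sketch of that configuration lookup follows; it is illustrative only and not part of the diff, and the `ports` task is hypothetical.

    # tasks/ports.py -- hypothetical helper, not included in this change set.
    from invoke import task


    @task
    def ports(c_r):
        """Show the ports derived from start_port in invoke.yaml."""
        # invoke loads invoke.yaml from the project root and merges it into
        # the context config, so its keys are available as attributes.
        print(f"project:  {c_r.project_slug}")    # "cobra"
        print(f"notebook: {c_r.start_port}")      # 9876, used by tasks/notebook.py
        print(f"coverage: {c_r.start_port + 2}")  # 9878, used by tasks/test.py

A task module like this would be registered the same way as the others: import it in `tasks/__init__.py` and add it to the namespace with `ns.add_collection(...)`.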