diff --git a/doc/doccumbo/conf.py b/doc/doccumbo/conf.py
new file mode 100644
index 0000000000000000000000000000000000000000..e7414e8b56ea28a0e8e305937c8287c684c4d98c
--- /dev/null
+++ b/doc/doccumbo/conf.py
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+
+# import os, sys
+#
+# MultiviewPlatform documentation build configuration file, created by
+# sphinx-quickstart on Mon Jan 29 17:13:09 2018.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import os
+import sys
+
+sys.path.insert(0, os.path.abspath('.'))
+sys.path.insert(0, os.path.abspath('../../multiconfusion'))
+sys.path.insert(0, os.path.abspath('../..'))
+file_loc = os.path.split(__file__)[0]
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(file_loc), '.')))
+import multiconfusion
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+add_module_names = False
+
+# sys.path.append(os.path.abspath('sphinxext'))
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = ['sphinx.ext.autodoc',
+              # 'sphinx.ext.doctest',
+              # 'sphinx.ext.intersphinx',
+              # 'sphinx.ext.todo',
+              # 'nbsphinx',
+              'sphinx.ext.coverage',
+              'sphinx.ext.imgmath',
+              # 'sphinx.ext.mathjax',
+              # 'sphinx.ext.ifconfig',
+              # 'sphinx.ext.viewcode',
+              # 'sphinx.ext.githubpages',
+              'sphinx.ext.napoleon',
+              'm2r']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+#
+# source_suffix = {'.rst': 'restructuredtext', '.md': 'markdown'}
+# source_suffix = '.rst'
+source_suffix = ['.rst', '.md']
+
+# source_parsers = {
+#  '.md': CommonMarkParser,
+# }
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'MultiConfusion'
+copyright = u'2019, Dominique Benielli'
+author = u'Dominique Benielli'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = u'0.0.0'
+# The full version, including alpha/beta/rc tags.
+release = u'0'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This patterns also effect to html_static_path and html_extra_path
+exclude_patterns = []
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+#
+# html_theme = 'sphinx_rtd_theme'
+html_theme = 'classic'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#
+# html_theme_options = {}
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = []
+
+# -- Options for HTMLHelp output ------------------------------------------
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'MultiConfusiondoc'
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    #
+    # 'papersize': 'letterpaper',
+
+    # The font size ('10pt', '11pt' or '12pt').
+    #
+    # 'pointsize': '10pt',
+
+    # Additional stuff for the LaTeX preamble.
+    #
+    # 'preamble': '',
+
+    # Latex figure (float) alignment
+    #
+    # 'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+    (master_doc, 'MultiConfusion.tex', u'MultiConfusion Documentation',
+     u'Dominique Benielli', 'manual'),
+]
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    (master_doc, 'confusion', u'MultiConfusion Documentation',
+     [author], 1)
+]
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+    (master_doc, 'MultiConfusion', u'MultiConfusion Documentation',
+     author, 'MultiConfusion', 'One line description of project.',
+     'Miscellaneous'),
+]
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {'https://docs.python.org/': None}
+
+# def setup(app):
+#     app.add_config_value('recommonmark_config', {
+#             'auto_toc_tree_section': 'Contents',
+#             }, True)
+#     app.add_transform(AutoStructify)
diff --git a/doc/doccumbo/credits.rst b/doc/doccumbo/credits.rst
new file mode 100644
index 0000000000000000000000000000000000000000..9da33fd266b9dbb700fe69648fe05db610020c68
--- /dev/null
+++ b/doc/doccumbo/credits.rst
@@ -0,0 +1,59 @@
+Credits
+=======
+
+**multiconfusion** is developed by the
+`development team <https://developpement.lis-lab.fr/>`_ of the
+`LIS <http://www.lis-lab.fr/>`_.
+
+If you use **multiconfusion** in a scientific publication, please cite the
+following paper::
+
+ @InProceedings{Koco:2011:BAM,
+  author={Ko\c{c}o, Sokol and Capponi, C{\'e}cile},
+  editor={Gunopulos, Dimitrios and Hofmann, Thomas and Malerba, Donato
+          and Vazirgiannis, Michalis},
+  title={A Boosting Approach to Multiview Classification with Cooperation},
+  booktitle={Proceedings of the 2011 European Conference on Machine Learning
+             and Knowledge Discovery in Databases - Volume Part II},
+  year={2011},
+  location={Athens, Greece},
+  publisher={Springer-Verlag},
+  address={Berlin, Heidelberg},
+  pages={209--228},
+  numpages = {20},
+  isbn={978-3-642-23783-6},
+  url={https://link.springer.com/chapter/10.1007/978-3-642-23783-6_14},
+  keywords={boosting, classification, multiview learning,
+            supervised learning},
+ }
+
+References
+----------
+
+* Sokol Koço, Cécile Capponi,
+  `"A boosting approach to multiview classification with cooperation"
+  <https://link.springer.com/chapter/10.1007/978-3-642-23783-6_14>`_,
+  Proceedings of the 2011 European Conference on Machine Learning (ECML),
+  Athens, Greece, pp.209-228, 2011, Springer-Verlag.
+
+* Sokol Koço,
+  `"Tackling the uneven views problem with cooperation based ensemble
+  learning methods" <http://www.theses.fr/en/2013AIXM4101>`_,
+  PhD Thesis, Aix-Marseille Université, 2013.
+
+Copyright
+---------
+
+Université d'Aix Marseille (AMU) -
+Centre National de la Recherche Scientifique (CNRS) -
+Université de Toulon (UTLN).
+
+Copyright © 2019-2020 AMU, CNRS, UTLN
+
+License
+-------
+
+**multiconfusion** is free software: you can redistribute it and/or modify
+it under the terms of the **GNU Lesser General Public License** as published by
+the Free Software Foundation, either **version 3** of the License, or
+(at your option) any later version.
diff --git a/doc/doccumbo/index.rst b/doc/doccumbo/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..740f792823e5898d029d7ef34928005af7988048
--- /dev/null
+++ b/doc/doccumbo/index.rst
@@ -0,0 +1,29 @@
+.. Multiconfusion documentation master file, created by
+   sphinx-quickstart on Mon Sep  2 12:12:08 2019.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Welcome to Mucumbo's documentation!
+===================================
+
+**multiconfusion** is a Python package implementing boosting algorithms for
+machine learning on multimodal data, based on confusion matrices.
+
+It is compatible with `scikit-learn <http://scikit-learn.org/>`_, a popular
+package for machine learning in Python.
+
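+The estimators follow the usual scikit-learn ``fit``/``predict`` workflow.
+The snippet below is only a minimal sketch of that workflow: the class name
+``MuCumboClassifier`` and the exact signatures shown here are indicative,
+see the reference section for the actual API.
+
+.. code-block:: python
+
+   import numpy as np
+   # Indicative import: the estimator lives in multiconfusion.cumbo, but the
+   # class name may differ (check the API reference).
+   from multiconfusion.cumbo import MuCumboClassifier
+
+   # Two 2-dimensional views of the same samples, concatenated column-wise;
+   # views_ind gives the column index where each view starts (and ends).
+   X = np.array([[1., 0., 0., 1.],
+                 [0., 1., 1., 0.],
+                 [1., 1., 0., 0.],
+                 [0., 0., 1., 1.]])
+   y = np.array([0, 1, 0, 1])
+   views_ind = np.array([0, 2, 4])
+
+   clf = MuCumboClassifier(n_estimators=3)
+   clf.fit(X, y, views_ind)
+   print(clf.predict(X))
+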
+.. toctree::
+   :maxdepth: 2
+   :caption: Contents:
+
+   install_devel
+   reference/index
+   credits
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/doc/doccumbo/install_devel.rst b/doc/doccumbo/install_devel.rst
new file mode 100644
index 0000000000000000000000000000000000000000..eb63b9006cd975bc6c3014ca19c814d6cd3c4c61
--- /dev/null
+++ b/doc/doccumbo/install_devel.rst
@@ -0,0 +1,75 @@
+Installation and development
+============================
+
+Dependencies
+------------
+
+**multiconfusion** works with **Python 3.5 or later**.
+
+**multiconfusion** depends on **scikit-learn** (version >= 0.19).
+
+Optionally, **matplotlib** is required when running the examples.
+
+Installation
+------------
+
+**multiconfusion** is
+`available on PyPI <https://pypi.org/project/multiconfusion/>`_
+and can be installed using **pip**::
+
+  pip install multiconfusion
+
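+Once installed, you can check that the package is importable with, for
+instance::
+
+  python -c "import multiconfusion"
+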
+If you prefer to install directly from the **source code**, clone the **Git**
+repository of the project and run the **setup.py** file with the following
+commands::
+
+  git clone git@gitlab.lis-lab.fr:dev/multiconfusion.git
+  cd multiconfusion
+  python setup.py install
+
+or alternatively use **pip**::
+
+  pip install git+https://gitlab.lis-lab.fr/dev/multiconfusion.git
+
+Development
+-----------
+
+The development of **multiconfusion** follows the guidelines provided by the
+scikit-learn community.
+
+Refer to the `Developer's Guide <http://scikit-learn.org/stable/developers>`_
+of the scikit-learn project for more details.
+
+Source code
+-----------
+
+You can get the **source code** from the **Git** repository of the project::
+
+  git clone git@gitlab.lis-lab.fr:dev/multiconfusion.git
+
+
+Testing
+-------
+
+**pytest** and **pytest-cov** are required to run the **test suite** with::
+
+  pytest-3
+
+A code coverage report is displayed in the terminal when running the tests.
+An HTML version of the report is also stored in the directory **htmlcov**.
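+
+If coverage is not collected by default in your environment, it can be
+requested explicitly with the standard **pytest-cov** options, for example::
+
+  pytest-3 --cov=multiconfusion --cov-report=term --cov-report=html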
+
+Generating the documentation
+----------------------------
+
+The generation of the documentation requires **sphinx**, **sphinx-gallery**,
+**numpydoc** and **matplotlib** and can be run with::
+
+  python setup.py build_sphinx
+
+or::
+
+  cd doc
+  sphinx-build -b html . ./build
+
+
+The resulting files are stored in the directory **build/sphinx/html** (or in
+**doc/build** when using the **sphinx-build** command directly).
diff --git a/doc/doccumbo/reference/index.rst b/doc/doccumbo/reference/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..6d289a7bfd35d3770ef6ca770e4ee56b10de2676
--- /dev/null
+++ b/doc/doccumbo/reference/index.rst
@@ -0,0 +1,16 @@
+Welcome to Multi-View Mu Cumbo's reference!
+===========================================
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Contents:
+
+   modules
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/doc/doccumbo/reference/modules.rst b/doc/doccumbo/reference/modules.rst
new file mode 100644
index 0000000000000000000000000000000000000000..b6d9bd91ac735d04472dfff3d813c0f07d9b0aaa
--- /dev/null
+++ b/doc/doccumbo/reference/modules.rst
@@ -0,0 +1,12 @@
+multiconfusion
+==============
+
+.. toctree::
+   :maxdepth: 3
+
+.. automodule:: multiconfusion.cumbo
+   :members:
+   :inherited-members:
+
+   datasets
+ 
diff --git a/doc/docmetricexamples/conf.py b/doc/docmetricexamples/conf.py
new file mode 100644
index 0000000000000000000000000000000000000000..a45f0cf2f943497097cbe6fc091262596b280486
--- /dev/null
+++ b/doc/docmetricexamples/conf.py
@@ -0,0 +1,277 @@
+# -*- coding: utf-8 -*-
+
+from datetime import date
+import os
+import sys
+sys.path.insert(0, os.path.abspath('../../metriclearning'))
+sys.path.insert(0, os.path.abspath('../..'))
+sys.path.insert(0, os.path.abspath("."))
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+sys.path.append(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'sphinxext'))
+import metriclearning
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+# sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    'sphinx.ext.autodoc',
+    'sphinx.ext.doctest',
+    'sphinx.ext.imgmath',
+    'numpydoc',
+ #     'sphinx_gallery.gen_gallery'
+]
+
+# Add any paths that contain templates here, relative to this directory.
+# templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+source_encoding = 'utf-8'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'metriclearning'
+author = 'Dominique Benielli'
+copyright = '2017-{}, LIS UMR 7020'.format(date.today().year)
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = metriclearning.__version__
+# The full version, including alpha/beta/rc tags.
+release = metriclearning.__version__
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+# language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = 'nature'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+# html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+# html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+# html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = '{}doc'.format(project)
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    'papersize': 'a4paper',
+
+    # The font size ('10pt', '11pt' or '12pt').
+    'pointsize': '10pt',
+
+    # Additional stuff for the LaTeX preamble.
+    # 'preamble': '',
+
+    # Latex figure (float) alignment
+    'figure_align': 'htbp'}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+  (master_doc, '{}.tex'.format(project), '{} Documentation'.format(project),
+   author, 'manual')]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    (master_doc, project, '{} Documentation'.format(project),
+     [author], 1)
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+  (master_doc, project, '{} Documentation'.format(project), author, project,
+   'Multi-View Metric Learning in Vector-Valued Kernel Spaces for machine learning.',
+   'Miscellaneous')]
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+# texinfo_no_detailmenu = False
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+    'sklearn': ('http://scikit-learn.org/stable', None)
+}
+
+numpydoc_show_class_members = False
+
+sphinx_gallery_conf = {
+    'doc_module': (project,),
+    'backreferences_dir': 'backreferences',
+    # path to your examples scripts
+    'examples_dirs': '../examples',
+    # path where to save gallery generated examples
+    'gallery_dirs': 'auto_examples'}
+
+# Generate the plots for the gallery
+plot_gallery = 'True'
diff --git a/doc/docmetricexamples/demo.py b/doc/docmetricexamples/demo.py
new file mode 100644
index 0000000000000000000000000000000000000000..108ac8c6550c4c0abba5d9fc09573ce815ea024d
--- /dev/null
+++ b/doc/docmetricexamples/demo.py
@@ -0,0 +1,183 @@
+
+import numpy as np
+import matplotlib.pyplot as plt
+from sklearn import datasets
+from sklearn.metrics import accuracy_score
+from sklearn.metrics.pairwise import rbf_kernel
+from metriclearning.mvml import MVML
+from metriclearning.datasets.data_sample import DataSample
+from metriclearning.tests.datasets.get_dataset_path import get_dataset_path
+import pickle
+"""
+Demonstration of how MVML (in file mvml.py) is intended to be used with a very simple simulated dataset.
+
+The demonstration uses scikit-learn to generate the datasets and to compute the RBF kernel function; see
+http://scikit-learn.org/stable/
+"""
+
+np.random.seed(4)
+
+# =========== create a simple dataset ============
+
+n_tot = 200
+half = int(n_tot/2)
+n_tr = 120
+
+# create a bit more data than needed so that we can take "half" samples from each class
+X0, y0 = datasets.make_moons(n_samples=n_tot+2, noise=0.3, shuffle=False)
+X1, y1 = datasets.make_circles(n_samples=n_tot+2, noise=0.1, shuffle=False)
+
+# make multi-view correspondence (select an equal number of samples for both classes and order the data
+# the same way in both views)
+
+yinds0 = np.append(np.where(y0 == 0)[0][0:half], np.where(y0 == 1)[0][0:half])
+yinds1 = np.append(np.where(y1 == 0)[0][0:half], np.where(y1 == 1)[0][0:half])
+
+X0 = X0[yinds0, :]
+X1 = X1[yinds1, :]
+Y = np.append(np.zeros(half)-1, np.ones(half))  # labels -1 and 1
+
+# show data
+plt.figure(1)
+plt.subplot(121)
+plt.scatter(X0[:, 0], X0[:, 1], c=Y)
+plt.title("all data, view 1")
+plt.subplot(122)
+plt.scatter(X1[:, 0], X1[:, 1], c=Y)
+plt.title("all data, view 2")
+# plt.show()
+
+# shuffle
+order = np.random.permutation(n_tot)
+X0 = X0[order, :]
+X1 = X1[order, :]
+Y = Y[order]
+
+# make kernel dictionaries
+kernel_dict = {}
+test_kernel_dict = {}
+kernel_dict[0] = rbf_kernel(X0[0:n_tr, :])
+kernel_dict[1] = rbf_kernel(X1[0:n_tr, :])
+test_kernel_dict[0] = rbf_kernel(X0[n_tr:n_tot, :], X0[0:n_tr, :])
+test_kernel_dict[1] = rbf_kernel(X1[n_tr:n_tot, :], X1[0:n_tr, :])
+
+# input_x = get_dataset_path("input_x_dic.pkl")
+# f = open(input_x, "wb")
+# pickle.dump(input_x, f)
+
+
+d = DataSample(kernel_dict)
+a = d.data
+
+# =========== use MVML in classifying the data ============
+
+# demo of how the code is intended to be used; parameters are not cross-validated, just picked for illustration
+# mvml = MVML(kernel_dict, Y[0:n_tr], [0.1, 1], nystrom_param=0.2)
+mvml = MVML([0.1, 1], nystrom_param=0.2)
+mvml.fit(a, Y[0:n_tr])
+# with approximation
+# mvml = MVML(kernel_dict, Y[0:n_tr], [0.1, 1], nystrom_param=1)  # without approximation
+
+A1, g1, w1 = mvml.learn_mvml()  # default: learn A, don't learn w   (learn_A=1, learn_w=0)
+pred1 = np.sign(mvml.predict_mvml(test_kernel_dict, g1, w1))  # take sign for classification result
+
+A2, g2, w2 = mvml.learn_mvml(learn_A=2, learn_w=1)  # learn sparse A and learn w
+pred2 = np.sign(mvml.predict_mvml(test_kernel_dict, g2, w2))
+# print(w2)
+
+A3, g3, w3 = mvml.learn_mvml(learn_A=3)  # use MVML_Cov, don't learn w
+pred3 = np.sign(mvml.predict_mvml(test_kernel_dict, g3, w3))
+
+A4, g4, w4 = mvml.learn_mvml(learn_A=4)  # use MVML_I, don't learn w
+pred4 = np.sign(mvml.predict_mvml(test_kernel_dict, g4, w4))
+
+
+# =========== show results ============
+
+# accuracies
+acc1 = accuracy_score(Y[n_tr:n_tot], pred1)
+acc2 = accuracy_score(Y[n_tr:n_tot], pred2)
+acc3 = accuracy_score(Y[n_tr:n_tot], pred3)
+acc4 = accuracy_score(Y[n_tr:n_tot], pred4)
+
+# display obtained accuracies
+
+print("MVML:       ", acc1)
+print("MVMLsparse: ", acc2)
+print("MVML_Cov:   ", acc3)
+print("MVML_I:     ", acc4)
+
+
+# plot data and some classification results
+
+plt.figure(2)
+plt.subplot(341)
+plt.scatter(X0[n_tr:n_tot, 0], X0[n_tr:n_tot, 1], c=Y[n_tr:n_tot])
+plt.title("orig. view 1")
+plt.subplot(342)
+plt.scatter(X1[n_tr:n_tot, 0], X1[n_tr:n_tot, 1], c=Y[n_tr:n_tot])
+plt.title("orig. view 2")
+
+pred1[np.where(pred1[:, 0] != Y[n_tr:n_tot])] = 0
+pred1 = pred1.reshape((pred1.shape[0]))
+plt.subplot(343)
+plt.scatter(X0[n_tr:n_tot, 0], X0[n_tr:n_tot, 1], c=pred1)
+plt.title("MVML view 1")
+plt.subplot(344)
+plt.scatter(X1[n_tr:n_tot, 0], X1[n_tr:n_tot, 1], c=pred1)
+plt.title("MVML view 2")
+
+pred2[np.where(pred2[:, 0] != Y[n_tr:n_tot])] = 0
+pred2 = pred2.reshape((pred2.shape[0]))
+plt.subplot(345)
+plt.scatter(X0[n_tr:n_tot, 0], X0[n_tr:n_tot, 1], c=pred2)
+plt.title("MVMLsparse view 1")
+plt.subplot(346)
+plt.scatter(X1[n_tr:n_tot, 0], X1[n_tr:n_tot, 1], c=pred2)
+plt.title("MVMLsparse view 2")
+
+pred3[np.where(pred3[:, 0] != Y[n_tr:n_tot])] = 0
+pred3 = pred3.reshape((pred3.shape[0]))
+
+plt.subplot(347)
+plt.scatter(X0[n_tr:n_tot, 0], X0[n_tr:n_tot, 1], c=pred3)
+plt.title("MVML_Cov view 1")
+plt.subplot(348)
+plt.scatter(X1[n_tr:n_tot, 0], X1[n_tr:n_tot, 1], c=pred3)
+plt.title("MVML_Cov view 2")
+
+pred4[np.where(pred4[:, 0] != Y[n_tr:n_tot])] = 0
+pred4 = pred4.reshape((pred4.shape[0]))
+plt.subplot(349)
+plt.scatter(X0[n_tr:n_tot, 0], X0[n_tr:n_tot, 1], c=pred4)
+plt.title("MVML_I view 1")
+plt.subplot(3,4,10)
+plt.scatter(X1[n_tr:n_tot, 0], X1[n_tr:n_tot, 1], c=pred4)
+plt.title("MVML_I view 2")
+
+# plt.figure(3)
+# plt.spy(A2)
+# plt.title("sparse learned A")
+
+plt.show()
diff --git a/doc/docmetricexamples/index.rst b/doc/docmetricexamples/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..71e56a03f0e56363349387590c87c4da2246e445
--- /dev/null
+++ b/doc/docmetricexamples/index.rst
@@ -0,0 +1,21 @@
+.. metriclearning documentation master file, created by
+   sphinx-quickstart on Mon Sep  2 12:12:08 2019.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Welcome to metriclearning's documentation!
+==========================================
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Contents:
+
+   reference/index
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/doc/docmetricexamples/reference/datasets.rst b/doc/docmetricexamples/reference/datasets.rst
new file mode 100644
index 0000000000000000000000000000000000000000..21d17f4f12eb38121011208e0b3d295ec38177fe
--- /dev/null
+++ b/doc/docmetricexamples/reference/datasets.rst
@@ -0,0 +1,14 @@
+datasets
+========
+
+.. automodule:: metriclearning.datasets.base
+    :members: 
+    :undoc-members:
+    :show-inheritance:
+
+
+
+.. automodule:: metriclearning.datasets.data_sample
+    :members: 
+    :undoc-members:
+    :show-inheritance:
diff --git a/doc/docmetricexamples/reference/index.rst b/doc/docmetricexamples/reference/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..f0c60bfb216b320b754b82c1f599bcf9c5edf2f5
--- /dev/null
+++ b/doc/docmetricexamples/reference/index.rst
@@ -0,0 +1,16 @@
+Welcome to Multi-View metriclearning's reference!
+=================================================
+
+.. toctree::
+   :maxdepth: 2
+   :caption: Contents:
+
+   modules
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/doc/docmetricexamples/reference/lpMKL.rst b/doc/docmetricexamples/reference/lpMKL.rst
new file mode 100644
index 0000000000000000000000000000000000000000..27a1e37642d6859285c923dd2df5b0b52d12caa2
--- /dev/null
+++ b/doc/docmetricexamples/reference/lpMKL.rst
@@ -0,0 +1,11 @@
+lpMKL
+=====
+
+.. automodule:: metriclearning.lpMKL
+    :members: MKL
+    :undoc-members:
+    :show-inheritance:
+
+
+ 
+
diff --git a/doc/docmetricexamples/reference/mkernel.rst b/doc/docmetricexamples/reference/mkernel.rst
new file mode 100644
index 0000000000000000000000000000000000000000..b3eaf1c1558f954e5c06d129290b94fe53341c5e
--- /dev/null
+++ b/doc/docmetricexamples/reference/mkernel.rst
@@ -0,0 +1,9 @@
+abstract mkernel
+================
+
+.. automodule:: metriclearning.mkernel
+    :members: MKernel
+    :undoc-members:
+    :show-inheritance:
+
+
diff --git a/doc/docmetricexamples/reference/modules.rst b/doc/docmetricexamples/reference/modules.rst
new file mode 100644
index 0000000000000000000000000000000000000000..6d61b3e57bb53e20298ffa6c6fa0d6a4d0c5a404
--- /dev/null
+++ b/doc/docmetricexamples/reference/modules.rst
@@ -0,0 +1,11 @@
+metriclearning
+==============
+
+.. toctree::
+   :maxdepth: 3
+
+   mkernel
+   mvml
+   lpMKL
+   datasets
+ 
diff --git a/doc/docmetricexamples/reference/mvml.rst b/doc/docmetricexamples/reference/mvml.rst
new file mode 100644
index 0000000000000000000000000000000000000000..3dae7b8b56934c9dd59cff00f80a8f1d5b57152b
--- /dev/null
+++ b/doc/docmetricexamples/reference/mvml.rst
@@ -0,0 +1,11 @@
+MVML of metriclearning
+======================
+
+.. automodule:: metriclearning.mvml
+    :members: MVML
+    :undoc-members:
+    :show-inheritance:
+
+
+ 
+
diff --git a/doc/docmetricexamples/tes-demo.py b/doc/docmetricexamples/tes-demo.py
new file mode 100644
index 0000000000000000000000000000000000000000..2632e5ef0352bbdf82c76d3cc86d1dd17d1f5530
--- /dev/null
+++ b/doc/docmetricexamples/tes-demo.py
@@ -0,0 +1,97 @@
+import numpy as np
+from sklearn import datasets
+from sklearn.metrics.pairwise import rbf_kernel
+from metriclearning.mvml import MVML
+from metriclearning.lpMKL import MKL
+from metriclearning.datasets.data_sample import DataSample
+from metriclearning.tests.datasets.get_dataset_path import get_dataset_path
+import pickle
+np.random.seed(4)
+
+# =========== create a simple dataset ============
+
+n_tot = 200
+half = int(n_tot/2)
+n_tr = 120
+
+# create a bit more data than needed so that we can take "half" samples from each class
+X0, y0 = datasets.make_moons(n_samples=n_tot+2, noise=0.3, shuffle=False)
+X1, y1 = datasets.make_circles(n_samples=n_tot+2, noise=0.1, shuffle=False)
+
+# make multi-view correspondence (select an equal number of samples for both classes and order the data
+# the same way in both views)
+
+yinds0 = np.append(np.where(y0 == 0)[0][0:half], np.where(y0 == 1)[0][0:half])
+yinds1 = np.append(np.where(y1 == 0)[0][0:half], np.where(y1 == 1)[0][0:half])
+
+X0 = X0[yinds0, :]
+X1 = X1[yinds1, :]
+Y = np.append(np.zeros(half)-1, np.ones(half))  # labels -1 and 1
+
+# shuffle
+order = np.random.permutation(n_tot)
+X0 = X0[order, :]
+X1 = X1[order, :]
+Y = Y[order]
+
+# make kernel dictionaries
+kernel_dict = {}
+test_kernel_dict = {}
+kernel_dict[0] = rbf_kernel(X0[0:n_tr, :])
+kernel_dict[1] = rbf_kernel(X1[0:n_tr, :])
+test_kernel_dict[0] = rbf_kernel(X0[n_tr:n_tot, :], X0[0:n_tr, :])
+test_kernel_dict[1] = rbf_kernel(X1[n_tr:n_tot, :], X1[0:n_tr, :])
+
+d = DataSample(kernel_dict)
+a = d.data
+# np.save(input_x, kernel_dict)
+# np.save(input_y, Y)
+# f = open(input_x, "wb")
+# pickle.dump(input_x, f)
+#input_x = get_dataset_path("input_x_dic.pkl")
+#f = open(input_x, "r")
+#dicoc = pickle.load(f)
+# pickle.dump(kernel_dict, f)
+#f.close()
+# =========== use MVML in classifying the data ============
+
+# demo of how the code is intended to be used; parameters are not cross-validated, just picked for illustration
+# mvml = MVML(kernel_dict, Y[0:n_tr], [0.1, 1], nystrom_param=0.2)
+
+
+mvml = MVML([0.1, 1], nystrom_param=0.2)
+mvml.fit(a, Y[0:n_tr])
+print("x shape", mvml.X_.shape)
+print("x shape int", mvml.X_.shapes_int)
+dd = DataSample(test_kernel_dict)
+X_test = dd.data
+pred1 = mvml.predict(X_test)
+
+mkl = MKL(lmbda=0.1)
+mkl.fit(kernel_dict, Y[0:n_tr])
+
+mkl.predict(X_test)
+#red1 = np.sign(mvml.predict_mvml(test_kernel_dict, g1, w1))
+
+
+
+
diff --git a/doc/docmumbo/api.rst b/doc/docmumbo/api.rst
new file mode 100644
index 0000000000000000000000000000000000000000..5a9ad853bdbc2ee10d99c22e7fe6fb8c3de00b75
--- /dev/null
+++ b/doc/docmumbo/api.rst
@@ -0,0 +1,9 @@
+API Documentation
+=================
+
+multimodalboost.mumbo
+---------------------
+
+.. automodule:: multimodalboost.mumbo
+   :members:
+   :inherited-members:
diff --git a/doc/docmumbo/auto_examples/auto_examples_jupyter.zip b/doc/docmumbo/auto_examples/auto_examples_jupyter.zip
new file mode 100644
index 0000000000000000000000000000000000000000..a907b3133a5141e380b3d062d50de3f0672f99f0
Binary files /dev/null and b/doc/docmumbo/auto_examples/auto_examples_jupyter.zip differ
diff --git a/doc/docmumbo/auto_examples/auto_examples_python.zip b/doc/docmumbo/auto_examples/auto_examples_python.zip
new file mode 100644
index 0000000000000000000000000000000000000000..4fd29deea830691b773c143489fc1a246df84a67
Binary files /dev/null and b/doc/docmumbo/auto_examples/auto_examples_python.zip differ
diff --git a/doc/docmumbo/auto_examples/images/sphx_glr_plot_2_views_2_classes_001.png b/doc/docmumbo/auto_examples/images/sphx_glr_plot_2_views_2_classes_001.png
new file mode 100644
index 0000000000000000000000000000000000000000..6c10bfca50d38ffb88d84b3bd7603ff549a6f5e2
Binary files /dev/null and b/doc/docmumbo/auto_examples/images/sphx_glr_plot_2_views_2_classes_001.png differ
diff --git a/doc/docmumbo/auto_examples/images/sphx_glr_plot_2_views_2_classes_002.png b/doc/docmumbo/auto_examples/images/sphx_glr_plot_2_views_2_classes_002.png
new file mode 100644
index 0000000000000000000000000000000000000000..0a7b2d55a07522b45d671a4a5135e0a96d1c6727
Binary files /dev/null and b/doc/docmumbo/auto_examples/images/sphx_glr_plot_2_views_2_classes_002.png differ
diff --git a/doc/docmumbo/auto_examples/images/sphx_glr_plot_3_views_3_classes_001.png b/doc/docmumbo/auto_examples/images/sphx_glr_plot_3_views_3_classes_001.png
new file mode 100644
index 0000000000000000000000000000000000000000..34d20dc6f92a4bfed33da87d308a7e6ecb2aaa0a
Binary files /dev/null and b/doc/docmumbo/auto_examples/images/sphx_glr_plot_3_views_3_classes_001.png differ
diff --git a/doc/docmumbo/auto_examples/images/sphx_glr_plot_3_views_3_classes_002.png b/doc/docmumbo/auto_examples/images/sphx_glr_plot_3_views_3_classes_002.png
new file mode 100644
index 0000000000000000000000000000000000000000..c17cb7816691a1c00401885d938ec50f7afd366f
Binary files /dev/null and b/doc/docmumbo/auto_examples/images/sphx_glr_plot_3_views_3_classes_002.png differ
diff --git a/doc/docmumbo/auto_examples/images/thumb/sphx_glr_plot_2_views_2_classes_thumb.png b/doc/docmumbo/auto_examples/images/thumb/sphx_glr_plot_2_views_2_classes_thumb.png
new file mode 100644
index 0000000000000000000000000000000000000000..5387f6d37a91d3136168809c420c760dbb86cb43
Binary files /dev/null and b/doc/docmumbo/auto_examples/images/thumb/sphx_glr_plot_2_views_2_classes_thumb.png differ
diff --git a/doc/docmumbo/auto_examples/images/thumb/sphx_glr_plot_3_views_3_classes_thumb.png b/doc/docmumbo/auto_examples/images/thumb/sphx_glr_plot_3_views_3_classes_thumb.png
new file mode 100644
index 0000000000000000000000000000000000000000..5f94c4d01baf9fd165269e47b7a85b55cde225a7
Binary files /dev/null and b/doc/docmumbo/auto_examples/images/thumb/sphx_glr_plot_3_views_3_classes_thumb.png differ
diff --git a/doc/docmumbo/auto_examples/index.rst b/doc/docmumbo/auto_examples/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..ba7c2afc25e39a5e29e400188661dd0c78744d79
--- /dev/null
+++ b/doc/docmumbo/auto_examples/index.rst
@@ -0,0 +1,83 @@
+:orphan:
+
+
+
+.. _sphx_glr_auto_examples:
+
+.. _examples:
+
+Examples
+========
+
+The following toy examples illustrate how the MuMBo algorithm exploits
+cooperation between views for classification.
+
+
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from two classes, split between two two-dimensional views...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_2_views_2_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py`
+
+.. raw:: html
+
+    </div>
+
+
+.. toctree::
+   :hidden:
+
+   /auto_examples/plot_2_views_2_classes
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from three classes, split between three two-dimensional v...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_3_views_3_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py`
+
+.. raw:: html
+
+    </div>
+
+
+.. toctree::
+   :hidden:
+
+   /auto_examples/plot_3_views_3_classes
+.. raw:: html
+
+    <div class="sphx-glr-clear"></div>
+
+
+
+.. only :: html
+
+ .. container:: sphx-glr-footer
+    :class: sphx-glr-footer-gallery
+
+
+  .. container:: sphx-glr-download
+
+    :download:`Download all examples in Python source code: auto_examples_python.zip <//home/dominique/projets/ANR-Lives/multimodal/multimodalboost/doc/auto_examples/auto_examples_python.zip>`
+
+
+
+  .. container:: sphx-glr-download
+
+    :download:`Download all examples in Jupyter notebooks: auto_examples_jupyter.zip <//home/dominique/projets/ANR-Lives/multimodal/multimodalboost/doc/auto_examples/auto_examples_jupyter.zip>`
+
+
+.. only:: html
+
+ .. rst-class:: sphx-glr-signature
+
+    `Gallery generated by Sphinx-Gallery <https://sphinx-gallery.github.io>`_
diff --git a/doc/docmumbo/auto_examples/plot_2_views_2_classes.ipynb b/doc/docmumbo/auto_examples/plot_2_views_2_classes.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..958f7f5803047b3a7c125914d74d7c1776469175
--- /dev/null
+++ b/doc/docmumbo/auto_examples/plot_2_views_2_classes.ipynb
@@ -0,0 +1,54 @@
+{
+  "cells": [
+    {
+      "cell_type": "code",
+      "execution_count": null,
+      "metadata": {
+        "collapsed": false
+      },
+      "outputs": [],
+      "source": [
+        "%matplotlib inline"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {},
+      "source": [
+        "\n==========================\n2 views, 2 classes example\n==========================\n\nIn this toy example, we generate data from two classes, split between two\ntwo-dimensional views.\n\nFor each view, the data are generated so that half of the points of each class\nare well separated in the plane, while the other half of the points are not\nseparated and placed in the same area. We also insure that the points that are\nnot separated in one view are well separated in the other view.\n\nThus, in the figure representing the data, the points represented by crosses\n(x) are well separated in view 0 while they are not separated in view 1, while\nthe points represented by dots (.) are well separated in view 1 while they are\nnot separated in view 0. In this figure, the blue symbols represent points\nof class 0, while red symbols represent points of class 1.\n\nThe MuMBo algorithm take adavantage of the complementarity of the two views to\nrightly classify the points.\n\n"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": null,
+      "metadata": {
+        "collapsed": false
+      },
+      "outputs": [],
+      "source": [
+        "# Universit\u00e9 d'Aix Marseille (AMU) -\n# Centre National de la Recherche Scientifique (CNRS) -\n# Universit\u00e9 de Toulon (UTLN).\n# Copyright \u00a9 2017-2018 AMU, CNRS, UTLN\n#\n# This file is part of multimodalboost.\n#\n# multimodalboost is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# multimodalboost is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with multimodalboost. If not, see <http://www.gnu.org/licenses/>.\n#\n# Author: Florent JAILLET - Laboratoire d'Informatique et Syst\u00e8mes - UMR 7020\n\nimport numpy as np\nfrom multimodalboost.mumbo import MumboClassifier\nfrom matplotlib import pyplot as plt\n\n\ndef generate_data(n_samples, lim):\n    \"\"\"Generate random data in a rectangle\"\"\"\n    lim = np.array(lim)\n    n_features = lim.shape[0]\n    data = np.random.random((n_samples, n_features))\n    data = (lim[:, 1]-lim[:, 0]) * data + lim[:, 0]\n    return data\n\n\nseed = 12\nnp.random.seed(seed)\n\nn_samples = 100\n\nview_0 = np.concatenate((generate_data(n_samples, [[0., 1.], [0., 1.]]),\n                         generate_data(n_samples, [[1., 2.], [0., 1.]]),\n                         generate_data(n_samples, [[0., 1.], [0., 1.]]),\n                         generate_data(n_samples, [[0., 1.], [1., 2.]])))\n\nview_1 = np.concatenate((generate_data(n_samples, [[1., 2.], [0., 1.]]),\n                         generate_data(n_samples, [[0., 1.], [0., 1.]]),\n                         generate_data(n_samples, [[0., 1.], [1., 2.]]),\n                         generate_data(n_samples, [[0., 1.], [0., 1.]])))\n\nX = np.concatenate((view_0, view_1), axis=1)\n\ny = np.zeros(4*n_samples, dtype=np.int64)\ny[2*n_samples:] = 1\n\nviews_ind = np.array([0, 2, 4])\n\nn_estimators = 3\nclf = MumboClassifier(n_estimators=n_estimators)\nclf.fit(X, y, views_ind)\n\nprint('\\nAfter 3 iterations, the MuMBo classifier reaches exact '\n      'classification for the\\nlearning samples:')\nfor ind, score in enumerate(clf.staged_score(X, y)):\n    print('  - iteration {}, score: {}'.format(ind + 1, score))\n\n\nprint('\\nThe resulting MuMBo classifier uses three sub-classifiers that are '\n      'wheighted\\nusing the following weights:\\n'\n      '  estimator weights: {}'.format(clf.estimator_weights_))\n\nprint('\\nThe two first sub-classifiers use the data of view 0 to compute '\n      'their\\nclassification results, while the third one uses the data of '\n      'view 1:\\n'\n      '  best views: {}'. 
format(clf.best_views_))\n\nprint('\\nThe first figure displays the data, splitting the representation '\n      'between the\\ntwo views.')\n\nfig = plt.figure(figsize=(10., 8.))\nfig.suptitle('Representation of the data', size=16)\nfor ind_view in range(2):\n    ax = plt.subplot(2, 1, ind_view + 1)\n    ax.set_title('View {}'.format(ind_view))\n    ind_feature = ind_view * 2\n    styles = ('.b', 'xb', '.r', 'xr')\n    labels = ('non-separated', 'separated')\n    for ind in range(4):\n        ind_class = ind // 2\n        label = labels[(ind + ind_view) % 2]\n        ax.plot(X[n_samples*ind:n_samples*(ind+1), ind_feature],\n                X[n_samples*ind:n_samples*(ind+1), ind_feature + 1],\n                styles[ind],\n                label='Class {} ({})'.format(ind_class, label))\n    ax.legend()\n\nprint('\\nThe second figure displays the classification results for the '\n      'sub-classifiers\\non the learning sample data.\\n')\n\nstyles = ('.b', '.r')\nfig = plt.figure(figsize=(12., 7.))\nfig.suptitle('Classification results on the learning data for the '\n             'sub-classifiers', size=16)\nfor ind_estimator in range(n_estimators):\n    best_view = clf.best_views_[ind_estimator]\n    y_pred = clf.estimators_[ind_estimator].predict(\n        X[:, 2*best_view:2*best_view+2])\n    background_color = (1.0, 1.0, 0.9)\n    for ind_view in range(2):\n        ax = plt.subplot(2, 3, ind_estimator + 3*ind_view + 1)\n        if ind_view == best_view:\n            ax.set_facecolor(background_color)\n        ax.set_title(\n            'Sub-classifier {} - View {}'.format(ind_estimator, ind_view))\n        ind_feature = ind_view * 2\n        for ind_class in range(2):\n            ind_samples = (y_pred == ind_class)\n            ax.plot(X[ind_samples, ind_feature],\n                    X[ind_samples, ind_feature + 1],\n                    styles[ind_class],\n                    label='Class {}'.format(ind_class))\n        ax.legend(title='Predicted class:')\n\nplt.show()"
+      ]
+    }
+  ],
+  "metadata": {
+    "kernelspec": {
+      "display_name": "Python 3",
+      "language": "python",
+      "name": "python3"
+    },
+    "language_info": {
+      "codemirror_mode": {
+        "name": "ipython",
+        "version": 3
+      },
+      "file_extension": ".py",
+      "mimetype": "text/x-python",
+      "name": "python",
+      "nbconvert_exporter": "python",
+      "pygments_lexer": "ipython3",
+      "version": "3.6.8"
+    }
+  },
+  "nbformat": 4,
+  "nbformat_minor": 0
+}
\ No newline at end of file
diff --git a/doc/docmumbo/auto_examples/plot_2_views_2_classes.py b/doc/docmumbo/auto_examples/plot_2_views_2_classes.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d1c788e8be011be31d0c07777d91517a121fe98
--- /dev/null
+++ b/doc/docmumbo/auto_examples/plot_2_views_2_classes.py
@@ -0,0 +1,149 @@
+# -*- coding: utf-8 -*-
+"""
+==========================
+2 views, 2 classes example
+==========================
+
+In this toy example, we generate data from two classes, split between two
+two-dimensional views.
+
+For each view, the data are generated so that half of the points of each class
+are well separated in the plane, while the other half of the points are not
+separated and placed in the same area. We also ensure that the points that are
+not separated in one view are well separated in the other view.
+
+Thus, in the figure representing the data, the points represented by crosses
+(x) are well separated in view 0 while they are not separated in view 1, while
+the points represented by dots (.) are well separated in view 1 while they are
+not separated in view 0. In this figure, the blue symbols represent points
+of class 0, while red symbols represent points of class 1.
+
+The MuMBo algorithm takes advantage of the complementarity of the two views to
+correctly classify the points.
+"""
+
+# Université d'Aix Marseille (AMU) -
+# Centre National de la Recherche Scientifique (CNRS) -
+# Université de Toulon (UTLN).
+# Copyright © 2017-2018 AMU, CNRS, UTLN
+#
+# This file is part of multimodalboost.
+#
+# multimodalboost is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# multimodalboost is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with multimodalboost. If not, see <http://www.gnu.org/licenses/>.
+#
+# Author: Florent JAILLET - Laboratoire d'Informatique et Systèmes - UMR 7020
+
+import numpy as np
+from multimodalboost.mumbo import MumboClassifier
+from matplotlib import pyplot as plt
+
+
+def generate_data(n_samples, lim):
+    """Generate random data in a rectangle"""
+    lim = np.array(lim)
+    n_features = lim.shape[0]
+    data = np.random.random((n_samples, n_features))
+    data = (lim[:, 1]-lim[:, 0]) * data + lim[:, 0]
+    return data
+
+
+seed = 12
+np.random.seed(seed)
+
+n_samples = 100
+
+view_0 = np.concatenate((generate_data(n_samples, [[0., 1.], [0., 1.]]),
+                         generate_data(n_samples, [[1., 2.], [0., 1.]]),
+                         generate_data(n_samples, [[0., 1.], [0., 1.]]),
+                         generate_data(n_samples, [[0., 1.], [1., 2.]])))
+
+view_1 = np.concatenate((generate_data(n_samples, [[1., 2.], [0., 1.]]),
+                         generate_data(n_samples, [[0., 1.], [0., 1.]]),
+                         generate_data(n_samples, [[0., 1.], [1., 2.]]),
+                         generate_data(n_samples, [[0., 1.], [0., 1.]])))
+
+X = np.concatenate((view_0, view_1), axis=1)
+
+y = np.zeros(4*n_samples, dtype=np.int64)
+y[2*n_samples:] = 1
+
+views_ind = np.array([0, 2, 4])
+
+n_estimators = 3
+clf = MumboClassifier(n_estimators=n_estimators)
+clf.fit(X, y, views_ind)
+
+print('\nAfter 3 iterations, the MuMBo classifier reaches exact '
+      'classification for the\nlearning samples:')
+for ind, score in enumerate(clf.staged_score(X, y)):
+    print('  - iteration {}, score: {}'.format(ind + 1, score))
+
+
+print('\nThe resulting MuMBo classifier uses three sub-classifiers that are '
+      'weighted\nusing the following weights:\n'
+      '  estimator weights: {}'.format(clf.estimator_weights_))
+
+print('\nThe first two sub-classifiers use the data of view 0 to compute '
+      'their\nclassification results, while the third one uses the data of '
+      'view 1:\n'
+      '  best views: {}'.format(clf.best_views_))
+
+print('\nThe first figure displays the data, splitting the representation '
+      'between the\ntwo views.')
+
+fig = plt.figure(figsize=(10., 8.))
+fig.suptitle('Representation of the data', size=16)
+for ind_view in range(2):
+    ax = plt.subplot(2, 1, ind_view + 1)
+    ax.set_title('View {}'.format(ind_view))
+    ind_feature = ind_view * 2
+    styles = ('.b', 'xb', '.r', 'xr')
+    labels = ('non-separated', 'separated')
+    for ind in range(4):
+        ind_class = ind // 2
+        label = labels[(ind + ind_view) % 2]
+        ax.plot(X[n_samples*ind:n_samples*(ind+1), ind_feature],
+                X[n_samples*ind:n_samples*(ind+1), ind_feature + 1],
+                styles[ind],
+                label='Class {} ({})'.format(ind_class, label))
+    ax.legend()
+
+print('\nThe second figure displays the classification results for the '
+      'sub-classifiers\non the learning sample data.\n')
+
+styles = ('.b', '.r')
+fig = plt.figure(figsize=(12., 7.))
+fig.suptitle('Classification results on the learning data for the '
+             'sub-classifiers', size=16)
+for ind_estimator in range(n_estimators):
+    best_view = clf.best_views_[ind_estimator]
+    y_pred = clf.estimators_[ind_estimator].predict(
+        X[:, 2*best_view:2*best_view+2])
+    background_color = (1.0, 1.0, 0.9)
+    for ind_view in range(2):
+        ax = plt.subplot(2, 3, ind_estimator + 3*ind_view + 1)
+        if ind_view == best_view:
+            ax.set_facecolor(background_color)
+        ax.set_title(
+            'Sub-classifier {} - View {}'.format(ind_estimator, ind_view))
+        ind_feature = ind_view * 2
+        for ind_class in range(2):
+            ind_samples = (y_pred == ind_class)
+            ax.plot(X[ind_samples, ind_feature],
+                    X[ind_samples, ind_feature + 1],
+                    styles[ind_class],
+                    label='Class {}'.format(ind_class))
+        ax.legend(title='Predicted class:')
+
+plt.show()
diff --git a/doc/docmumbo/auto_examples/plot_2_views_2_classes.py.md5 b/doc/docmumbo/auto_examples/plot_2_views_2_classes.py.md5
new file mode 100644
index 0000000000000000000000000000000000000000..1172a53ec81cdd93bf191f217043413337587907
--- /dev/null
+++ b/doc/docmumbo/auto_examples/plot_2_views_2_classes.py.md5
@@ -0,0 +1 @@
+6bfb170b1af59e9ec2958eea239976e2
\ No newline at end of file
diff --git a/doc/docmumbo/auto_examples/plot_2_views_2_classes.rst b/doc/docmumbo/auto_examples/plot_2_views_2_classes.rst
new file mode 100644
index 0000000000000000000000000000000000000000..b578f41b819cbd8d1917cf9f0296d491053f8aab
--- /dev/null
+++ b/doc/docmumbo/auto_examples/plot_2_views_2_classes.rst
@@ -0,0 +1,245 @@
+.. note::
+    :class: sphx-glr-download-link-note
+
+    Click :ref:`here <sphx_glr_download_auto_examples_plot_2_views_2_classes.py>` to download the full example code
+.. rst-class:: sphx-glr-example-title
+
+.. _sphx_glr_auto_examples_plot_2_views_2_classes.py:
+
+
+==========================
+2 views, 2 classes example
+==========================
+
+In this toy example, we generate data from two classes, split between two
+two-dimensional views.
+
+For each view, the data are generated so that half of the points of each class
+are well separated in the plane, while the other half of the points are not
+separated and placed in the same area. We also ensure that the points that are
+not separated in one view are well separated in the other view.
+
+Thus, in the figure representing the data, the points represented by crosses
+(x) are well separated in view 0 while they are not separated in view 1, while
+the points represented by dots (.) are well separated in view 1 while they are
+not separated in view 0. In this figure, the blue symbols represent points
+of class 0, while red symbols represent points of class 1.
+
+The MuMBo algorithm takes advantage of the complementarity of the two views to
+correctly classify the points.
+
+
+
+.. rst-class:: sphx-glr-horizontal
+
+
+    *
+
+      .. image:: /auto_examples/images/sphx_glr_plot_2_views_2_classes_001.png
+            :class: sphx-glr-multi-img
+
+    *
+
+      .. image:: /auto_examples/images/sphx_glr_plot_2_views_2_classes_002.png
+            :class: sphx-glr-multi-img
+
+
+.. rst-class:: sphx-glr-script-out
+
+ Out:
+
+ .. code-block:: none
+
+
+    After 3 iterations, the MuMBo classifier reaches exact classification for the
+    learning samples:
+      - iteration 1, score: 0.75
+      - iteration 2, score: 0.75
+      - iteration 3, score: 1.0
+
+    The resulting MuMBo classifier uses three sub-classifiers that are weighted
+    using the following weights:
+      estimator weights: [0.54930614 0.80471896 1.09861229]
+
+    The first two sub-classifiers use the data of view 0 to compute their
+    classification results, while the third one uses the data of view 1:
+      best views: [0 0 1]
+
+    The first figure displays the data, splitting the representation between the
+    two views.
+
+    The second figure displays the classification results for the sub-classifiers
+    on the learning sample data.
+
+    /home/dominique/projets/ANR-Lives/multimodal/multimodalboost/examples/plot_2_views_2_classes.py:149: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
+      plt.show()
+
+
+
+
+
+
+|
+
+
+.. code-block:: default
+
+
+    # Université d'Aix Marseille (AMU) -
+    # Centre National de la Recherche Scientifique (CNRS) -
+    # Université de Toulon (UTLN).
+    # Copyright © 2017-2018 AMU, CNRS, UTLN
+    #
+    # This file is part of multimodalboost.
+    #
+    # multimodalboost is free software: you can redistribute it and/or modify
+    # it under the terms of the GNU Lesser General Public License as published by
+    # the Free Software Foundation, either version 3 of the License, or
+    # (at your option) any later version.
+    #
+    # multimodalboost is distributed in the hope that it will be useful,
+    # but WITHOUT ANY WARRANTY; without even the implied warranty of
+    # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    # GNU Lesser General Public License for more details.
+    #
+    # You should have received a copy of the GNU Lesser General Public License
+    # along with multimodalboost. If not, see <http://www.gnu.org/licenses/>.
+    #
+    # Author: Florent JAILLET - Laboratoire d'Informatique et Systèmes - UMR 7020
+
+    import numpy as np
+    from multimodalboost.mumbo import MumboClassifier
+    from matplotlib import pyplot as plt
+
+
+    def generate_data(n_samples, lim):
+        """Generate random data in a rectangle"""
+        lim = np.array(lim)
+        n_features = lim.shape[0]
+        data = np.random.random((n_samples, n_features))
+        data = (lim[:, 1]-lim[:, 0]) * data + lim[:, 0]
+        return data
+
+
+    seed = 12
+    np.random.seed(seed)
+
+    n_samples = 100
+
+    view_0 = np.concatenate((generate_data(n_samples, [[0., 1.], [0., 1.]]),
+                             generate_data(n_samples, [[1., 2.], [0., 1.]]),
+                             generate_data(n_samples, [[0., 1.], [0., 1.]]),
+                             generate_data(n_samples, [[0., 1.], [1., 2.]])))
+
+    view_1 = np.concatenate((generate_data(n_samples, [[1., 2.], [0., 1.]]),
+                             generate_data(n_samples, [[0., 1.], [0., 1.]]),
+                             generate_data(n_samples, [[0., 1.], [1., 2.]]),
+                             generate_data(n_samples, [[0., 1.], [0., 1.]])))
+
+    X = np.concatenate((view_0, view_1), axis=1)
+
+    y = np.zeros(4*n_samples, dtype=np.int64)
+    y[2*n_samples:] = 1
+
+    views_ind = np.array([0, 2, 4])
+
+    n_estimators = 3
+    clf = MumboClassifier(n_estimators=n_estimators)
+    clf.fit(X, y, views_ind)
+
+    print('\nAfter 3 iterations, the MuMBo classifier reaches exact '
+          'classification for the\nlearning samples:')
+    for ind, score in enumerate(clf.staged_score(X, y)):
+        print('  - iteration {}, score: {}'.format(ind + 1, score))
+
+
+    print('\nThe resulting MuMBo classifier uses three sub-classifiers that are '
+          'weighted\nusing the following weights:\n'
+          '  estimator weights: {}'.format(clf.estimator_weights_))
+
+    print('\nThe first two sub-classifiers use the data of view 0 to compute '
+          'their\nclassification results, while the third one uses the data of '
+          'view 1:\n'
+          '  best views: {}'.format(clf.best_views_))
+
+    print('\nThe first figure displays the data, splitting the representation '
+          'between the\ntwo views.')
+
+    fig = plt.figure(figsize=(10., 8.))
+    fig.suptitle('Representation of the data', size=16)
+    for ind_view in range(2):
+        ax = plt.subplot(2, 1, ind_view + 1)
+        ax.set_title('View {}'.format(ind_view))
+        ind_feature = ind_view * 2
+        styles = ('.b', 'xb', '.r', 'xr')
+        labels = ('non-separated', 'separated')
+        for ind in range(4):
+            ind_class = ind // 2
+            label = labels[(ind + ind_view) % 2]
+            ax.plot(X[n_samples*ind:n_samples*(ind+1), ind_feature],
+                    X[n_samples*ind:n_samples*(ind+1), ind_feature + 1],
+                    styles[ind],
+                    label='Class {} ({})'.format(ind_class, label))
+        ax.legend()
+
+    print('\nThe second figure displays the classification results for the '
+          'sub-classifiers\non the learning sample data.\n')
+
+    styles = ('.b', '.r')
+    fig = plt.figure(figsize=(12., 7.))
+    fig.suptitle('Classification results on the learning data for the '
+                 'sub-classifiers', size=16)
+    for ind_estimator in range(n_estimators):
+        best_view = clf.best_views_[ind_estimator]
+        y_pred = clf.estimators_[ind_estimator].predict(
+            X[:, 2*best_view:2*best_view+2])
+        background_color = (1.0, 1.0, 0.9)
+        for ind_view in range(2):
+            ax = plt.subplot(2, 3, ind_estimator + 3*ind_view + 1)
+            if ind_view == best_view:
+                ax.set_facecolor(background_color)
+            ax.set_title(
+                'Sub-classifier {} - View {}'.format(ind_estimator, ind_view))
+            ind_feature = ind_view * 2
+            for ind_class in range(2):
+                ind_samples = (y_pred == ind_class)
+                ax.plot(X[ind_samples, ind_feature],
+                        X[ind_samples, ind_feature + 1],
+                        styles[ind_class],
+                        label='Class {}'.format(ind_class))
+            ax.legend(title='Predicted class:')
+
+    plt.show()
+
+
+.. rst-class:: sphx-glr-timing
+
+   **Total running time of the script:** ( 0 minutes  0.733 seconds)
+
+
+.. _sphx_glr_download_auto_examples_plot_2_views_2_classes.py:
+
+
+.. only :: html
+
+ .. container:: sphx-glr-footer
+    :class: sphx-glr-footer-example
+
+
+
+  .. container:: sphx-glr-download
+
+     :download:`Download Python source code: plot_2_views_2_classes.py <plot_2_views_2_classes.py>`
+
+
+
+  .. container:: sphx-glr-download
+
+     :download:`Download Jupyter notebook: plot_2_views_2_classes.ipynb <plot_2_views_2_classes.ipynb>`
+
+
+.. only:: html
+
+ .. rst-class:: sphx-glr-signature
+
+    `Gallery generated by Sphinx-Gallery <https://sphinx-gallery.github.io>`_
diff --git a/doc/docmumbo/auto_examples/plot_2_views_2_classes_codeobj.pickle b/doc/docmumbo/auto_examples/plot_2_views_2_classes_codeobj.pickle
new file mode 100644
index 0000000000000000000000000000000000000000..ced0469b4f5b5b29ca214bfe0fbde24ba4659c5b
Binary files /dev/null and b/doc/docmumbo/auto_examples/plot_2_views_2_classes_codeobj.pickle differ
diff --git a/doc/docmumbo/auto_examples/plot_3_views_3_classes.ipynb b/doc/docmumbo/auto_examples/plot_3_views_3_classes.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..bf392e76c61972592842f67e3e412c698d1ac2a6
--- /dev/null
+++ b/doc/docmumbo/auto_examples/plot_3_views_3_classes.ipynb
@@ -0,0 +1,54 @@
+{
+  "cells": [
+    {
+      "cell_type": "code",
+      "execution_count": null,
+      "metadata": {
+        "collapsed": false
+      },
+      "outputs": [],
+      "source": [
+        "%matplotlib inline"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {},
+      "source": [
+        "\n==========================\n3 views, 3 classes example\n==========================\n\nIn this toy example, we generate data from three classes, split between three\ntwo-dimensional views.\n\nFor each view, the data are generated so that the points for two classes are\nwell seperated, while the points for the third class are not seperated with\nthe two other classes. That means that, taken separately, none of the single\nviews allows for a good classification of the data.\n\nNevertheless, the MuMBo algorithm take adavantage of the complementarity of\nthe views to rightly classify the points.\n\n"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": null,
+      "metadata": {
+        "collapsed": false
+      },
+      "outputs": [],
+      "source": [
+        "# Universit\u00e9 d'Aix Marseille (AMU) -\n# Centre National de la Recherche Scientifique (CNRS) -\n# Universit\u00e9 de Toulon (UTLN).\n# Copyright \u00a9 2017-2018 AMU, CNRS, UTLN\n#\n# This file is part of multimodalboost.\n#\n# multimodalboost is free software: you can redistribute it and/or modify\n# it under the terms of the GNU Lesser General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# multimodalboost is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n# GNU Lesser General Public License for more details.\n#\n# You should have received a copy of the GNU Lesser General Public License\n# along with multimodalboost. If not, see <http://www.gnu.org/licenses/>.\n#\n# Author: Florent JAILLET - Laboratoire d'Informatique et Syst\u00e8mes - UMR 7020\n\nimport numpy as np\nfrom multimodalboost.mumbo import MumboClassifier\nfrom matplotlib import pyplot as plt\n\n\ndef generate_data(n_samples, lim):\n    \"\"\"Generate random data in a rectangle\"\"\"\n    lim = np.array(lim)\n    n_features = lim.shape[0]\n    data = np.random.random((n_samples, n_features))\n    data = (lim[:, 1]-lim[:, 0]) * data + lim[:, 0]\n    return data\n\n\nseed = 12\nnp.random.seed(seed)\n\nn_samples = 300\n\nview_0 = np.concatenate((generate_data(n_samples, [[0., 1.], [0., 1.]]),\n                         generate_data(n_samples, [[1., 2.], [0., 1.]]),\n                         generate_data(n_samples, [[0., 2.], [0., 1.]])))\n\nview_1 = np.concatenate((generate_data(n_samples, [[1., 2.], [0., 1.]]),\n                         generate_data(n_samples, [[0., 2.], [0., 1.]]),\n                         generate_data(n_samples, [[0., 1.], [0., 1.]])))\n\nview_2 = np.concatenate((generate_data(n_samples, [[0., 2.], [0., 1.]]),\n                         generate_data(n_samples, [[0., 1.], [0., 1.]]),\n                         generate_data(n_samples, [[1., 2.], [0., 1.]])))\n\nX = np.concatenate((view_0, view_1, view_2), axis=1)\n\ny = np.zeros(3*n_samples, dtype=np.int64)\ny[n_samples:2*n_samples] = 1\ny[2*n_samples:] = 2\n\nviews_ind = np.array([0, 2, 4, 6])\n\nn_estimators = 4\nclf = MumboClassifier(n_estimators=n_estimators)\nclf.fit(X, y, views_ind)\n\nprint('\\nAfter 4 iterations, the MuMBo classifier reaches exact '\n      'classification for the\\nlearning samples:')\nfor ind, score in enumerate(clf.staged_score(X, y)):\n    print('  - iteration {}, score: {}'.format(ind + 1, score))\n\nprint('\\nThe resulting MuMBo classifier uses four sub-classifiers that are '\n      'wheighted\\nusing the following weights:\\n'\n      '  estimator weights: {}'.format(clf.estimator_weights_))\n\nprint('\\nThe first sub-classifier uses the data of view 0 to compute '\n      'its classification\\nresults, the second and third sub-classifiers use '\n      'the data of view 1, while the\\nfourth one uses the data of '\n      'view 2:\\n'\n      '  best views: {}'. 
format(clf.best_views_))\n\nprint('\\nThe first figure displays the data, splitting the representation '\n      'between the\\nthree views.')\n\nstyles = ('.b', '.r', '.g')\nfig = plt.figure(figsize=(12., 11.))\nfig.suptitle('Representation of the data', size=16)\nfor ind_view in range(3):\n    ax = plt.subplot(3, 1, ind_view + 1)\n    ax.set_title('View {}'.format(ind_view))\n    ind_feature = ind_view * 2\n    for ind_class in range(3):\n        ind_samples = (y == ind_class)\n        ax.plot(X[ind_samples, ind_feature],\n                X[ind_samples, ind_feature + 1],\n                styles[ind_class],\n                label='Class {}'.format(ind_class))\n    ax.legend(loc='upper left', framealpha=0.9)\n\nprint('\\nThe second figure displays the classification results for the '\n      'sub-classifiers\\non the learning sample data.\\n')\n\nfig = plt.figure(figsize=(14., 11.))\nfig.suptitle('Classification results on the learning data for the '\n             'sub-classifiers', size=16)\nfor ind_estimator in range(n_estimators):\n    best_view = clf.best_views_[ind_estimator]\n    y_pred = clf.estimators_[ind_estimator].predict(\n        X[:, 2*best_view:2*best_view+2])\n    background_color = (1.0, 1.0, 0.9)\n    for ind_view in range(3):\n        ax = plt.subplot(3, 4, ind_estimator + 4*ind_view + 1)\n        if ind_view == best_view:\n            ax.set_facecolor(background_color)\n        ax.set_title(\n            'Sub-classifier {} - View {}'.format(ind_estimator, ind_view))\n        ind_feature = ind_view * 2\n        for ind_class in range(3):\n            ind_samples = (y_pred == ind_class)\n            ax.plot(X[ind_samples, ind_feature],\n                    X[ind_samples, ind_feature + 1],\n                    styles[ind_class],\n                    label='Class {}'.format(ind_class))\n        ax.legend(title='Predicted class:', loc='upper left', framealpha=0.9)\n\nplt.show()"
+      ]
+    }
+  ],
+  "metadata": {
+    "kernelspec": {
+      "display_name": "Python 3",
+      "language": "python",
+      "name": "python3"
+    },
+    "language_info": {
+      "codemirror_mode": {
+        "name": "ipython",
+        "version": 3
+      },
+      "file_extension": ".py",
+      "mimetype": "text/x-python",
+      "name": "python",
+      "nbconvert_exporter": "python",
+      "pygments_lexer": "ipython3",
+      "version": "3.6.8"
+    }
+  },
+  "nbformat": 4,
+  "nbformat_minor": 0
+}
\ No newline at end of file
diff --git a/doc/docmumbo/auto_examples/plot_3_views_3_classes.py b/doc/docmumbo/auto_examples/plot_3_views_3_classes.py
new file mode 100644
index 0000000000000000000000000000000000000000..7de610b658890c146929a923712b6033de830d4d
--- /dev/null
+++ b/doc/docmumbo/auto_examples/plot_3_views_3_classes.py
@@ -0,0 +1,143 @@
+# -*- coding: utf-8 -*-
+"""
+==========================
+3 views, 3 classes example
+==========================
+
+In this toy example, we generate data from three classes, split between three
+two-dimensional views.
+
+For each view, the data are generated so that the points for two classes are
+well separated, while the points for the third class are not separated from
+the two other classes. That means that, taken separately, none of the single
+views allows for a good classification of the data.
+
+Nevertheless, the MuMBo algorithm takes advantage of the complementarity of
+the views to correctly classify the points.
+"""
+
+# Université d'Aix Marseille (AMU) -
+# Centre National de la Recherche Scientifique (CNRS) -
+# Université de Toulon (UTLN).
+# Copyright © 2017-2018 AMU, CNRS, UTLN
+#
+# This file is part of multimodalboost.
+#
+# multimodalboost is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# multimodalboost is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with multimodalboost. If not, see <http://www.gnu.org/licenses/>.
+#
+# Author: Florent JAILLET - Laboratoire d'Informatique et Systèmes - UMR 7020
+
+import numpy as np
+from multimodalboost.mumbo import MumboClassifier
+from matplotlib import pyplot as plt
+
+
+def generate_data(n_samples, lim):
+    """Generate random data in a rectangle"""
+    lim = np.array(lim)
+    n_features = lim.shape[0]
+    data = np.random.random((n_samples, n_features))
+    data = (lim[:, 1]-lim[:, 0]) * data + lim[:, 0]
+    return data
+
+
+seed = 12
+np.random.seed(seed)
+
+n_samples = 300
+
+view_0 = np.concatenate((generate_data(n_samples, [[0., 1.], [0., 1.]]),
+                         generate_data(n_samples, [[1., 2.], [0., 1.]]),
+                         generate_data(n_samples, [[0., 2.], [0., 1.]])))
+
+view_1 = np.concatenate((generate_data(n_samples, [[1., 2.], [0., 1.]]),
+                         generate_data(n_samples, [[0., 2.], [0., 1.]]),
+                         generate_data(n_samples, [[0., 1.], [0., 1.]])))
+
+view_2 = np.concatenate((generate_data(n_samples, [[0., 2.], [0., 1.]]),
+                         generate_data(n_samples, [[0., 1.], [0., 1.]]),
+                         generate_data(n_samples, [[1., 2.], [0., 1.]])))
+
+X = np.concatenate((view_0, view_1, view_2), axis=1)
+
+y = np.zeros(3*n_samples, dtype=np.int64)
+y[n_samples:2*n_samples] = 1
+y[2*n_samples:] = 2
+
+views_ind = np.array([0, 2, 4, 6])
+
+n_estimators = 4
+clf = MumboClassifier(n_estimators=n_estimators)
+clf.fit(X, y, views_ind)
+
+print('\nAfter 4 iterations, the MuMBo classifier reaches exact '
+      'classification for the\nlearning samples:')
+for ind, score in enumerate(clf.staged_score(X, y)):
+    print('  - iteration {}, score: {}'.format(ind + 1, score))
+
+print('\nThe resulting MuMBo classifier uses four sub-classifiers that are '
+      'weighted\nusing the following weights:\n'
+      '  estimator weights: {}'.format(clf.estimator_weights_))
+
+print('\nThe first sub-classifier uses the data of view 0 to compute '
+      'its classification\nresults, the second and third sub-classifiers use '
+      'the data of view 1, while the\nfourth one uses the data of '
+      'view 2:\n'
+      '  best views: {}'.format(clf.best_views_))
+
+print('\nThe first figure displays the data, splitting the representation '
+      'between the\nthree views.')
+
+styles = ('.b', '.r', '.g')
+fig = plt.figure(figsize=(12., 11.))
+fig.suptitle('Representation of the data', size=16)
+for ind_view in range(3):
+    ax = plt.subplot(3, 1, ind_view + 1)
+    ax.set_title('View {}'.format(ind_view))
+    ind_feature = ind_view * 2
+    for ind_class in range(3):
+        ind_samples = (y == ind_class)
+        ax.plot(X[ind_samples, ind_feature],
+                X[ind_samples, ind_feature + 1],
+                styles[ind_class],
+                label='Class {}'.format(ind_class))
+    ax.legend(loc='upper left', framealpha=0.9)
+
+print('\nThe second figure displays the classification results for the '
+      'sub-classifiers\non the learning sample data.\n')
+
+fig = plt.figure(figsize=(14., 11.))
+fig.suptitle('Classification results on the learning data for the '
+             'sub-classifiers', size=16)
+for ind_estimator in range(n_estimators):
+    best_view = clf.best_views_[ind_estimator]
+    y_pred = clf.estimators_[ind_estimator].predict(
+        X[:, 2*best_view:2*best_view+2])
+    background_color = (1.0, 1.0, 0.9)
+    for ind_view in range(3):
+        ax = plt.subplot(3, 4, ind_estimator + 4*ind_view + 1)
+        if ind_view == best_view:
+            ax.set_facecolor(background_color)
+        ax.set_title(
+            'Sub-classifier {} - View {}'.format(ind_estimator, ind_view))
+        ind_feature = ind_view * 2
+        for ind_class in range(3):
+            ind_samples = (y_pred == ind_class)
+            ax.plot(X[ind_samples, ind_feature],
+                    X[ind_samples, ind_feature + 1],
+                    styles[ind_class],
+                    label='Class {}'.format(ind_class))
+        ax.legend(title='Predicted class:', loc='upper left', framealpha=0.9)
+
+plt.show()
diff --git a/doc/docmumbo/auto_examples/plot_3_views_3_classes.py.md5 b/doc/docmumbo/auto_examples/plot_3_views_3_classes.py.md5
new file mode 100644
index 0000000000000000000000000000000000000000..e7abc15ad3bfab64532a40674ffb12da81e66730
--- /dev/null
+++ b/doc/docmumbo/auto_examples/plot_3_views_3_classes.py.md5
@@ -0,0 +1 @@
+2dba08fc2c1e223dbeac65eeda17b109
\ No newline at end of file
diff --git a/doc/docmumbo/auto_examples/plot_3_views_3_classes.rst b/doc/docmumbo/auto_examples/plot_3_views_3_classes.rst
new file mode 100644
index 0000000000000000000000000000000000000000..08d7745229e704927cf9be50fc8b71c21222bcb3
--- /dev/null
+++ b/doc/docmumbo/auto_examples/plot_3_views_3_classes.rst
@@ -0,0 +1,241 @@
+.. note::
+    :class: sphx-glr-download-link-note
+
+    Click :ref:`here <sphx_glr_download_auto_examples_plot_3_views_3_classes.py>` to download the full example code
+.. rst-class:: sphx-glr-example-title
+
+.. _sphx_glr_auto_examples_plot_3_views_3_classes.py:
+
+
+==========================
+3 views, 3 classes example
+==========================
+
+In this toy example, we generate data from three classes, split between three
+two-dimensional views.
+
+For each view, the data are generated so that the points for two classes are
+well separated, while the points for the third class are not separated from
+the two other classes. That means that, taken separately, none of the single
+views allows for a good classification of the data.
+
+Nevertheless, the MuMBo algorithm takes advantage of the complementarity of
+the views to correctly classify the points.
+
+
+
+.. rst-class:: sphx-glr-horizontal
+
+
+    *
+
+      .. image:: /auto_examples/images/sphx_glr_plot_3_views_3_classes_001.png
+            :class: sphx-glr-multi-img
+
+    *
+
+      .. image:: /auto_examples/images/sphx_glr_plot_3_views_3_classes_002.png
+            :class: sphx-glr-multi-img
+
+
+.. rst-class:: sphx-glr-script-out
+
+ Out:
+
+ .. code-block:: none
+
+
+    After 4 iterations, the MuMBo classifier reaches exact classification for the
+    learning samples:
+      - iteration 1, score: 0.6666666666666666
+      - iteration 2, score: 0.6666666666666666
+      - iteration 3, score: 0.8422222222222222
+      - iteration 4, score: 1.0
+
+    The resulting MuMBo classifier uses four sub-classifiers that are weighted
+    using the following weights:
+      estimator weights: [0.54930614 0.78652877 0.27471838 0.93876455]
+
+    The first sub-classifier uses the data of view 0 to compute its classification
+    results, the second and third sub-classifiers use the data of view 1, while the
+    fourth one uses the data of view 2:
+      best views: [0 1 1 2]
+
+    The first figure displays the data, splitting the representation between the
+    three views.
+
+    The second figure displays the classification results for the sub-classifiers
+    on the learning sample data.
+
+    /home/dominique/projets/ANR-Lives/multimodal/multimodalboost/examples/plot_3_views_3_classes.py:143: UserWarning: Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.
+      plt.show()
+
+
+
+
+
+
+|
+
+
+.. code-block:: default
+
+
+    # Université d'Aix Marseille (AMU) -
+    # Centre National de la Recherche Scientifique (CNRS) -
+    # Université de Toulon (UTLN).
+    # Copyright © 2017-2018 AMU, CNRS, UTLN
+    #
+    # This file is part of multimodalboost.
+    #
+    # multimodalboost is free software: you can redistribute it and/or modify
+    # it under the terms of the GNU Lesser General Public License as published by
+    # the Free Software Foundation, either version 3 of the License, or
+    # (at your option) any later version.
+    #
+    # multimodalboost is distributed in the hope that it will be useful,
+    # but WITHOUT ANY WARRANTY; without even the implied warranty of
+    # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    # GNU Lesser General Public License for more details.
+    #
+    # You should have received a copy of the GNU Lesser General Public License
+    # along with multimodalboost. If not, see <http://www.gnu.org/licenses/>.
+    #
+    # Author: Florent JAILLET - Laboratoire d'Informatique et Systèmes - UMR 7020
+
+    import numpy as np
+    from multimodalboost.mumbo import MumboClassifier
+    from matplotlib import pyplot as plt
+
+
+    def generate_data(n_samples, lim):
+        """Generate random data in a rectangle"""
+        lim = np.array(lim)
+        n_features = lim.shape[0]
+        data = np.random.random((n_samples, n_features))
+        data = (lim[:, 1]-lim[:, 0]) * data + lim[:, 0]
+        return data
+
+
+    seed = 12
+    np.random.seed(seed)
+
+    n_samples = 300
+
+    view_0 = np.concatenate((generate_data(n_samples, [[0., 1.], [0., 1.]]),
+                             generate_data(n_samples, [[1., 2.], [0., 1.]]),
+                             generate_data(n_samples, [[0., 2.], [0., 1.]])))
+
+    view_1 = np.concatenate((generate_data(n_samples, [[1., 2.], [0., 1.]]),
+                             generate_data(n_samples, [[0., 2.], [0., 1.]]),
+                             generate_data(n_samples, [[0., 1.], [0., 1.]])))
+
+    view_2 = np.concatenate((generate_data(n_samples, [[0., 2.], [0., 1.]]),
+                             generate_data(n_samples, [[0., 1.], [0., 1.]]),
+                             generate_data(n_samples, [[1., 2.], [0., 1.]])))
+
+    X = np.concatenate((view_0, view_1, view_2), axis=1)
+
+    y = np.zeros(3*n_samples, dtype=np.int64)
+    y[n_samples:2*n_samples] = 1
+    y[2*n_samples:] = 2
+
+    views_ind = np.array([0, 2, 4, 6])
+
+    n_estimators = 4
+    clf = MumboClassifier(n_estimators=n_estimators)
+    clf.fit(X, y, views_ind)
+
+    print('\nAfter 4 iterations, the MuMBo classifier reaches exact '
+          'classification for the\nlearning samples:')
+    for ind, score in enumerate(clf.staged_score(X, y)):
+        print('  - iteration {}, score: {}'.format(ind + 1, score))
+
+    print('\nThe resulting MuMBo classifier uses four sub-classifiers that are '
+          'weighted\nusing the following weights:\n'
+          '  estimator weights: {}'.format(clf.estimator_weights_))
+
+    print('\nThe first sub-classifier uses the data of view 0 to compute '
+          'its classification\nresults, the second and third sub-classifiers use '
+          'the data of view 1, while the\nfourth one uses the data of '
+          'view 2:\n'
+          '  best views: {}'.format(clf.best_views_))
+
+    print('\nThe first figure displays the data, splitting the representation '
+          'between the\nthree views.')
+
+    styles = ('.b', '.r', '.g')
+    fig = plt.figure(figsize=(12., 11.))
+    fig.suptitle('Representation of the data', size=16)
+    for ind_view in range(3):
+        ax = plt.subplot(3, 1, ind_view + 1)
+        ax.set_title('View {}'.format(ind_view))
+        ind_feature = ind_view * 2
+        for ind_class in range(3):
+            ind_samples = (y == ind_class)
+            ax.plot(X[ind_samples, ind_feature],
+                    X[ind_samples, ind_feature + 1],
+                    styles[ind_class],
+                    label='Class {}'.format(ind_class))
+        ax.legend(loc='upper left', framealpha=0.9)
+
+    print('\nThe second figure displays the classification results for the '
+          'sub-classifiers\non the learning sample data.\n')
+
+    fig = plt.figure(figsize=(14., 11.))
+    fig.suptitle('Classification results on the learning data for the '
+                 'sub-classifiers', size=16)
+    for ind_estimator in range(n_estimators):
+        best_view = clf.best_views_[ind_estimator]
+        y_pred = clf.estimators_[ind_estimator].predict(
+            X[:, 2*best_view:2*best_view+2])
+        background_color = (1.0, 1.0, 0.9)
+        for ind_view in range(3):
+            ax = plt.subplot(3, 4, ind_estimator + 4*ind_view + 1)
+            if ind_view == best_view:
+                ax.set_facecolor(background_color)
+            ax.set_title(
+                'Sub-classifier {} - View {}'.format(ind_estimator, ind_view))
+            ind_feature = ind_view * 2
+            for ind_class in range(3):
+                ind_samples = (y_pred == ind_class)
+                ax.plot(X[ind_samples, ind_feature],
+                        X[ind_samples, ind_feature + 1],
+                        styles[ind_class],
+                        label='Class {}'.format(ind_class))
+            ax.legend(title='Predicted class:', loc='upper left', framealpha=0.9)
+
+    plt.show()
+
+
+.. rst-class:: sphx-glr-timing
+
+   **Total running time of the script:** ( 0 minutes  1.293 seconds)
+
+
+.. _sphx_glr_download_auto_examples_plot_3_views_3_classes.py:
+
+
+.. only :: html
+
+ .. container:: sphx-glr-footer
+    :class: sphx-glr-footer-example
+
+
+
+  .. container:: sphx-glr-download
+
+     :download:`Download Python source code: plot_3_views_3_classes.py <plot_3_views_3_classes.py>`
+
+
+
+  .. container:: sphx-glr-download
+
+     :download:`Download Jupyter notebook: plot_3_views_3_classes.ipynb <plot_3_views_3_classes.ipynb>`
+
+
+.. only:: html
+
+ .. rst-class:: sphx-glr-signature
+
+    `Gallery generated by Sphinx-Gallery <https://sphinx-gallery.github.io>`_
diff --git a/doc/docmumbo/auto_examples/plot_3_views_3_classes_codeobj.pickle b/doc/docmumbo/auto_examples/plot_3_views_3_classes_codeobj.pickle
new file mode 100644
index 0000000000000000000000000000000000000000..6f889f01721cd550caa52cbdd98e6550763ee216
Binary files /dev/null and b/doc/docmumbo/auto_examples/plot_3_views_3_classes_codeobj.pickle differ
diff --git a/doc/docmumbo/auto_examples/sg_execution_times.rst b/doc/docmumbo/auto_examples/sg_execution_times.rst
new file mode 100644
index 0000000000000000000000000000000000000000..2ac4607d11c692c649e2db4396e59b36a9295086
--- /dev/null
+++ b/doc/docmumbo/auto_examples/sg_execution_times.rst
@@ -0,0 +1,14 @@
+
+:orphan:
+
+.. _sphx_glr_auto_examples_sg_execution_times:
+
+Computation times
+=================
+**00:02.026** total execution time for **auto_examples** files:
+
++-----------------------------------------------------------------------------------------+-----------+--------+
+| :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py` (``plot_3_views_3_classes.py``) | 00:01.293 | 0.0 MB |
++-----------------------------------------------------------------------------------------+-----------+--------+
+| :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py` (``plot_2_views_2_classes.py``) | 00:00.733 | 0.0 MB |
++-----------------------------------------------------------------------------------------+-----------+--------+
diff --git a/doc/docmumbo/backreferences/multimodalboost.MumboClassifier.examples b/doc/docmumbo/backreferences/multimodalboost.MumboClassifier.examples
new file mode 100644
index 0000000000000000000000000000000000000000..5a6700768609a0f48a125afb6b1ddce85ca6546d
--- /dev/null
+++ b/doc/docmumbo/backreferences/multimodalboost.MumboClassifier.examples
@@ -0,0 +1,40 @@
+
+
+Examples using ``multimodalboost.MumboClassifier``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from two classes, split between two two-dimensional views...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_2_views_2_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py`
+
+.. raw:: html
+
+    </div>
+
+.. only:: not html
+
+ * :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py`
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from three classes, split between three two-dimensional v...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_3_views_3_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py`
+
+.. raw:: html
+
+    </div>
+
+.. only:: not html
+
+ * :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py`
diff --git a/doc/docmumbo/backreferences/multimodalboost.MumboClassifier.fit.examples b/doc/docmumbo/backreferences/multimodalboost.MumboClassifier.fit.examples
new file mode 100644
index 0000000000000000000000000000000000000000..b202edc020c6ff256a273ea62547988108f59061
--- /dev/null
+++ b/doc/docmumbo/backreferences/multimodalboost.MumboClassifier.fit.examples
@@ -0,0 +1,40 @@
+
+
+Examples using ``multimodalboost.MumboClassifier.fit``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from two classes, split between two two-dimensional views...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_2_views_2_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py`
+
+.. raw:: html
+
+    </div>
+
+.. only:: not html
+
+ * :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py`
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from three classes, split between three two-dimensional v...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_3_views_3_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py`
+
+.. raw:: html
+
+    </div>
+
+.. only:: not html
+
+ * :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py`
diff --git a/doc/docmumbo/backreferences/multimodalboost.MumboClassifier.staged_score.examples b/doc/docmumbo/backreferences/multimodalboost.MumboClassifier.staged_score.examples
new file mode 100644
index 0000000000000000000000000000000000000000..de145dbfe679961578ee352e2e0cde33a285e193
--- /dev/null
+++ b/doc/docmumbo/backreferences/multimodalboost.MumboClassifier.staged_score.examples
@@ -0,0 +1,40 @@
+
+
+Examples using ``multimodalboost.MumboClassifier.staged_score``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from two classes, split between two two-dimensional views...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_2_views_2_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py`
+
+.. raw:: html
+
+    </div>
+
+.. only:: not html
+
+ * :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py`
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from three classes, split between three two-dimensional v...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_3_views_3_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py`
+
+.. raw:: html
+
+    </div>
+
+.. only:: not html
+
+ * :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py`
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.best_views_.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.best_views_.examples
new file mode 100644
index 0000000000000000000000000000000000000000..c9cbba4932fda361aa92d432493c1be7d4c5949b
--- /dev/null
+++ b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.best_views_.examples
@@ -0,0 +1,40 @@
+
+
+Examples using ``multimodalboost.mumbo.MumboClassifier.best_views_``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from two classes, split between two two-dimensional views...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_2_views_2_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py`
+
+.. raw:: html
+
+    </div>
+
+.. only:: not html
+
+ * :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py`
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from three classes, split between three two-dimensional v...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_3_views_3_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py`
+
+.. raw:: html
+
+    </div>
+
+.. only:: not html
+
+ * :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py`
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.decision_function.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.decision_function.examples
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.estimator_weights_.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.estimator_weights_.examples
new file mode 100644
index 0000000000000000000000000000000000000000..d6434aca9912cdcd8a2b334196ffa4468c889fa3
--- /dev/null
+++ b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.estimator_weights_.examples
@@ -0,0 +1,40 @@
+
+
+Examples using ``multimodalboost.mumbo.MumboClassifier.estimator_weights_``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from two classes, split between two two-dimensional views...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_2_views_2_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py`
+
+.. raw:: html
+
+    </div>
+
+.. only:: not html
+
+ * :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py`
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from three classes, split between three two-dimensional v...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_3_views_3_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py`
+
+.. raw:: html
+
+    </div>
+
+.. only:: not html
+
+ * :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py`
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.estimators_.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.estimators_.examples
new file mode 100644
index 0000000000000000000000000000000000000000..a08cef149f1a6848aecd6a632db662d0d1c1e56a
--- /dev/null
+++ b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.estimators_.examples
@@ -0,0 +1,40 @@
+
+
+Examples using ``multimodalboost.mumbo.MumboClassifier.estimators_``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from two classes, split between two two-dimensional views...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_2_views_2_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py`
+
+.. raw:: html
+
+    </div>
+
+.. only:: not html
+
+ * :ref:`sphx_glr_auto_examples_plot_2_views_2_classes.py`
+
+.. raw:: html
+
+    <div class="sphx-glr-thumbcontainer" tooltip="In this toy example, we generate data from three classes, split between three two-dimensional v...">
+
+.. only:: html
+
+ .. figure:: /auto_examples/images/thumb/sphx_glr_plot_3_views_3_classes_thumb.png
+
+     :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py`
+
+.. raw:: html
+
+    </div>
+
+.. only:: not html
+
+ * :ref:`sphx_glr_auto_examples_plot_3_views_3_classes.py`
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.examples
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.fit.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.fit.examples
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.get_params.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.get_params.examples
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.predict.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.predict.examples
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.score.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.score.examples
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.set_params.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.set_params.examples
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.staged_decision_function.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.staged_decision_function.examples
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.staged_predict.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.staged_predict.examples
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.staged_score.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.MumboClassifier.staged_score.examples
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/doc/docmumbo/backreferences/multimodalboost.mumbo.examples b/doc/docmumbo/backreferences/multimodalboost.mumbo.examples
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/doc/docmumbo/conf.py b/doc/docmumbo/conf.py
new file mode 100644
index 0000000000000000000000000000000000000000..e989eef7fc0407bc9333f7a3ee22efaa4a9f4f07
--- /dev/null
+++ b/doc/docmumbo/conf.py
@@ -0,0 +1,270 @@
+# -*- coding: utf-8 -*-
+
+from datetime import date
+import os
+
+import multimodalboost
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+# sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    'sphinx.ext.autodoc',
+    'sphinx.ext.doctest',
+    'numpydoc',
+    'sphinx_gallery.gen_gallery']
+
+# Add any paths that contain templates here, relative to this directory.
+# templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+source_encoding = 'utf-8'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'multimodalboost'
+author = 'Florent Jaillet'
+copyright = '2017-{}, LIS UMR 7020'.format(date.today().year)
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = multimodalboost.__version__
+# The full version, including alpha/beta/rc tags.
+release = multimodalboost.__version__
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+# language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = 'nature'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+# html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+# html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+# html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = '{}doc'.format(project)
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    'papersize': 'a4paper',
+
+    # The font size ('10pt', '11pt' or '12pt').
+    'pointsize': '10pt',
+
+    # Additional stuff for the LaTeX preamble.
+    # 'preamble': '',
+
+    # Latex figure (float) alignment
+    'figure_align': 'htbp'}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+  (master_doc, '{}.tex'.format(project), '{} Documentation'.format(project),
+   author, 'manual')]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    (master_doc, project, '{} Documentation'.format(project),
+     [author], 1)
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+  (master_doc, project, '{} Documentation'.format(project), author, project,
+   'Boosting algorithms for machine learning with multimodal data.',
+   'Miscellaneous')]
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+# texinfo_no_detailmenu = False
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+    'sklearn': ('http://scikit-learn.org/stable', None)
+}
+
+numpydoc_show_class_members = False
+
+sphinx_gallery_conf = {
+    'doc_module': (project,),
+    'backreferences_dir': 'backreferences',
+    # path to your examples scripts
+    'examples_dirs': '../examples',
+    # path where to save gallery generated examples
+    'gallery_dirs': 'auto_examples'}
+
+# Generate the plots for the gallery
+plot_gallery = 'True'
diff --git a/doc/docmumbo/credits.rst b/doc/docmumbo/credits.rst
new file mode 100644
index 0000000000000000000000000000000000000000..a4ff1ecf7a2fe0964f74db99196e52a41940b9f3
--- /dev/null
+++ b/doc/docmumbo/credits.rst
@@ -0,0 +1,59 @@
+Credits
+=======
+
+**multimodalboost** is developed by the
+`development team <https://developpement.lis-lab.fr/>`_ of the
+`LIS <http://www.lis-lab.fr/>`_.
+
+If you use **multimodalboost** in a scientific publication, please cite the
+following paper::
+
+ @InProceedings{Koco:2011:BAM,
+  author={Ko\c{c}o, Sokol and Capponi, C{\'e}cile},
+  editor={Gunopulos, Dimitrios and Hofmann, Thomas and Malerba, Donato
+          and Vazirgiannis, Michalis},
+  title={A Boosting Approach to Multiview Classification with Cooperation},
+  booktitle={Proceedings of the 2011 European Conference on Machine Learning
+             and Knowledge Discovery in Databases - Volume Part II},
+  year={2011},
+  location={Athens, Greece},
+  publisher={Springer-Verlag},
+  address={Berlin, Heidelberg},
+  pages={209--228},
+  numpages={20},
+  isbn={978-3-642-23783-6},
+  url={https://link.springer.com/chapter/10.1007/978-3-642-23783-6_14},
+  keywords={boosting, classification, multiview learning,
+            supervised learning},
+ }
+
+References
+----------
+
+* Sokol Koço, Cécile Capponi,
+  `"A boosting approach to multiview classification with cooperation"
+  <https://link.springer.com/chapter/10.1007/978-3-642-23783-6_14>`_,
+  Proceedings of the 2011 European Conference on Machine Learning (ECML),
+  Athens, Greece, pp.209-228, 2011, Springer-Verlag.
+
+* Sokol Koço,
+  `"Tackling the uneven views problem with cooperation based ensemble
+  learning methods" <http://www.theses.fr/en/2013AIXM4101>`_,
+  PhD Thesis, Aix-Marseille Université, 2013.
+
+Copyright
+---------
+
+Université d'Aix Marseille (AMU) -
+Centre National de la Recherche Scientifique (CNRS) -
+Université de Toulon (UTLN).
+
+Copyright © 2017-2018 AMU, CNRS, UTLN
+
+License
+-------
+
+**multimodalboost** is free software: you can redistribute it and/or modify
+it under the terms of the **GNU Lesser General Public License** as published by
+the Free Software Foundation, either **version 3** of the License, or
+(at your option) any later version.
diff --git a/doc/docmumbo/index.rst b/doc/docmumbo/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..3687ae362c02d40859f0807f2feac8b9bbc0c264
--- /dev/null
+++ b/doc/docmumbo/index.rst
@@ -0,0 +1,34 @@
+.. project-template documentation master file, created by
+   sphinx-quickstart on Mon Jan 18 14:44:12 2016.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+multimodalboost
+===============
+
+**multimodalboost** is a Python package implementing boosting algorithms for
+machine learning with multimodal data.
+
+It is compatible with `scikit-learn <http://scikit-learn.org/>`_, a popular
+package for machine learning in Python.
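+
+The estimators follow the usual scikit-learn ``fit``/``predict`` interface,
+with an additional ``views_ind`` argument describing how the columns of ``X``
+are split into views. The following sketch condenses the "2 views, 2 classes"
+example from the gallery and only illustrates the calling convention::
+
+  import numpy as np
+  from multimodalboost.mumbo import MumboClassifier
+
+  n = 100
+  # In each view, only half of the samples of each class are separable from
+  # the other class (same construction as in the gallery example).
+  view_0 = np.concatenate((np.random.random((n, 2)),
+                           np.random.random((n, 2)) + [1., 0.],
+                           np.random.random((n, 2)),
+                           np.random.random((n, 2)) + [0., 1.]))
+  view_1 = np.concatenate((np.random.random((n, 2)) + [1., 0.],
+                           np.random.random((n, 2)),
+                           np.random.random((n, 2)) + [0., 1.],
+                           np.random.random((n, 2))))
+  X = np.concatenate((view_0, view_1), axis=1)
+  y = np.zeros(4 * n, dtype=np.int64)
+  y[2 * n:] = 1
+  views_ind = np.array([0, 2, 4])  # view 0: columns 0-1, view 1: columns 2-3
+
+  clf = MumboClassifier(n_estimators=3)
+  clf.fit(X, y, views_ind)
+  y_pred = clf.predict(X)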
+
+Documentation
+-------------
+
+:Release: |version|
+:Date: |today|
+
+.. toctree::
+   :maxdepth: 1
+
+   install_devel
+   api
+   auto_examples/index
+   credits
+
+Indices and tables
+------------------
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/doc/docmumbo/install_devel.rst b/doc/docmumbo/install_devel.rst
new file mode 100644
index 0000000000000000000000000000000000000000..189ff2a1521a0954bae8e2cbb40bae67be79c7bc
--- /dev/null
+++ b/doc/docmumbo/install_devel.rst
@@ -0,0 +1,70 @@
+Installation and development
+============================
+
+Dependencies
+------------
+
+**multimodalboost** works with **Python 3.5 or later**.
+
+**multimodalboost** depends on **scikit-learn** (version >= 0.19).
+
+Additionally, **matplotlib** is required to run the examples.
+
+Installation
+------------
+
+**multimodalboost** is
+`available on PyPI <https://pypi.org/project/multimodalboost/>`_
+and can be installed using **pip**::
+
+  pip install multimodalboost
+
+If you prefer to install directly from the **source code**, clone the **Git**
+repository of the project and run the **setup.py** file with the following
+commands::
+
+  git clone git@gitlab.lis-lab.fr:dev/multimodalboost.git
+  cd multimodalboost
+  python setup.py install
+
+or alternatively use **pip**::
+
+  pip install git+https://gitlab.lis-lab.fr/dev/multimodalboost.git
+
+Development
+-----------
+
+The development of multimodalboost follows the guidelines provided by the
+scikit-learn community.
+
+Refer to the `Developer's Guide <http://scikit-learn.org/stable/developers>`_
+of the scikit-learn project for more details.
+
+Source code
+-----------
+
+You can get the **source code** from the **Git** repository of the project::
+
+  git clone git@gitlab.lis-lab.fr:dev/multimodalboost.git
+
+
+Testing
+-------
+
+**pytest** and **pytest-cov** are required to run the **test suite** with::
+
+  cd multimodalboost
+  pytest
+
+A code coverage report is displayed in the terminal when running the tests.
+An HTML version of the report is also stored in the directory **htmlcov**.
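+
+If the coverage options are not enabled by default by the project
+configuration, they can be requested explicitly on the command line
+(assuming the package directory is ``multimodal``)::
+
+  pytest --cov=multimodal --cov-report=term --cov-report=html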
+
+Generating the documentation
+----------------------------
+
+The generation of the documentation requires **sphinx**, **sphinx-gallery**,
+**numpydoc** and **matplotlib** and can be run with::
+
+  python setup.py build_sphinx
+
+The resulting files are stored in the directory **build/sphinx/html**.
diff --git a/multimodal/boosting/__init__.py b/multimodal/boosting/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..5b5a0e3ce1b63e85bd12b953c47e67d1ddd0dafb 100644
--- a/multimodal/boosting/__init__.py
+++ b/multimodal/boosting/__init__.py
@@ -0,0 +1,11 @@
+from .mumbo import MumboClassifier
+from .cumbo import MuCumboClassifier
+
+__all__ = ['MumboClassifier', 'MuCumboClassifier']
+
+__version__ = '1.0.dev0'
diff --git a/multimodal/boosting/boost.py b/multimodal/boosting/boost.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/multimodal/boosting/cumbo.py b/multimodal/boosting/cumbo.py
new file mode 100644
index 0000000000000000000000000000000000000000..169925af11a3a91da6d4845e6604ad0ef2e923b1
--- /dev/null
+++ b/multimodal/boosting/cumbo.py
@@ -0,0 +1,793 @@
+# -*- coding: utf-8 -*-
+r"""
+
+This module contains a **Mu**\ lti\ **C**\ onfusion **M**\ atrix **B**\ oosting (**MuCuMBo**)
+estimator for classification implemented in the ``MuCumboClassifier`` class.
+"""
+
+# Université d'Aix Marseille (AMU) -
+# Centre National de la Recherche Scientifique (CNRS) -
+# Université de Toulon (UTLN).
+# Copyright © 2017-2018 AMU, CNRS, UTLN
+#
+# This file is part of multimodalboost.
+#
+# multimodalboost is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# multimodalboost is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with multimodalboost. If not, see <http://www.gnu.org/licenses/>.
+#
+# Author: Dominique Benielli - Laboratoire d'Informatique et Systèmes - UMR 7020
+
+# The implementation of the MuCumboClassifier in this module used the code of
+# sklearn.ensemble.AdaBoostClassifier as a model and tried to use the same
+# structure, notations and behavior where possible.
+
+import numpy as np
+from sklearn.base import ClassifierMixin
+from sklearn.ensemble import BaseEnsemble
+from sklearn.ensemble.forest import BaseForest
+from sklearn.metrics import accuracy_score
+from sklearn.tree import DecisionTreeClassifier
+from sklearn.tree._tree import DTYPE
+from sklearn.tree.tree import BaseDecisionTree
+from sklearn.utils import check_array, check_X_y, check_random_state
+from sklearn.utils.multiclass import check_classification_targets
+from sklearn.utils.validation import check_is_fitted, has_fit_parameter
+from cvxopt import solvers, matrix, spdiag, exp, spmatrix, mul, div
+from multimodal.datasets.data_sample import Metriclearn_array
+import warnings
+
+
+class MuCumboClassifier(BaseEnsemble, ClassifierMixin):
+    r"""It then iterates the process on the same dataset but where the weights of
+    incorrectly classified instances are adjusted such that subsequent
+    classifiers focus more on difficult cases.
+    A MuCoMBo classifier.
+
+    A MuMBo classifier is a meta-estimator that implements a multimodal
+    (or multi-view) boosting algorithm:
+
+    It fits a set of classifiers on the original dataset splitted into several
+    views and retains the classifier obtained for the best view.
+
+    This class implements the MuMBo algorithm [1]_.
+
+    Parameters
+    ----------
+    base_estimator : object, optional (default=DecisionTreeClassifier)
+        Base estimator from which the boosted ensemble is built.
+        Support for sample weighting is required, as well as proper `classes_`
+        and `n_classes_` attributes. The default is a DecisionTreeClassifier
+        with parameter ``max_depth=1``.
+
+    n_estimators : integer, optional (default=50)
+        Maximum number of estimators at which boosting is terminated.
+
+    random_state : int, RandomState instance or None, optional (default=None)
+        If int, random_state is the seed used by the random number generator;
+        If RandomState instance, random_state is the random number generator;
+        If None, the random number generator is the RandomState instance used
+        by `np.random`.
+
+    Attributes
+    ----------
+    estimators\_ : list of classifiers
+        Collection of fitted sub-estimators.
+
+    classes\_ : numpy.ndarray, shape = (n_classes,)
+        Classes labels.
+
+    n_classes\_ : int
+        Number of classes.
+
+    n_views\_ : int
+        Number of views
+
+    estimator_weights\_ : numpy.ndarray of floats, shape = (len(estimators\_),)
+        Weights for each estimator in the boosted ensemble.
+
+    estimator_errors_ : array of floats
+        Empirical loss for each iteration.
+
+
+    n_yi : numpy.ndarray of integers, shape = (n_classes,)
+        Number of training samples in each class.
+
+    Examples
+    --------
+    >>> from multimodal.boosting.cumbo import MuCumboClassifier
+    >>> from sklearn.datasets import load_iris
+    >>> X, y = load_iris(return_X_y=True)
+    >>> views_ind = [0, 2, 4]  # view 0: sepal data, view 1: petal data
+    >>> clf = MuCumboClassifier(random_state=0)
+    >>> clf.fit(X, y, views_ind)  # doctest: +NORMALIZE_WHITESPACE
+    MuCumboClassifier(base_estimator=None, n_estimators=50, random_state=0)
+    >>> print(clf.predict([[ 5.,  3.,  1.,  1.]]))
+    [1]
+    >>> views_ind = [[0, 2], [1, 3]]  # view 0: length data, view 1: width data
+    >>> clf = MuCumboClassifier(random_state=0)
+    >>> clf.fit(X, y, views_ind)  # doctest: +NORMALIZE_WHITESPACE
+    MuCumboClassifier(base_estimator=None, n_estimators=50, random_state=0)
+    >>> print(clf.predict([[ 5.,  3.,  1.,  1.]]))
+    [1]
+
+    >>> from sklearn.tree import DecisionTreeClassifier
+    >>> base_estimator = DecisionTreeClassifier(max_depth=2)
+    >>> clf = MuCumboClassifier(base_estimator=base_estimator, random_state=0)
+    >>> clf.fit(X, y, views_ind)  # doctest: +NORMALIZE_WHITESPACE
+    MuCumboClassifier(base_estimator=DecisionTreeClassifier(class_weight=None,
+            criterion='gini', max_depth=2, max_features=None,
+            max_leaf_nodes=None, min_impurity_decrease=0.0,
+            min_impurity_split=None, min_samples_leaf=1, min_samples_split=2,
+            min_weight_fraction_leaf=0.0, presort=False, random_state=None,
+            splitter='best'),
+        n_estimators=50, random_state=0)
+    >>> print(clf.predict([[ 5.,  3.,  1.,  1.]]))
+    [1]
+
+    See also
+    --------
+    sklearn.ensemble.AdaBoostClassifier,
+    sklearn.ensemble.GradientBoostingClassifier,
+    sklearn.tree.DecisionTreeClassifier
+
+    References
+    ----------
+
+    .. [1] Sokol Koço, Cécile Capponi,
+           "A Boosting Approach to Multiview Classification with Cooperation",
+           Proceedings of the 2011 European Conference on Machine Learning and
+           Knowledge Discovery in Databases - Volume Part II, pp. 209-228,
+           Springer-Verlag, 2011,
+           https://link.springer.com/chapter/10.1007/978-3-642-23783-6_14
+
+    .. [2] Sokol Koço,
+           "Tackling the uneven views problem with cooperation based ensemble
+           learning methods",
+           PhD Thesis, Aix-Marseille Université, 2013,
+           http://www.theses.fr/en/2013AIXM4101.
+    """
+
+    def __init__(self,
+                 base_estimator=None,
+                 n_estimators=50,
+                 random_state=None): # n_estimators=50,
+        super(MuCumboClassifier, self).__init__(
+            base_estimator=base_estimator,
+            n_estimators=n_estimators)
+        self.random_state = random_state
+        # self.best_view_mode = self._validate_best_view_mode(best_view_mode)
+
+
+    def _validate_estimator(self):
+        """Check the estimator and set the base_estimator_ attribute."""
+        super(MuCumboClassifier, self)._validate_estimator(
+            default=DecisionTreeClassifier(max_depth=1))
+
+        if not has_fit_parameter(self.base_estimator_, "sample_weight"):
+            raise ValueError("%s doesn't support sample_weight."
+                             % self.base_estimator_.__class__.__name__)
+
+    def _validate_X_predict(self, X):
+        """Ensure that X is in the proper format."""
+        if (self.base_estimator is None or
+                isinstance(self.base_estimator,
+                           (BaseDecisionTree, BaseForest))):
+            X = check_array(X, accept_sparse='csr', dtype=DTYPE)
+        else:
+            X = check_array(X, accept_sparse=['csr', 'csc'])
+        if X.shape[1] != self.n_features_:
+            raise ValueError("X doesn't contain the right number of features.")
+        return X
+
+    def _extract_view(self, X, ind_view):
+        """Extract the view for the given index ind_view from the dataset X."""
+        if self.view_mode_ == "indices":
+            return X[:, self.views_ind_[ind_view]]
+        else:
+            return X[:, self.views_ind_[ind_view]:self.views_ind_[ind_view+1]]
+
+    def _compute_predictions(self, X):
+        """Compute predictions for all the stored estimators on the data X."""
+        n_samples = X.shape[0]
+        n_estimators = len(self.estimators_)
+        predictions = np.zeros((n_samples, n_estimators), dtype=np.int64)
+        for ind_estimator, estimator in enumerate(self.estimators_):
+            # MuCuMBo uses all the views: one estimator is stored per view at
+            # each iteration, so the view index is recovered from the
+            # estimator index
+            ind_view = ind_estimator % self.n_views_
+            predictions[:, ind_estimator] \
+                = estimator.predict(self._extract_view(X, ind_view))
+        return predictions
+
+    def _validate_views_ind(self, views_ind, n_features):
+        """Ensure proper format for views_ind and return number of views."""
+        views_ind = np.array(views_ind)
+        if np.issubdtype(views_ind.dtype, np.integer) and views_ind.ndim == 1:
+            if np.any(views_ind[:-1] >= views_ind[1:]):
+                raise ValueError("Values in views_ind must be sorted.")
+            if views_ind[0] < 0 or views_ind[-1] > n_features:
+                raise ValueError("Values in views_ind are not in a correct "
+                                 + "range for the provided data.")
+            self.view_mode_ = "slices"
+            n_views = views_ind.shape[0]-1
+        else:
+            if views_ind.ndim == 1:
+                if not views_ind.dtype == np.object:
+                    raise ValueError("The format of views_ind is not "
+                                     + "supported.")
+                for ind, val in enumerate(views_ind):
+                    views_ind[ind] = np.array(val)
+                    if not np.issubdtype(views_ind[ind].dtype, np.integer):
+                        raise ValueError("Values in views_ind must be "
+                                         + "integers.")
+                    if views_ind[ind].min() < 0 \
+                            or views_ind[ind].max() >= n_features:
+                        raise ValueError("Values in views_ind are not in a "
+                                         + "correct range for the provided "
+                                         + "data.")
+            elif views_ind.ndim == 2:
+                if not np.issubdtype(views_ind.dtype, np.integer):
+                    raise ValueError("Values in views_ind must be integers.")
+                if views_ind.min() < 0 or views_ind.max() >= n_features:
+                    raise ValueError("Values in views_ind are not in a "
+                                     + "correct range for the provided data.")
+            else:
+                raise ValueError("The format of views_ind is not supported.")
+            self.view_mode_ = "indices"
+            n_views = views_ind.shape[0]
+        return (views_ind, n_views)
+
+    # def _validate_best_view_mode(self, best_view_mode):
+    #     """Ensure that best_view_mode has a proper value."""
+    #     if best_view_mode not in ("edge", "error"):
+    #         raise ValueError('best_view_mode value must be either "edge" '
+    #                          + 'or "error"')
+    #     return best_view_mode
+
+    def _init_var(self, n_views, y):
+        "Create and initialize the variables used by the MuMBo algorithm."
+        n_classes = self.n_classes_
+        n_samples = y.shape[0]
+        # n_yi = np.unique(y, return_inverse=True)
+        cost = np.ones((n_views, n_samples, n_classes))
+        score_function = np.zeros((n_views, n_samples, n_classes))
+        n_yi_s = np.zeros(n_classes, dtype=np.int)
+        for indice_class in range(n_classes):
+            # n_yi: number of training examples belonging to class indice_class
+            n_yi = np.where(y == indice_class)[0].shape[0]
+            n_yi_s[indice_class] = int(n_yi)
+            cost[:, :, indice_class] /= n_yi
+        cost[:, np.arange(n_samples), y] *= -(n_classes-1)
+        # not necessary in mucombo
+        # cost_global = np.ones((n_samples, n_classes))
+        # cost_global[np.arange(n_samples), y] = -(n_classes-1)
+        label_score = np.zeros((n_views, n_samples, n_classes))
+        label_score_global = np.zeros((n_samples, n_classes))
+        predicted_classes = np.empty((n_views, n_samples), dtype=np.int64)
+        beta_class = np.ones((n_views, n_classes)) / n_classes
+        return (cost, label_score, label_score_global, predicted_classes,
+                score_function, beta_class, n_yi_s)
+
+    # def _compute_edge_global(self, cost_global, predicted_classes, y):
+    #     """Compute edge values for the global cost matrix."""
+    #     n_samples = y.shape[0]
+    #     edge_global = - np.sum(
+    #         cost_global[np.arange(n_samples), predicted_classes], axis=1) \
+    #         / (np.sum(cost_global)
+    #            - np.sum(cost_global[np.arange(n_samples), y]))
+    #     return edge_global
+
+    def _compute_dist(self, cost, y):
+        """Compute the sample distribution (i.e. the weights to use)."""
+        n_samples = y.shape[0]
+        # dist is forced to be c-contiguous so that sub-arrays of dist used
+        # as weights for the weak classifiers are also c-contiguous, which is
+        # required by some scikit-learn classifiers (for example
+        # sklearn.svm.SVC)
+        dist = np.empty(cost.shape[:2], dtype=cost.dtype, order="C")
+        # NOTE: In Sokol's PhD thesis, the formula for dist is mistakenly given
+        # with a minus sign in section 2.2.2 page 31
+        dist[:, :] = cost[:, np.arange(n_samples), y] \
+            / np.sum(cost[:, np.arange(n_samples), y], axis=1)[:, np.newaxis]
+        return dist
+
+    # def _compute_coop_coef(self, predicted_classes, y):
+    #     """Compute the cooperation coefficients."""
+    #     coop_coef = np.zeros(predicted_classes.shape)
+    #     coop_coef[predicted_classes == y] = 1.
+    #     coop_coef[:, np.logical_not(coop_coef.any(axis=0))] = 1.
+    #     return coop_coef
+
+    def _indicatrice(self, predicted_classes, y_i):
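+        """Compute the indicator matrices used for the beta optimization.
+
+        Returns, with shape (n_views, n_samples, n_classes): a binary
+        indicator of the class predicted by each view for each sample with
+        the true-class column zeroed out, a binary indicator of the true
+        class, and a sign matrix delta equal to -1 on the true-class column
+        and +1 elsewhere.
+        """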
+        n_samples = y_i.shape[0]
+        indicate_ones = np.zeros((self.n_views_, n_samples, self.n_classes_), dtype=np.int)
+        indicatrice_one_yi = np.zeros((self.n_views_, n_samples, self.n_classes_), dtype=np.int)
+        indicate_ones[np.arange(self.n_views_)[:, np.newaxis],
+                    np.arange(n_samples)[np.newaxis, :],
+                    predicted_classes[np.arange(self.n_views_), :]] = 1
+        indicate_ones[:, np.arange(n_samples), y_i] = 0
+        indicatrice_one_yi[:, np.arange(n_samples), y_i] = 1
+        delta = np.ones((self.n_views_, n_samples, self.n_classes_), dtype=np.int)
+        delta[:, np.arange(n_samples), y_i] = -1
+        # indic_minus_one = np.where(np.arange(self.n_classes_) == y)
+        return indicate_ones, indicatrice_one_yi, delta
+
+    def _compute_edges(self, cost, predicted_classes, y):
+        """Compute edge values for the cost matrices for all the views."""
+        n_views = predicted_classes.shape[0]
+        n_samples = y.shape[0]
+        edges = - np.sum(
+            cost[np.arange(n_views)[:, np.newaxis],
+                 np.arange(n_samples)[np.newaxis, :],
+                 predicted_classes[np.arange(n_views), :]], axis=1) \
+            / (np.sum(cost, axis=(1, 2))
+               - np.sum(cost[:, np.arange(n_samples), y], axis=1))
+        return edges
+
+    def _compute_alphas(self, edges):
+        """Compute values of confidence rate alpha given edge values."""
+        alphas = 0.5 * np.log((1. + edges) / (1. - edges))
+        # Guard against degenerate edges (edge equal to 1 or -1), which would
+        # give infinite or undefined confidence rates.
+        if np.any(np.isinf(alphas)) or np.any(np.isnan(alphas)):
+            alphas[np.isnan(alphas)] = 1.0
+            alphas[np.isinf(alphas)] = 1.0
+        return alphas
+
+    def _compute_cost(self, label_score, predicted_classes, y, alphas, betas,
+                      use_coop_coef=True):
+        """Update label_score and compute the cost matrices for all views."""
+        # use_coop_coef is a boolean parameter used to choose if the
+        # cooperation coefficients are computed and taken into account when
+        # updating the cost matrices.
+        # It is introduced here for future explorations.
+        n_views = predicted_classes.shape[0]
+        n_samples = y.shape[0]
+        if use_coop_coef:
+            # coop_coef = self._compute_coop_coef(predicted_classes, y)
+
+            # ajout mucumbo verifier les dim
+            # ????? coop_cof_beta = betas[predicted_classes]
+            increment = alphas[:, np.newaxis, np.newaxis] * betas[:, np.newaxis, :]
+            increment = np.tile(increment,(1, n_samples, 1))
+        else:
+            increment = np.tile(alphas[:, np.newaxis, np.newaxis], (1, n_samples, self.n_classes_))
+        label_score[np.arange(n_views)[:, np.newaxis],
+                    np.arange(n_samples)[np.newaxis, :],
+                    predicted_classes[np.arange(n_views), :]] += increment[np.arange(n_views)[:, np.newaxis],
+                                                                           np.arange(n_samples)[np.newaxis, :] ,
+                                                                           predicted_classes[np.arange(n_views), :]]
+        cost = np.exp(
+            label_score
+            - label_score[:, np.arange(n_samples), y][:, :, np.newaxis]) / self.n_yi[np.newaxis, np.newaxis, :]
+        score_function_dif = np.exp(
+            label_score
+            - label_score[:, np.arange(n_samples), y][:, :, np.newaxis]) / self.n_yi[np.newaxis, np.newaxis, :]
+        cost[:, np.arange(n_samples), y] -= np.sum(cost, axis=2)
+        return (cost, label_score, score_function_dif)
+
+    def _prepare_beta_solver(self):
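+        """Build the constant matrices of the convex problem solved for beta.
+
+        The equality constraints ``A x = b`` force the beta coefficients of
+        each view to sum to one, while the inequality constraints
+        ``G x <= h`` keep every coefficient within [0, 1].
+        """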
+        view = self.n_views_
+        m = self.n_classes_
+        A = matrix(0.0, (view, m * view))
+        for v in range(view):
+            A[v, v * m:(v + 1) * m] = 1
+        b = matrix(1.0, (view, 1))
+        l = {'l': 2 * m * view}
+        G = matrix(0.0, (2 * m * view, m * view))
+        one_diag_matrix = matrix(1.0, (m * view, 1))
+        G_1 = spdiag(one_diag_matrix)
+        G[0:m * view, :] = G_1
+        G[m * view:2 * m * view, :] = -1.0 * G_1
+        h = matrix(0.0, (2 * m * view, 1))
+        h[0:m * view] = 1.0
+        return A, b, G, h, l
+
+    def _compute_betas(self, alphas, y, score_function_dif_Tminus1, predicted_classes):
+        """
+        minimization of
+        :math:` argmin on /beta_{t,c} sum_{v,i,c!=y_i}{frac{1}{n_y_i} cost_{t-1} exp{/apha_{v} \beta_{c}^{b}'
+
+        Parameters
+        ----------
+        edges : array-like
+        alphas
+        y
+        estimators
+
+        Returns
+        -------
+        betas arrays
+        """
+        # delta = self.delta_c_yi(predicted_classes, y)
+        indicat, indicate_yi, delta = self._indicatrice(predicted_classes, y)
+        delta_vue = np.block(np.split(delta, self.n_views_, axis=0)).squeeze()
+        indicate_vue = np.block(np.split(indicat, self.n_views_, axis=0)).squeeze()
+        indicate_vue_yi = np.block(np.split(indicate_yi, self.n_views_, axis=0)).squeeze()
+        score_function_Tminus1_vue = np.block(np.split(score_function_dif_Tminus1, self.n_views_, axis=0)).squeeze()
+        A, b, G, h, l = self._prepare_beta_solver()
+        solver = self._solver_cp_forbeta(alphas, indicate_vue, indicate_vue_yi, delta_vue, score_function_Tminus1_vue, A, b, G, h, l)
+        betas = np.array(solver)
+        betas = betas.reshape((self.n_views_, self.n_classes_))
+        return betas
+
+    def _solver_cp_forbeta(self, alphas, indicate_vue, indicate_vue_yi, delta_vue, score_function_dif_Tminus1, A, b, G, h, l):
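+        """Solve for the beta coefficients with ``cvxopt.solvers.cp``.
+
+        The nested function F returns the value, gradient and Hessian of the
+        exponential objective, as required by the cvxopt interface. If the
+        solver fails, a feasible fallback solution proportional to 1 / n_yi
+        (normalized so that each view sums to one) is returned instead.
+        """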
+        solvers.options['show_progress'] = False
+        n_view = self.n_views_
+        m = self.n_classes_
+        coef = 1.0/np.tile(self.n_yi, self.n_views_).squeeze() * score_function_dif_Tminus1
+        zeta_v =  np.repeat(alphas, self.n_classes_) * indicate_vue * delta_vue
+        zeta_v_yi = np.repeat(alphas, self.n_classes_) * indicate_vue_yi * delta_vue
+        zeta = zeta_v + zeta_v_yi
+        zeta2 = zeta**2
+        def F(x=None, z=None):
+            if x is None:
+                # the algorithm works iteratively and needs an initial
+                # point x0, which is returned here
+                return 0, matrix(1.0, (n_view*m, 1))
+            if min(x) < 0.0:
+                return None   # infeasible point
+            # the code below defines what is computed at each iteration
+            f = sum(matrix(coef * exp( matrix(zeta * x.T))   ))
+            Df = matrix(np.sum( zeta * coef * exp(matrix( zeta * x.T ) ), axis=0 )).T # -(x**-1).T
+            if z is None: return f, Df
+            H = spdiag(z[0] * matrix(np.sum(coef * zeta2 * exp( matrix(zeta* x.T) ), axis= 0))) ## beta**(-2))
+            return f, Df, H
+        try:
+            solver = solvers.cp(F, A=A, b=b, G=G, h=h,
+                                dims={'l': 2 * n_view * m})['x']
+        except (ValueError, ArithmeticError, OverflowError) as e:
+            # fall back to a feasible solution proportional to 1 / n_yi
+            norm = np.sum(1.0 / self.n_yi)
+            yi_norm = self.n_yi * norm
+            solver = matrix(1.0 / np.tile(yi_norm, n_view).squeeze(),
+                            (n_view * m, 1))
+            print("Error while evaluating the beta coefficients: %s" % e)
+        return solver
+
+    def fit(self, X, y, views_ind=None):
+        """Build a multimodal boosted classifier from the training set (X, y).
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix}, shape = (n_samples, n_features)
+            Training multi-view input samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+
+        y : array-like, shape = (n_samples,)
+            Target values (class labels).
+
+        views_ind : array-like (default=[0, n_features//2, n_features])
+            Parameter specifying how to extract the data views from X:
+
+            - If views_ind is a 1-D array of sorted integers, the entries
+              indicate the limits of the slices used to extract the views,
+              where view ``n`` is given by
+              ``X[:, views_ind[n]:views_ind[n+1]]``.
+
+              With this convention each view is therefore a view (in the NumPy
+              sense) of X and no copy of the data is done.
+
+            - If views_ind is an array of arrays of integers, then each array
+              of integers ``views_ind[n]`` specifies the indices of the view
+              ``n``, which is then given by ``X[:, views_ind[n]]``.
+
+              With this convention each view creates therefore a partial copy
+              of the data in X. This convention is thus more flexible but less
+              efficient than the previous one.
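+
+            For instance, with 4 features, ``views_ind = [0, 2, 4]`` defines
+            two views given by ``X[:, 0:2]`` and ``X[:, 2:4]``, while
+            ``views_ind = [[0, 2], [1, 3]]`` defines the views
+            ``X[:, [0, 2]]`` and ``X[:, [1, 3]]``.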
+
+        Returns
+        -------
+        self : object
+            Returns self.
+
+        Raises
+        ------
+        ValueError
+            If the base estimator does not support sample weighting, or if
+            `X` and `views_ind` are not compatible.
+        """
+        warnings.filterwarnings("ignore")
+        if (self.base_estimator is None or
+                isinstance(self.base_estimator, (BaseDecisionTree,
+                                                 BaseForest))):
+            dtype = DTYPE
+            accept_sparse = 'csc'
+        else:
+            dtype = None
+            accept_sparse = ['csr', 'csc']
+        X, y = check_X_y(X, y, accept_sparse=accept_sparse, dtype=dtype)
+        check_classification_targets(y)
+        self._validate_estimator()
+        if views_ind is None:
+            if X.shape[1] > 1:
+                views_ind = np.array([0, X.shape[1]//2, X.shape[1]])
+            else:
+                views_ind = np.array([0, X.shape[1]])
+        self.X_ = Metriclearn_array(X, view_ind=views_ind)
+        self.views_ind_, n_views = self._validate_views_ind(views_ind,
+                                                            X.shape[1])
+        self.n_iterations_ = self.n_estimators // n_views
+        self.classes_, y = np.unique(y, return_inverse=True)
+        self.n_classes_ = len(self.classes_)
+        self.n_views_ = n_views
+        self.n_features_ = X.shape[1]
+        if self.n_classes_ == 1:
+            # This case would lead to division by 0 when computing the cost
+            # matrix so it needs special handling (but it is an obvious case as
+            # there is only one single class in the data).
+            self.estimators_ = []
+            self.estimator_weights_alpha_ = np.array([], dtype=np.float64)
+            self.estimator_weights_beta_ = np.zeros((self.n_iterations_, n_views), dtype=np.float)
+            self.estimator_errors_ = np.array([], dtype=np.float64)
+            return
+        # list of the weak classifiers h kept over all the iterations
+        self.estimators_ = []
+        # MuCuMBo modification: the weights have two dimensions,
+        # (n_iterations, n_views)
+        self.estimator_weights_alpha_ = np.zeros((self.n_iterations_, n_views), dtype=np.float64)
+        self.estimator_weights_beta_ = np.zeros((self.n_iterations_, n_views, self.n_classes_), dtype=np.float)
+        self.estimator_errors_ = np.zeros((n_views, self.n_iterations_), dtype=np.float64)
+
+        random_state = check_random_state(self.random_state)
+        (cost, label_score, label_score_global,
+         predicted_classes, score_function_dif, betas, n_yi) = self._init_var(n_views, y)
+        self.n_yi = n_yi
+        for current_iteration in range(self.n_iterations_):
+            # weak classifiers h selected at step t
+            dist = self._compute_dist(cost, y)
+            # fit one weak classifier per view on the current distribution
+            for ind_view in range(n_views):
+                estimator = self._make_estimator(append=False,
+                                                 random_state=random_state)
+                estimator.fit(self._extract_view(X, ind_view), y,
+                              sample_weight=dist[ind_view, :])
+                predicted_classes[ind_view, :] = estimator.predict(
+                    self._extract_view(X, ind_view))
+                self.estimators_.append(estimator)
+
+            # end of the weak classifier selection for this iteration
+            # TODO: estimate estimator_errors_
+            # self.estimator_errors_[current_iteration] = ...
+
+            # update the confidence rates and the cost matrices
+
+            edges = self._compute_edges(cost, predicted_classes, y)
+            alphas = self._compute_alphas(edges)
+            # MuCuMBo modification: store one alpha per view
+            self.estimator_weights_alpha_[current_iteration, :] = alphas
+
+            betas = self._compute_betas(alphas, y, score_function_dif, predicted_classes)
+            self.estimator_weights_beta_[current_iteration, :, :] = betas
+            # update cost matrices C_t_j ...
+            cost, label_score, score_function_dif = self._compute_cost(
+                label_score, predicted_classes, y, alphas, betas, True)
+        return self
+
+    def decision_function(self, X):
+        """Compute the decision function of X.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix}, shape = (n_samples, n_features)
+            Multi-view input samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+
+        Returns
+        -------
+        dec_fun : numpy.ndarray, shape = (n_samples, k)
+            Decision function of the input samples.
+            The order of outputs is the same as that of the `classes_`
+            attribute.
+            Binary classification is a special case with ``k == 1``,
+            otherwise ``k == n_classes``. For binary classification,
+            values <=0 mean classification in the first class in ``classes_``
+            and values >0 mean classification in the second class in
+            ``classes_``.
+        """
+        check_is_fitted(self, ("estimators_", "estimator_weights_alpha_","n_views_",
+                               "estimator_weights_beta_", "n_classes_", "views_ind_"))
+        X = self._validate_X_predict(X)
+
+        n_samples = X.shape[0]
+        n_estimators = len(self.estimators_)
+        n_classes = self.n_classes_
+        n_iterations = self.n_iterations_
+        predictions = self._compute_predictions(X)
+        n_views = self.n_views_
+
+        dec_func = np.zeros((n_samples, n_classes))
+        # MuCuMBo update: weight each estimator's prediction by its alpha
+        # (per view) and beta (per view and per class)
+        for ind_estimator in range(n_estimators):
+            ind_iteration = ind_estimator // self.n_views_
+            current_vue = ind_estimator % self.n_views_
+            vector_classes = predictions[:, ind_estimator]
+            dec_func[np.arange(n_samples), vector_classes] \
+                += (self.estimator_weights_alpha_[ind_iteration, current_vue, np.newaxis] * \
+                   self.estimator_weights_beta_[ind_iteration, current_vue,  vector_classes])
+
+        if n_classes == 2:
+            dec_func[:, 0] *= -1
+            return np.sum(dec_func, axis=1)
+
+        return dec_func
+
+    def staged_decision_function(self, X):
+        """Compute decision function of X for each boosting iteration.
+
+        This method allows monitoring (i.e. determine error on testing set)
+        after each boosting iteration.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix}, shape = (n_samples, n_features)
+            Multi-view input samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+
+        Returns
+        -------
+        dec_fun : generator of numpy.ndarrays, shape = (n_samples, k)
+            Decision function of the input samples.
+            The order of outputs is the same as that of the `classes_`
+            attribute.
+            Binary classification is a special case with ``k == 1``,
+            otherwise ``k == n_classes``. For binary classification,
+            values <=0 mean classification in the first class in ``classes_``
+            and values >0 mean classification in the second class in
+            ``classes_``.
+        """
+        check_is_fitted(self, ("estimators_", "estimator_weights_alpha_","n_views_",
+                               "estimator_weights_beta_", "n_classes_", "views_ind_"))
+        X = self._validate_X_predict(X)
+
+        n_samples = X.shape[0]
+        n_stage = len(self.estimators_)
+        n_classes = self.n_classes_
+        n_views = self.n_views_
+        predictions = self._compute_predictions(X)
+
+        dec_func = np.zeros((n_samples, n_classes))
+        for ind_e in range(n_stage):
+            vector_classes = predictions[:, ind_e]
+            current_vue = ind_e % self.n_views_
+            ind_iteration = ind_e // self.n_views_
+            dec_func[np.arange(n_samples), vector_classes] \
+                += (self.estimator_weights_alpha_[ind_iteration, current_vue, np.newaxis] * \
+                   self.estimator_weights_beta_[ind_iteration, current_vue,  vector_classes])
+            if n_classes == 2:
+                tmp_dec_func = np.array(dec_func)
+                tmp_dec_func[ :, 0] *= -1
+                yield tmp_dec_func.sum(axis=1)
+
+            else:
+                yield np.array(dec_func)
+
+    def predict(self, X):
+        """Predict classes for X.
+
+        The predicted class of an input sample is computed as the weighted mean
+        prediction of the classifiers in the ensemble.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix}, shape = (n_samples, n_features)
+            Multi-view input samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+
+        Returns
+        -------
+        y : numpy.ndarray, shape = (n_samples,)
+            Predicted classes.
+
+        Raises
+        ------
+        ValueError
+            If `X` does not have the same total number of features as the
+            data used to fit the estimator.
+        """
+        pred = self.decision_function(X)
+
+        if self.n_classes_ == 2:
+            return self.classes_.take(pred > 0, axis=0)
+
+        return self.classes_.take(np.argmax(pred, axis=1), axis=0)
+
+    def staged_predict(self, X):
+        """Return staged predictions for X.
+
+        The predicted class of an input sample is computed as the weighted mean
+        prediction of the classifiers in the ensemble.
+
+        This generator method yields the ensemble prediction after each
+        iteration of boosting and therefore allows monitoring, such as to
+        determine the prediction on a test set after each boost.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix} of shape = (n_samples, n_features)
+            Multi-view input samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+
+        Returns
+        -------
+        y : generator of numpy.ndarrays, shape = (n_samples,)
+            Predicted classes.
+        """
+        n_classes = self.n_classes_
+        classes = self.classes_
+
+        if n_classes == 2:
+            for pred in self.staged_decision_function(X):
+                yield np.array(classes.take(pred > 0, axis=0))
+        else:
+            for pred in self.staged_decision_function(X):
+                yield np.array(classes.take(np.argmax(pred, axis=1), axis=0))
+
+    def score(self, X, y):
+        """Return the mean accuracy on the given test data and labels.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix} of shape = (n_samples, n_features)
+            Multi-view test samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+        y : array-like, shape = (n_samples,)
+            True labels for X.
+
+        Returns
+        -------
+        score : float
+            Mean accuracy of self.predict(X) wrt. y.
+        """
+        return super(MuCumboClassifier, self).score(X, y)
+
+    def staged_score(self, X, y):
+        """Return staged mean accuracy on the given test data and labels.
+
+        This generator method yields the ensemble score after each iteration of
+        boosting and therefore allows monitoring, such as to determine the
+        score on a test set after each boost.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix} of shape = (n_samples, n_features)
+            Multi-view test samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+        y : array-like, shape = (n_samples,)
+            True labels for X.
+
+        Returns
+        -------
+        score : generator of floats
+            Mean accuracy of self.staged_predict(X) wrt. y.
+        """
+        for y_pred in self.staged_predict(X):
+            yield accuracy_score(y, y_pred)
diff --git a/multimodal/boosting/mumbo.py b/multimodal/boosting/mumbo.py
new file mode 100644
index 0000000000000000000000000000000000000000..fd7bd42e2e3bac4c1706faac4feae1ad92498dff
--- /dev/null
+++ b/multimodal/boosting/mumbo.py
@@ -0,0 +1,681 @@
+# -*- coding: utf-8 -*-
+r"""Multimodal Boosting
+
+This module contains a **Mu**\ lti\ **M**\ odal **Bo**\ osting (**MuMBo**)
+estimator for classification implemented in the ``MumboClassifier`` class.
+"""
+
+# Université d'Aix Marseille (AMU) -
+# Centre National de la Recherche Scientifique (CNRS) -
+# Université de Toulon (UTLN).
+# Copyright © 2017-2018 AMU, CNRS, UTLN
+#
+# This file is part of multimodalboost.
+#
+# multimodalboost is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# multimodalboost is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with multimodalboost. If not, see <http://www.gnu.org/licenses/>.
+#
+# Author: Florent JAILLET - Laboratoire d'Informatique et Systèmes - UMR 7020
+
+# The implementation of the MumboClassifier in this module used the code of
+# sklearn.ensemble.AdaBoostClassifier as a model and tried to use the same
+# structure, notations and behavior where possible.
+
+import numpy as np
+from sklearn.base import ClassifierMixin
+from sklearn.ensemble import BaseEnsemble
+from sklearn.ensemble.forest import BaseForest
+from sklearn.metrics import accuracy_score
+from sklearn.tree import DecisionTreeClassifier
+from sklearn.tree.tree import BaseDecisionTree
+from sklearn.tree._tree import DTYPE
+from sklearn.utils import check_array, check_X_y, check_random_state
+from sklearn.utils.multiclass import check_classification_targets
+from sklearn.utils.validation import check_is_fitted, has_fit_parameter
+
+
+class MumboClassifier(BaseEnsemble, ClassifierMixin):
+    r"""It then iterates the process on the same dataset but where the weights of
+    incorrectly classified instances are adjusted such that subsequent
+    classifiers focus more on difficult cases.
+    A MuMBo classifier.
+
+    A MuMBo classifier is a meta-estimator that implements a multimodal
+    (or multi-view) boosting algorithm:
+
+    It fits a set of classifiers on the original dataset splitted into several
+    views and retains the classifier obtained for the best view.
+
+    This class implements the MuMBo algorithm [1]_.
+
+    Parameters
+    ----------
+    base_estimator : object, optional (default=DecisionTreeClassifier)
+        Base estimator from which the boosted ensemble is built.
+        Support for sample weighting is required, as well as proper `classes_`
+        and `n_classes_` attributes. The default is a DecisionTreeClassifier
+        with parameter ``max_depth=1``.
+
+    n_estimators : integer, optional (default=50)
+        Maximum number of estimators at which boosting is terminated.
+
+    random_state : int, RandomState instance or None, optional (default=None)
+        If int, random_state is the seed used by the random number generator;
+        If RandomState instance, random_state is the random number generator;
+        If None, the random number generator is the RandomState instance used
+        by `np.random`.
+
+    best_view_mode : {"edge", "error"}, optional (default="edge")
+        Mode used to select the best view at each iteration:
+
+        - if ``best_view_mode == "edge"``, the best view is the view maximizing
+          the edge value (variable δ (*delta*) in [1]_),
+        - if ``best_view_mode == "error"``, the best view is the view
+          minimizing the classification error.
+
+    Attributes
+    ----------
+    estimators\_ : list of classifiers
+        Collection of fitted sub-estimators.
+
+    classes\_ : numpy.ndarray, shape = (n_classes,)
+        Classes labels.
+
+    n_classes\_ : int
+        Number of classes.
+
+    estimator_weights\_ : numpy.ndarray of floats, shape = (len(estimators\_),)
+        Weights for each estimator in the boosted ensemble.
+
+    estimator_errors_ : array of floats
+        Empirical loss for each iteration.
+
+
+    best_views\_ : numpy.ndarray of integers, shape = (len(estimators\_),)
+        Indices of the best view for each estimator in the boosted ensemble.
+
+    Examples
+    --------
+    >>> from multimodal.boosting.mumbo import MumboClassifier
+    >>> from sklearn.datasets import load_iris
+    >>> X, y = load_iris(return_X_y=True)
+    >>> views_ind = [0, 2, 4]  # view 0: sepal data, view 1: petal data
+    >>> clf = MumboClassifier(random_state=0)
+    >>> clf.fit(X, y, views_ind)  # doctest: +NORMALIZE_WHITESPACE
+    MumboClassifier(base_estimator=None, best_view_mode='edge',
+        n_estimators=50, random_state=0)
+    >>> print(clf.predict([[ 5.,  3.,  1.,  1.]]))
+    [1]
+    >>> views_ind = [[0, 2], [1, 3]]  # view 0: length data, view 1: width data
+    >>> clf = MumboClassifier(random_state=0)
+    >>> clf.fit(X, y, views_ind)  # doctest: +NORMALIZE_WHITESPACE
+    MumboClassifier(base_estimator=None, best_view_mode='edge',
+        n_estimators=50, random_state=0)
+    >>> print(clf.predict([[ 5.,  3.,  1.,  1.]]))
+    [1]
+
+    >>> from sklearn.tree import DecisionTreeClassifier
+    >>> base_estimator = DecisionTreeClassifier(max_depth=2)
+    >>> clf = MumboClassifier(base_estimator=base_estimator, random_state=0)
+    >>> clf.fit(X, y, views_ind)  # doctest: +NORMALIZE_WHITESPACE
+    MumboClassifier(base_estimator=DecisionTreeClassifier(class_weight=None,
+            criterion='gini', max_depth=2, max_features=None,
+            max_leaf_nodes=None, min_impurity_decrease=0.0,
+            min_impurity_split=None, min_samples_leaf=1, min_samples_split=2,
+            min_weight_fraction_leaf=0.0, presort=False, random_state=None,
+            splitter='best'),
+        best_view_mode='edge', n_estimators=50, random_state=0)
+    >>> print(clf.predict([[ 5.,  3.,  1.,  1.]]))
+    [1]
+
+    See also
+    --------
+    sklearn.ensemble.AdaBoostClassifier,
+    sklearn.ensemble.GradientBoostingClassifier,
+    sklearn.tree.DecisionTreeClassifier
+
+    References
+    ----------
+    .. [1] Sokol Koço,
+           "Tackling the uneven views problem with cooperation based ensemble
+           learning methods",
+           PhD Thesis, Aix-Marseille Université, 2013,
+           http://www.theses.fr/en/2013AIXM4101.
+    """
+
+    def __init__(self,
+                 base_estimator=None,
+                 n_estimators=50,
+                 random_state=None,
+                 best_view_mode="edge"):
+
+        super(MumboClassifier, self).__init__(
+            base_estimator=base_estimator,
+            n_estimators=n_estimators)
+
+        self.random_state = random_state
+        self.best_view_mode = self._validate_best_view_mode(best_view_mode)
+
+    def _validate_estimator(self):
+        """Check the estimator and set the base_estimator_ attribute."""
+        super(MumboClassifier, self)._validate_estimator(
+            default=DecisionTreeClassifier(max_depth=1))
+
+        if not has_fit_parameter(self.base_estimator_, "sample_weight"):
+            raise ValueError("%s doesn't support sample_weight."
+                             % self.base_estimator_.__class__.__name__)
+
+    def _validate_X_predict(self, X):
+        """Ensure that X is in the proper format."""
+        if (self.base_estimator is None or
+                isinstance(self.base_estimator,
+                           (BaseDecisionTree, BaseForest))):
+            X = check_array(X, accept_sparse='csr', dtype=DTYPE)
+
+        else:
+            X = check_array(X, accept_sparse=['csr', 'csc'])
+        if X.shape[1] != self.n_features_:
+            raise ValueError("X doesn't contain the right number of features.")
+        return X
+
+    def _extract_view(self, X, ind_view):
+        """Extract the view for the given index ind_view from the dataset X."""
+        if self.view_mode_ == "indices":
+            return X[:, self.views_ind_[ind_view]]
+        else:
+            return X[:, self.views_ind_[ind_view]:self.views_ind_[ind_view+1]]
+
+    def _compute_predictions(self, X):
+        """Compute predictions for all the stored estimators on the data X."""
+        n_samples = X.shape[0]
+        n_estimators = len(self.estimators_)
+        predictions = np.zeros((n_samples, n_estimators), dtype=np.int64)
+        for ind_estimator, estimator in enumerate(self.estimators_):
+            ind_view = self.best_views_[ind_estimator]
+            predictions[:, ind_estimator] \
+                = estimator.predict(self._extract_view(X, ind_view))
+        return predictions
+
+    def _validate_views_ind(self, views_ind, n_features):
+        """Ensure proper format for views_ind and return number of views."""
+        views_ind = np.array(views_ind)
+        if np.issubdtype(views_ind.dtype, np.integer) and views_ind.ndim == 1:
+            if np.any(views_ind[:-1] >= views_ind[1:]):
+                raise ValueError("Values in views_ind must be sorted.")
+            if views_ind[0] < 0 or views_ind[-1] > n_features:
+                raise ValueError("Values in views_ind are not in a correct "
+                                 + "range for the provided data.")
+            self.view_mode_ = "slices"
+            n_views = views_ind.shape[0]-1
+        else:
+            if views_ind.ndim == 1:
+                if not views_ind.dtype == np.object:
+                    raise ValueError("The format of views_ind is not "
+                                     + "supported.")
+                for ind, val in enumerate(views_ind):
+                    views_ind[ind] = np.array(val)
+                    if not np.issubdtype(views_ind[ind].dtype, np.integer):
+                        raise ValueError("Values in views_ind must be "
+                                         + "integers.")
+                    if views_ind[ind].min() < 0 \
+                            or views_ind[ind].max() >= n_features:
+                        raise ValueError("Values in views_ind are not in a "
+                                         + "correct range for the provided "
+                                         + "data.")
+            elif views_ind.ndim == 2:
+                if not np.issubdtype(views_ind.dtype, np.integer):
+                    raise ValueError("Values in views_ind must be integers.")
+                if views_ind.min() < 0 or views_ind.max() >= n_features:
+                    raise ValueError("Values in views_ind are not in a "
+                                     + "correct range for the provided data.")
+            else:
+                raise ValueError("The format of views_ind is not supported.")
+            self.view_mode_ = "indices"
+            n_views = views_ind.shape[0]
+        return (views_ind, n_views)
+
+    def _validate_best_view_mode(self, best_view_mode):
+        """Ensure that best_view_mode has a proper value."""
+        if best_view_mode not in ("edge", "error"):
+            raise ValueError('best_view_mode value must be either "edge" '
+                             + 'or "error"')
+        return best_view_mode
+
+    def _init_var(self, n_views, y):
+        "Create and initialize the variables used by the MuMBo algorithm."
+        n_classes = self.n_classes_
+        n_samples = y.shape[0]
+
+        cost = np.ones((n_views, n_samples, n_classes))
+        cost[:, np.arange(n_samples), y] = -(n_classes-1)
+
+        cost_global = np.ones((n_samples, n_classes))
+        cost_global[np.arange(n_samples), y] = -(n_classes-1)
+
+        label_score = np.zeros((n_views, n_samples, n_classes))
+
+        label_score_global = np.zeros((n_samples, n_classes))
+
+        predicted_classes = np.empty((n_views, n_samples), dtype=np.int64)
+
+        return (cost, cost_global, label_score, label_score_global,
+                predicted_classes)
+
+    def _compute_edge_global(self, cost_global, predicted_classes, y):
+        """Compute edge values for the global cost matrix."""
+        n_samples = y.shape[0]
+        edge_global = - np.sum(
+            cost_global[np.arange(n_samples), predicted_classes], axis=1) \
+            / (np.sum(cost_global)
+               - np.sum(cost_global[np.arange(n_samples), y]))
+        return edge_global
+
+    def _compute_dist(self, cost, y):
+        """Compute the sample distribution (i.e. the weights to use)."""
+        n_samples = y.shape[0]
+        # dist is forced to be c-contiguous so that sub-arrays of dist used
+        # as weights for the weak classifiers are also c-contiguous, which is
+        # required by some scikit-learn classifiers (for example
+        # sklearn.svm.SVC)
+        dist = np.empty(cost.shape[:2], dtype=cost.dtype, order="C")
+        # NOTE: In Sokol's PhD thesis, the formula for dist is mistakenly given
+        # with a minus sign in section 2.2.2 page 31
+        dist[:, :] = cost[:, np.arange(n_samples), y] \
+            / np.sum(cost[:, np.arange(n_samples), y], axis=1)[:, np.newaxis]
+        return dist
+
+    def _compute_coop_coef(self, predicted_classes, y):
+        """Compute the cooperation coefficients."""
+        coop_coef = np.zeros(predicted_classes.shape)
+        coop_coef[predicted_classes == y] = 1.
+        coop_coef[:, np.logical_not(coop_coef.any(axis=0))] = 1.
+        return coop_coef
+
+    def _compute_edges(self, cost, predicted_classes, y):
+        """Compute edge values for the cost matrices for all the views."""
+        n_views = predicted_classes.shape[0]
+        n_samples = y.shape[0]
+        edges = - np.sum(
+            cost[np.arange(n_views)[:, np.newaxis],
+                 np.arange(n_samples)[np.newaxis, :],
+                 predicted_classes[np.arange(n_views), :]], axis=1) \
+            / (np.sum(cost, axis=(1, 2))
+               - np.sum(cost[:, np.arange(n_samples), y], axis=1))
+        return edges
+
+    def _compute_alphas(self, edges):
+        """Compute values of confidence rate alpha given edge values."""
+        alphas = 0.5 * np.log((1.+edges) / (1.-edges))
+        return alphas
+
+    def _compute_cost_global(self, label_score_global, best_predicted_classes,
+                             y, alpha):
+        """Update label_score_global and compute the global cost matrix."""
+        n_samples = y.shape[0]
+        label_score_global[np.arange(n_samples), best_predicted_classes] \
+            += alpha
+        cost_global = np.exp(
+            label_score_global
+            - label_score_global[np.arange(n_samples), y][:, np.newaxis])
+        cost_global[np.arange(n_samples), y] -= np.sum(cost_global, axis=1)
+        return (cost_global, label_score_global)
+
+    def _compute_cost(self, label_score, predicted_classes, y, alphas,
+                      use_coop_coef=True):
+        """Update label_score and compute the cost matrices for all views."""
+        # use_coop_coef is a boolean parameter used to choose if the
+        # cooperation coefficients are computed and taken into account when
+        # updating the cost matrices.
+        # It is introduced here for future explorations.
+        n_views = predicted_classes.shape[0]
+        n_samples = y.shape[0]
+        if use_coop_coef:
+            coop_coef = self._compute_coop_coef(predicted_classes, y)
+            increment = alphas[:, np.newaxis] * coop_coef
+        else:
+            increment = alphas[:, np.newaxis]
+        label_score[np.arange(n_views)[:, np.newaxis],
+                    np.arange(n_samples)[np.newaxis, :],
+                    predicted_classes[np.arange(n_views), :]] += increment
+        cost = np.exp(
+            label_score
+            - label_score[:, np.arange(n_samples), y][:, :, np.newaxis])
+        cost[:, np.arange(n_samples), y] -= np.sum(cost, axis=2)
+        return (cost, label_score)
+
+    def fit(self, X, y, views_ind=None):
+        """Build a multimodal boosted classifier from the training set (X, y).
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix}, shape = (n_samples, n_features)
+            Training multi-view input samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+
+        y : array-like, shape = (n_samples,)
+            Target values (class labels).
+
+        views_ind : array-like (default=[0, n_features//2, n_features])
+            Parameter specifying how to extract the data views from X:
+
+            - If views_ind is a 1-D array of sorted integers, the entries
+              indicate the limits of the slices used to extract the views,
+              where view ``n`` is given by
+              ``X[:, views_ind[n]:views_ind[n+1]]``.
+
+              With this convention each view is therefore a view (in the NumPy
+              sense) of X and no copy of the data is done.
+
+            - If views_ind is an array of arrays of integers, then each array
+              of integers ``views_ind[n]`` specifies the indices of the view
+              ``n``, which is then given by ``X[:, views_ind[n]]``.
+
+              With this convention each view creates therefore a partial copy
+              of the data in X. This convention is thus more flexible but less
+              efficient than the previous one.
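+
+            For instance, with 4 features, ``views_ind = [0, 2, 4]`` defines
+            two views given by ``X[:, 0:2]`` and ``X[:, 2:4]``, while
+            ``views_ind = [[0, 2], [1, 3]]`` defines the views
+            ``X[:, [0, 2]]`` and ``X[:, [1, 3]]``.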
+
+        Returns
+        -------
+        self : object
+            Returns self.
+        """
+        self.best_view_mode = self._validate_best_view_mode(
+            self.best_view_mode)
+        if (self.base_estimator is None or
+                isinstance(self.base_estimator, (BaseDecisionTree,
+                                                 BaseForest))):
+            dtype = DTYPE
+            accept_sparse = 'csc'
+        else:
+            dtype = None
+            accept_sparse = ['csr', 'csc']
+        X, y = check_X_y(X, y, accept_sparse=accept_sparse, dtype=dtype)
+        check_classification_targets(y)
+        self._validate_estimator()
+        if views_ind is None:
+            if X.shape[1] > 1:
+                views_ind = np.array([0, X.shape[1]//2, X.shape[1]])
+            else:
+                views_ind = np.array([0, X.shape[1]])
+        self.views_ind_, n_views = self._validate_views_ind(views_ind,
+                                                            X.shape[1])
+
+        self.classes_, y = np.unique(y, return_inverse=True)
+        self.n_classes_ = len(self.classes_)
+        self.n_features_ = X.shape[1]
+
+        if self.n_classes_ == 1:
+            # This case would lead to division by 0 when computing the cost
+            # matrix so it needs special handling (but it is an obvious case as
+            # there is only one single class in the data).
+            self.estimators_ = []
+            self.estimator_weights_ = np.array([], dtype=np.float64)
+            self.estimator_errors_ = np.array([], dtype=np.float64)
+            self.best_views_ = np.array([], dtype=np.int64)
+            return
+
+        self.estimators_ = []
+        self.estimator_weights_ = np.zeros(self.n_estimators, dtype=np.float64)
+        self.estimator_errors_ = np.zeros(self.n_estimators, dtype=np.float64)
+        self.best_views_ = - np.ones(self.n_estimators, dtype=np.int64)
+
+        random_state = check_random_state(self.random_state)
+        (cost, cost_global, label_score, label_score_global,
+         predicted_classes) = self._init_var(n_views, y)
+
+        current_iteration = 0
+        while True:
+            estimators = []
+            dist = self._compute_dist(cost, y)
+            for ind_view in range(n_views):
+                estimator = self._make_estimator(append=False,
+                                                 random_state=random_state)
+                estimator.fit(self._extract_view(X, ind_view), y,
+                              sample_weight=dist[ind_view, :])
+                estimators.append(estimator)
+                predicted_classes[ind_view, :] = estimator.predict(
+                    self._extract_view(X, ind_view))
+
+            edges = self._compute_edge_global(
+                    cost_global, predicted_classes, y)
+            if self.best_view_mode == "edge":
+                best_view = np.argmax(edges)
+            else:  # self.best_view_mode == "error"
+                n_errors = np.sum(predicted_classes != y, axis=1)
+                best_view = np.argmin(n_errors)
+
+            edge = edges[best_view]
+
+            if (edge == 1.):
+                self.estimator_weights_[0] = 1.
+                self.estimator_weights_.resize((1, ))
+                self.best_views_[0] = best_view
+                self.best_views_.resize((1, ))
+                self.estimators_ = [estimators[best_view]]
+                self.estimator_errors_[0] = 0.
+                self.estimator_errors_.resize((1, ))
+                break
+
+            self.estimator_errors_[current_iteration] = (
+                np.average(cost_global[np.arange(y.shape[0]), y])
+                * (-1. / (self.n_classes_-1)))
+
+            alpha = self._compute_alphas(edge)
+            self.estimator_weights_[current_iteration] = alpha
+            self.best_views_[current_iteration] = best_view
+            self.estimators_.append(estimators[best_view])
+
+            if current_iteration == self.n_estimators-1:
+                break
+
+            cost_global, label_score_global = self._compute_cost_global(
+                label_score_global, predicted_classes[best_view, :], y, alpha)
+
+            edges = self._compute_edges(cost, predicted_classes, y)
+            alphas = self._compute_alphas(edges)
+            cost, label_score = self._compute_cost(
+                label_score, predicted_classes, y, alphas)
+
+            current_iteration += 1
+
+        return self
+
+    def decision_function(self, X):
+        """Compute the decision function of X.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix}, shape = (n_samples, n_features)
+            Multi-view input samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+
+        Returns
+        -------
+        dec_fun : numpy.ndarray, shape = (n_samples, k)
+            Decision function of the input samples.
+            The order of outputs is the same as that of the `classes_`
+            attribute.
+            Binary classification is a special case with ``k == 1``,
+            otherwise ``k == n_classes``. For binary classification,
+            values <=0 mean classification in the first class in ``classes_``
+            and values >0 mean classification in the second class in
+            ``classes_``.
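+
+            As a sketch, for a fitted binary classifier ``clf``, ``predict``
+            derives from these values as::
+
+                scores = clf.decision_function(X)
+                labels = clf.classes_.take((scores > 0).astype(int))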
+        """
+        check_is_fitted(self, ("estimators_", "estimator_weights_",
+                               "best_views_", "n_classes_", "views_ind_"))
+        X = self._validate_X_predict(X)
+
+        n_samples = X.shape[0]
+        n_estimators = len(self.estimators_)
+        n_classes = self.n_classes_
+
+        predictions = self._compute_predictions(X)
+
+        dec_func = np.zeros((n_samples, n_classes))
+        for ind_estimator in range(n_estimators):
+            dec_func[np.arange(n_samples), predictions[:, ind_estimator]] \
+                += self.estimator_weights_[ind_estimator]
+
+        if n_classes == 2:
+            dec_func[:, 0] *= -1
+            return np.sum(dec_func, axis=1)
+
+        return dec_func
+
+    def staged_decision_function(self, X):
+        """Compute decision function of X for each boosting iteration.
+
+        This method allows monitoring (i.e. determining the error on a test
+        set) after each boosting iteration.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix}, shape = (n_samples, n_features)
+            Multi-view input samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+
+        Returns
+        -------
+        dec_fun : generator of numpy.ndarrays, shape = (n_samples, k)
+            Decision function of the input samples.
+            The order of outputs is the same as that of the `classes_`
+            attribute.
+            Binary classification is a special case with ``k == 1``,
+            otherwise ``k == n_classes``. For binary classification,
+            values <=0 mean classification in the first class in ``classes_``
+            and values >0 mean classification in the second class in
+            ``classes_``.
+        """
+        check_is_fitted(self, ("estimators_", "estimator_weights_",
+                               "n_classes_", "views_ind_"))
+        X = self._validate_X_predict(X)
+
+        n_samples = X.shape[0]
+        n_estimators = len(self.estimators_)
+        n_classes = self.n_classes_
+
+        predictions = self._compute_predictions(X)
+
+        dec_func = np.zeros((n_samples, n_classes))
+        for ind_estimator in range(n_estimators):
+            dec_func[np.arange(n_samples), predictions[:, ind_estimator]] \
+                += self.estimator_weights_[ind_estimator]
+            if n_classes == 2:
+                tmp_dec_func = np.array(dec_func)
+                tmp_dec_func[:, 0] *= -1
+                yield tmp_dec_func.sum(axis=1)
+            else:
+                yield np.array(dec_func)
+
+    def predict(self, X):
+        """Predict classes for X.
+
+        The predicted class of an input sample is computed as the weighted mean
+        prediction of the classifiers in the ensemble.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix}, shape = (n_samples, n_features)
+            Multi-view input samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+
+        Returns
+        -------
+        y : numpy.ndarray, shape = (n_samples,)
+            Predicted classes.
+        """
+        pred = self.decision_function(X)
+
+        if self.n_classes_ == 2:
+            return self.classes_.take(pred > 0, axis=0)
+
+        return self.classes_.take(np.argmax(pred, axis=1), axis=0)
+
+    def staged_predict(self, X):
+        """Return staged predictions for X.
+
+        The predicted class of an input sample is computed as the weighted mean
+        prediction of the classifiers in the ensemble.
+
+        This generator method yields the ensemble prediction after each
+        iteration of boosting and therefore allows monitoring, such as to
+        determine the prediction on a test set after each boost.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix} of shape = (n_samples, n_features)
+            Multi-view input samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+
+        Returns
+        -------
+        y : generator of numpy.ndarrays, shape = (n_samples,)
+            Predicted classes.
+        """
+        n_classes = self.n_classes_
+        classes = self.classes_
+
+        if n_classes == 2:
+            for pred in self.staged_decision_function(X):
+                yield np.array(classes.take(pred > 0, axis=0))
+        else:
+            for pred in self.staged_decision_function(X):
+                yield np.array(classes.take(np.argmax(pred, axis=1), axis=0))
+
+    def score(self, X, y):
+        """Return the mean accuracy on the given test data and labels.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix} of shape = (n_samples, n_features)
+            Multi-view test samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+        y : array-like, shape = (n_samples,)
+            True labels for X.
+
+        Returns
+        -------
+        score : float
+            Mean accuracy of self.predict(X) wrt. y.
+        """
+        return super(MumboClassifier, self).score(X, y)
+
+    def staged_score(self, X, y):
+        """Return staged mean accuracy on the given test data and labels.
+
+        This generator method yields the ensemble score after each iteration of
+        boosting and therefore allows monitoring, such as to determine the
+        score on a test set after each boost.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix} of shape = (n_samples, n_features)
+            Multi-view test samples.
+            Sparse matrix can be CSC, CSR, COO, DOK, or LIL.
+            COO, DOK and LIL are converted to CSR.
+        y : array-like, shape = (n_samples,)
+            True labels for X.
+
+        Returns
+        -------
+        score : generator of floats
+            Mean accuracy of self.staged_predict(X) wrt. y.
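+
+        A minimal monitoring sketch (hypothetical held-out data ``X_test`` and
+        ``y_test``)::
+
+            for it, acc in enumerate(clf.staged_score(X_test, y_test)):
+                print("accuracy after boosting round", it, ":", acc)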
+        """
+        for y_pred in self.staged_predict(X):
+            yield accuracy_score(y, y_pred)
diff --git a/multimodal/datasets/__init__.py b/multimodal/datasets/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..3bffe2098bb9f74b932253f87be329a69369cef6
--- /dev/null
+++ b/multimodal/datasets/__init__.py
@@ -0,0 +1,2 @@
+from multimodal.datasets.base import *
+from multimodal.datasets.data_sample import DataSample, Metriclearn_array
\ No newline at end of file
diff --git a/multimodal/datasets/__pycache__/__init__.cpython-36.pyc b/multimodal/datasets/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..71e809afc1d58e7dd739fc5d6790139d3a7a4230
Binary files /dev/null and b/multimodal/datasets/__pycache__/__init__.cpython-36.pyc differ
diff --git a/multimodal/datasets/__pycache__/base.cpython-36.pyc b/multimodal/datasets/__pycache__/base.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c952741223ea079326c463e7176cb64f88b719a4
Binary files /dev/null and b/multimodal/datasets/__pycache__/base.cpython-36.pyc differ
diff --git a/multimodal/datasets/__pycache__/data_sample.cpython-36.pyc b/multimodal/datasets/__pycache__/data_sample.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0dd01d941e19a920b48b248b2107c9ec6586b3b1
Binary files /dev/null and b/multimodal/datasets/__pycache__/data_sample.cpython-36.pyc differ
diff --git a/multimodal/datasets/base.py b/multimodal/datasets/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..0e033e7b2b1b2bf75b372b657a1839fc21776ecf
--- /dev/null
+++ b/multimodal/datasets/base.py
@@ -0,0 +1,31 @@
+from __future__ import print_function
+import numpy as np
+import numpy.ma as ma
+from multimodal.datasets.data_sample import DataSample
+
+from six.moves import cPickle as pickle  # for performance
+
+
+def load_npz_X_y(filename_):
+    with np.load(filename_) as npzfile:
+        return npzfile['X'], npzfile['y']
+
+def save_npz_X_y(filename_, X, y):
+    np.savez(filename_, X=X, y=y)
+
+def save_dict(di_, filename_):
+    with open(filename_, 'wb') as f:
+        pickle.dump(di_, f)
+
+def load_dict(filename_):
+    with open(filename_, 'rb') as f:
+        ret_di = pickle.load(f)
+    return ret_di
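+
+
+# A minimal round-trip sketch (hypothetical file names):
+#
+#     X, y = np.zeros((4, 6)), np.zeros(4)
+#     save_npz_X_y("demo_data", X, y)          # writes demo_data.npz
+#     X2, y2 = load_npz_X_y("demo_data.npz")
+#
+#     views = {0: X[:, 0:3], 1: X[:, 3:6]}
+#     save_dict(views, "demo_views.pkl")
+#     views2 = load_dict("demo_views.pkl")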
+
+
+def _create_pickle_files(adr, dsample):
+    with open(adr + ".sample.pkl", "wb") as f:
+        pickle.dump(dsample, f)
diff --git a/multimodal/datasets/data_sample.py b/multimodal/datasets/data_sample.py
new file mode 100644
index 0000000000000000000000000000000000000000..e6bcf8189646fe50fdef6de07bcce86b83635096
--- /dev/null
+++ b/multimodal/datasets/data_sample.py
@@ -0,0 +1,291 @@
+# -*- coding: utf-8 -*-
+
+"""This module contains the DataSample class and Metriclearn_array class
+The DataSample class encapsulates a sample 's components
+nbL and nbEx numbers,
+Metriclearn_arra class inherit from numpy ndarray and contains a 2d data ndarray
+with the shape (n_samples, n_view_i * n_features_i)
+
+0        1    2    3
+======== ==== ==== ====
+xxxxxxxx xxxx xxxx xxxx
+xxxxxxxx xxxx xxxx xxxx
+xxxxxxxx xxxx xxxx xxxx
+xxxxxxxx xxxx xxxx xxxx
+xxxxxxxx xxxx xxxx xxxx
+xxxxxxxx xxxx xxxx xxxx
+xxxxxxxx xxxx xxxx xxxx
+xxxxxxxx xxxx xxxx xxxx
+xxxxxxxx xxxx xxxx xxxx
+======== ==== ==== ====
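+
+For instance, the layout sketched above (one 8-column view followed by three
+4-column views) would correspond to ``views_ind = [0, 8, 12, 16, 20]``.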
+
+Each block of columns corresponds to one view; the boundaries between blocks
+are given by the ``views_ind`` attribute and the per-view widths by ``shapes_int``.
+"""
+import numpy as np
+import numpy.ma as ma
+
+
+class Metriclearn_array(ma.MaskedArray, np.ndarray):
+    """
+    Metriclearn_array inherits from numpy masked arrays
+
+
+    Parameters
+    ----------
+
+    data : several input formats are supported
+         - dictionary of multi-view arrays, one entry per view, each with
+           shape = (n_samples, n_features_view):
+           {0: array([[...]]),
+            1: array([[...]]),
+            ...}
+         - numpy array of array-likes, one entry per view, each with
+           shape = (n_samples, n_features_view):
+            [[[...]],
+             [[...]],
+             ...]
+         - {array like} with shape (n_samples, n_views * n_features), with
+           'views_ind' different from 'None', for multi-view input samples.
+
+
+
+
+    views_ind : array-like (default=None); if None,
+                [0, n_features//2, n_features] is constructed (2 views).
+                Parameter specifying how to extract the data views from X:
+
+        - views_ind is a 1-D array of sorted integers, the entries
+          indicate the limits of the slices used to extract the views,
+          where view ``n`` is given by
+          ``X[:, views_ind[n]:views_ind[n+1]]``.
+
+    Attributes
+    ----------
+
+    view_ind : array-like of the views' slice boundaries (may be None)
+
+    n_views : int number of views
+
+    shapes_int : list of int, the number of features of each view
+
+    keys : the keys of the input dictionary, when the data comes from a dict
+
+
+    :Example:
+
+    >>> from multimodal.datasets.base import load_dict
+    >>> from multimodal.tests.datasets.get_dataset_path import get_dataset_path
+    >>> from multimodal.datasets.data_sample import DataSample
+    >>> file = 'input_x_dic.pkl'
+    >>> data = load_dict(get_dataset_path(file))
+    >>> print(data.__class__)
+    <class 'dict'>
+    >>> metric = Metriclearn_array(data)
+    >>> metric.shape
+    (120, 240)
+    >>> metric.keys
+    dict_keys([0, 1])
+    >>> metric.shapes_int
+    [120, 120]
+    >>> metric.n_views
+    2
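+
+    A concatenated 2-D array can also be wrapped directly (a sketch with
+    hypothetical view widths ``f0`` and ``f1``)::
+
+        X = np.hstack((view0, view1))                 # shape (n_samples, f0 + f1)
+        metric = Metriclearn_array(X, view_ind=[0, f0, f0 + f1])
+        metric.get_view(1)                            # returns the view1 block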
+
+
+    """
+    def __new__(cls, data, view_ind=None):
+        """Constructor of Metriclearn_array"""
+        shapes_int = []
+        index = 0
+        new_data = np.ndarray([])
+        n_views = len(data)
+        thekeys = None
+        view_ind_self =  None
+        if isinstance(data, dict):
+            n_views = len(data)
+            for key, dat_values in data.items():
+                new_data = cls._populate_new_data(index, dat_values, new_data)
+                shapes_int.append(dat_values.shape[1])
+                index += 1
+            thekeys = data.keys()
+        if isinstance(data, np.ndarray) and view_ind is None and data.ndim == 1:
+            n_views = data.shape[0]
+            for dat_values in data:
+                shapes_int.append(dat_values.shape[1])
+                new_data = cls._populate_new_data(index, dat_values, new_data)
+                index += 1
+        elif isinstance(data, np.ndarray) and data.ndim > 1:
+            if  view_ind is not None:
+                try:
+                    view_ind = np.asarray(view_ind)
+                except Exception:
+                    raise TypeError("view_ind should be a list or a numpy array")
+                n_views = view_ind.shape[0] - 1
+            elif view_ind is None:
+                if data.shape[1] > 1:
+                    view_ind = np.array([0, data.shape[1]//2, data.shape[1]])
+                else:
+                    view_ind = np.array([0, data.shape[1]])
+                view_ind, n_views = cls._validate_views_ind(view_ind,
+                                                            data.shape[1])
+            shapes_int = [in2 - in1 for in1, in2 in zip(view_ind, view_ind[1:])]
+            new_data = data
+            view_ind_self = view_ind
+
+        if hasattr(new_data, "mask"):
+            obj = ma.masked_array(new_data.data, new_data.mask).view(cls)
+        elif hasattr(new_data, "data") and \
+                hasattr(new_data, "shape") and len(new_data.shape) > 0:
+            obj = np.asarray(new_data.data).view(cls)
+        else:
+            obj = np.recarray.__new__(cls, shape=(), dtype=float)
+        obj.views_ind = view_ind_self
+        obj.shapes_int = shapes_int
+        obj.n_views = n_views
+        obj.keys = thekeys
+        return obj
+
+    @staticmethod
+    def _populate_new_data(index, dat_values, new_data):
+        if index == 0:
+            if isinstance(dat_values, (ma.MaskedArray, np.ndarray)):
+                new_data = dat_values
+            else:
+                new_data = dat_values.view(ma.MaskedArray)
+                new_data.mask = ma.nomask
+        else:
+            if isinstance(dat_values, (ma.MaskedArray, np.ndarray)):
+                new_data = ma.hstack((new_data, dat_values))
+            else:
+                new_data = ma.hstack((new_data, dat_values.view(ma.MaskedArray)))
+        return new_data
+
+    def __array_finalize__(self, obj):
+        if obj is None: return
+        super(Metriclearn_array, self).__array_finalize__(obj)
+        self.shapes_int = getattr(obj, 'shapes_int', None)
+        self.n_views = getattr(obj, 'n_views', None)
+        self.keys = getattr(obj, 'keys', None)
+        self.views_ind_self = getattr(obj, 'views_ind_self', None)
+
+    def get_col(self, view, col):
+        start = int(np.sum(np.asarray(self.shapes_int[0: view])))
+        return self.data[:, start + col]
+
+    def get_view(self, view):
+        start = int(np.sum(np.asarray(self.shapes_int[0: view])))
+        stop = int(start + self.shapes_int[view])
+        return self.data[:, start:stop]
+
+    def set_view(self, view, data):
+        start = int(np.sum(np.asarray(self.shapes_int[0: view])))
+        stop = int(start + self.shapes_int[view])
+        if stop - start == data.shape[1] and data.shape[0] == self.data.shape[0]:
+            self.data[:, start:stop] = data
+        else:
+            raise ValueError(
+                "shape of data does not match (%d, %d)" % (self.data.shape[0], stop - start))
+
+    def get_raw(self, view, raw):
+        # return the features of sample (row) ``raw`` restricted to ``view``
+        start = int(np.sum(np.asarray(self.shapes_int[0: view])))
+        stop = int(np.sum(np.asarray(self.shapes_int[0: view + 1])))
+        return self.data[raw, start:stop]
+
+    def add_view(self, v, data):
+        if len(self.shape) > 0:
+            if data.shape[0] == self.data.shape[0]:
+                indice = self.shapes_int[v]
+                np.insert(self.data, data, indice+1, axis=0)
+                self.shapes_int.append(data.shape[1])
+                self.n_views +=1
+        else:
+            raise ValueError("New view can't initialazed")
+
+    def _todict(self):
+        dico = {}
+        for view in range(self.n_views):
+            dico[view] = self.get_view(view)
+        return dico
+
+    @staticmethod
+    def _validate_views_ind(views_ind, n_features):
+        """Ensure proper format for views_ind and return number of views."""
+        views_ind = np.array(views_ind)
+        if np.issubdtype(views_ind.dtype, np.integer) and views_ind.ndim == 1:
+            if np.any(views_ind[:-1] >= views_ind[1:]):
+                raise ValueError("Values in views_ind must be sorted.")
+            if views_ind[0] < 0 or views_ind[-1] > n_features:
+                raise ValueError("Values in views_ind are not in a correct "
+                                 + "range for the provided data.")
+            n_views = views_ind.shape[0]-1
+        else:
+            raise ValueError("The format of views_ind is not "
+                                     + "supported.")
+
+        return (views_ind, n_views)
+
+
+class DataSample(dict):
+    """
+    A DataSample instance
+
+
+    :Example:
+
+    >>> from multimodal.datasets.base import load_dict
+    >>> from multimodal.tests.datasets.get_dataset_path import get_dataset_path
+    >>> from multimodal.datasets.data_sample import DataSample
+    >>> file = 'input_x_dic.pkl'
+    >>> data = load_dict(get_dataset_path(file))
+    >>> print(data.__class__)
+    <class 'dict'>
+    >>> s = DataSample(data)
+    >>> type(s.data)
+    <class 'multimodal.datasets.data_sample.Metriclearn_array'>
+
+
+    Parameters
+    ----------
+    data : dict of array-likes, one entry per view
+
+    kwargs : other arguments
+
+    Attributes
+    ----------
+
+    data : Metriclearn_array built from the input dictionary
+    """
+
+    def __init__(self, data=None, **kwargs):
+
+
+        # The dictionary that contains the sample
+        super(DataSample, self).__init__(kwargs)
+        self._data = None # Metriclearn_array(np.zeros((0,0)))
+        if data is not None:
+            self._data = Metriclearn_array(data)
+
+
+    @property
+    def data(self):
+        """Metriclearn_array"""
+
+        return self._data
+
+    @data.setter
+    def data(self, data):
+        if isinstance(data, (Metriclearn_array, np.ndarray, ma.MaskedArray, np.generic)):
+            self._data = data
+        else:
+            raise TypeError("sample should be a Metriclearn_array.")
+
+
+
+
diff --git a/multimodal/kernels/__init__.py b/multimodal/kernels/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..864783655979849bd72c67e1f456694c071e9c72 100644
--- a/multimodal/kernels/__init__.py
+++ b/multimodal/kernels/__init__.py
@@ -0,0 +1 @@
+__all__ = ['MVML']
diff --git a/multimodal/kernels/lpMKL.py b/multimodal/kernels/lpMKL.py
new file mode 100644
index 0000000000000000000000000000000000000000..20ccb24fb2e85b5ab2ef1d4a1cf2d73fd567743f
--- /dev/null
+++ b/multimodal/kernels/lpMKL.py
@@ -0,0 +1,323 @@
+import numpy as np
+from sklearn.base import BaseEstimator
+from sklearn.base import ClassifierMixin
+from sklearn.utils.multiclass import unique_labels
+from sklearn.utils.validation import check_X_y
+from sklearn.utils.validation  import check_array
+from sklearn.utils.validation  import check_is_fitted
+from multimodal.datasets.data_sample import DataSample, Metriclearn_array
+from multimodal.kernels.mkernel import MKernel
+
+
+class MKL(BaseEstimator, ClassifierMixin, MKernel):
+    """
+    MKL Classifier for multiview learning
+
+    Parameters
+    ----------
+
+    lmbda : float, regularization coefficient applied to the combined kernel
+
+    m_param : float (default : 1.0)
+       value between 0 and 1 indicating level of nyström approximation;
+       1 = no approximation
+
+    kernel : str or list of str (default: "precomputed")
+             Name(s) of the pairwise kernel function(s) used for each view.
+             If precomputed kernels are passed directly to the fit function,
+             keep the default "precomputed".
+             Example: ['rbf', 'additive_chi2', 'linear'] for functions defined
+             in PAIRWISE_KERNEL_FUNCTIONS
+
+    kernel_params : list of dict (default : None)
+                    Parameters of each kernel, e.g. [{'gamma': 50}, ...];
+                    see KERNEL_PARAMS for the admissible parameters
+
+    use_approx : bool (default : True) whether to use the nyström approximation
+                 when m_param < 1
+
+    n_loops : (default 50) number of iterations
+
+    Attributes
+    ----------
+    lmbda : float, regularization coefficient applied to the combined kernel
+
+    m_param : float (default : 1.0)
+       value between 0 and 1 indicating level of nyström approximation;
+       1 = no approximation
+
+    kernel : str or list of str, name(s) of the pairwise kernel function(s)
+             used for each view (default : "precomputed"),
+             e.g. kernel=['rbf', 'rbf'] for the first two views; see
+             PAIRWISE_KERNEL_FUNCTIONS for the available functions
+
+    kernel_params: list of dict of corresponding kernels params KERNEL_PARAMS
+
+    precision : float (default : 1E-4) precision to stop algorithm
+
+    n_loops : number of iterations
+
+    classes_ : array-like containing the unique class labels
+
+    X_ : :class:`multimodal.datasets.data_sample.Metriclearn_array` array of input samples
+
+    K_ : :class:`multimodal.datasets.data_sample.Metriclearn_array` array of processed kernels
+
+    y_ : array-like, shape = (n_samples,)
+         Target values (class labels).
+
+    C : learned solution of the MKL learning problem
+
+    weights : learned weights for combining the views' solutions, learned in MKL
+
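+    A minimal usage sketch (hypothetical two-view data ``view0``/``view1`` and
+    labels ``y``, letting the class compute rbf kernels internally)::
+
+        mkl = MKL(lmbda=0.1, kernel=['rbf', 'rbf'],
+                  kernel_params=[{'gamma': 1}, {'gamma': 1}])
+        mkl.fit({0: view0, 1: view1}, y)
+        y_pred = mkl.predict({0: view0_test, 1: view1_test})
+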
+    """
+    def __init__(self, lmbda, m_param=1.0, kernel="precomputed",
+                 kernel_params=None, use_approx=True, precision=1E-4, n_loops=50):
+        # calculate nyström approximation (if used)
+        self.lmbda = lmbda
+        self.n_loops = n_loops
+        self.use_approx = use_approx
+        self.m_param = m_param
+        self.kernel= kernel
+        self.kernel_params = kernel_params
+        self.precision = precision
+
+    def fit(self, X, y= None, views_ind=None):
+        """
+
+        Parameters
+        ----------
+        X : different formats are supported
+            - Metriclearn_array {array-like, sparse matrix}, shape = (n_samples, n_features)
+              Training multi-view input samples; can also be precomputed kernels when the
+              attribute 'kernel' is set to "precomputed"
+
+            - Dictionary of {array like} with shape = (n_samples, n_features) for each view.
+
+            - Array of {array like} with shape = (n_samples, n_features) for each view.
+
+            - {array like} with shape (n_samples, n_views * n_features), with 'views_ind'
+              different from 'None'
+
+        y : array-like, shape = (n_samples,)
+            Target values (class labels).
+            array of length n_samples containing the classification/regression labels
+            for training data
+
+        views_ind : array-like (default=[0, n_features//2, n_features])
+            Parameter specifying how to extract the data views from X:
+
+            - views_ind is a 1-D array of sorted integers, the entries
+              indicate the limits of the slices used to extract the views,
+              where view ``n`` is given by
+              ``X[:, views_ind[n]:views_ind[n+1]]``.
+
+              With this convention each view is therefore a view (in the NumPy
+              sense) of X and no copy of the data is done.
+
+        Returns
+        -------
+        self : object
+            Returns self.
+        """
+        self.X_, self.K_ = self._global_kernel_transform(X, views_ind=views_ind)
+        self.classes_ = unique_labels(y)
+        check_X_y(self.X_, y)
+        self.y_ = y
+        n = self.K_.shape[0]
+        self._calc_nystrom(self.K_, n)
+        C, weights = self.learn_lpMKL()
+        self.C = C
+        self.weights = weights
+        return self
+
+    def learn_lpMKL(self):
+        """
+        Solve the lp-norm MKL learning problem by alternating updates of the
+        solution C and of the view weights.
+
+        Returns
+        -------
+        (C, weights) : tuple of the learned solution and the learned view weights
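+
+        A sketch of one iteration of the scheme implemented below (p = 2,
+        per-view kernels K_v, view weights w_v)::
+
+            C   <- solve((sum_v w_v * K_v) + lmbda * I, y)
+            f_v <- w_v * K_v @ C
+            w_v <- ||f_v||^(2/(p+1)) / (sum_u ||f_u||^(2p/(p+1)))^(1/p)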
+        """
+        views = self.K_.n_views
+        X = self.K_
+        p = 2
+        n = self.K_.shape[0]
+        weights = np.ones(views) / (views)
+
+        prevalpha = False
+        max_diff = 1
+        if self.precision >= max_diff:
+            raise ValueError("precision (%f) must be lower than %f" % (self.precision, max_diff))
+        kernels = np.zeros((views, n, n))
+        for v in range(0, views):
+            kernels[v, :, :] = np.dot(self.U_dict[v], np.transpose(self.U_dict[v]))
+
+        rounds = 0
+        stuck = False
+        while max_diff > self.precision and rounds < self.n_loops and not stuck:
+
+            # gammas are fixed upon arrival to the loop
+            # -> solve for alpha!
+
+            if self.m_param < 1 and self.use_approx:
+                combined_kernel = np.zeros((n, n))
+                for v in range(0, views):
+                    combined_kernel = combined_kernel + weights[v] * kernels[v]
+            else:
+                combined_kernel = np.zeros((n, n))
+                for v in range(0, views):
+                    combined_kernel = combined_kernel + weights[v]*X.get_view(v)
+            # combined kernel includes the weights
+
+            # C = (combined_kernel + lambda * I)^-1 y
+            C = np.linalg.solve((combined_kernel + self.lmbda * np.eye(n)), self.y_)
+
+            # alpha fixed -> calculate gammas
+            weights_old = weights.copy()
+
+            # first compute ||f_t||^2 for each view t
+            ft2 = np.zeros(views)
+            for v in range(0, views):
+                if self.m_param < 1 and self.use_approx:
+                        # ft2[v,vv] = weights_old[v,vv] * np.dot(np.transpose(C), np.dot(np.dot(np.dot(data.U_dict[v],
+                        #                                                             np.transpose(data.U_dict[v])),
+                        #                                                             np.dot(data.U_dict[vv],
+                        #                                                             np.transpose(data.U_dict[vv]))), C))
+                    ft2[v] = np.linalg.norm(weights_old[v] * np.dot(kernels[v], C))**2
+                else:
+                    ft2[v] = np.linalg.norm(weights_old[v] * np.dot(X.get_view(v), C))**2
+                    # ft2[v] = weights_old[v] * np.dot(np.transpose(C), np.dot(data.kernel_dict[v], C))
+            # normalization term ("downstairs") of the closed-form weight update
+            downstairs = np.sum(ft2 ** (p / (p + 1.0))) ** (1.0 / p)
+            # and then the gammas
+            weights = (ft2 ** (1 / (p + 1))) / downstairs
+
+            # convergence
+            if not prevalpha:  # first time in the loop we don't have a previous alpha value
+                prevalpha = True
+                diff_alpha = 1
+            else:
+                diff_alpha = np.linalg.norm(C_old - C) / np.linalg.norm(C_old)
+                max_diff_gamma_prev = max_diff_gamma
+
+            max_diff_gamma = np.max(np.abs(weights - weights_old))
+
+            # added to prevent a failure when max_diff_gamma_prev is not yet defined
+            if 'max_diff_gamma_prev' not in locals():
+                max_diff_gamma_prev = max_diff_gamma
+            # try to see if convergence is as good as it gets: if it is stuck
+            if max_diff_gamma < 10*self.precision and max_diff_gamma_prev < max_diff_gamma:
+                # if the gamma difference starts to grow we are most definitely stuck!
+                # (this condition determined empirically by running algo and observing the convergence)
+                stuck = True
+            if rounds > 1 and max_diff_gamma - max_diff_gamma_prev > 100*self.precision:
+                # If suddenly the difference starts to grow much
+                stuck = True
+
+            max_diff = np.max([max_diff_gamma, diff_alpha])
+            # print([max_diff_gamma, diff_alpha])  # print if convergence is interesting
+            C_old = C.copy()
+            rounds = rounds + 1
+        # print("\nlearned the weights:")
+        # np.set_printoptions(precision=3, suppress=True)
+        # print(weights)
+        # print("")
+
+        # print if resulting convergence is of interest
+        # print("convergence of ", max_diff, " at step ", rounds, "/500")
+        if stuck:
+            return C_old, weights_old
+        else:
+            return C, weights
+
+
+    def predict(self, X, views_ind=None):
+        """
+
+        Parameters
+        ----------
+
+        X : - Metriclearn_array {array-like, sparse matrix}, shape = (n_samples, n_features)
+              Multi-view test samples; can also be precomputed kernels when the
+              attribute 'kernel' is set to "precomputed"
+
+            - Dictionary of {array like} with shape = (n_samples, n_features) for each view.
+
+            - Array of {array like} with shape = (n_samples, n_features) for each view.
+
+            - {array like} with shape (n_samples, n_views * n_features), with 'views_ind'
+              different from 'None'
+
+        views_ind : array-like (default=[0, n_features//2, n_features])
+            Parameter specifying how to extract the data views from X:
+
+            - views_ind is a 1-D array of sorted integers, the entries
+              indicate the limits of the slices used to extract the views,
+              where view ``n`` is given by
+              ``X[:, views_ind[n]:views_ind[n+1]]``.
+
+              With this convention each view is therefore a view (in the NumPy
+              sense) of X and no copy of the data is done.
+
+        Returns
+        -------
+
+        y : numpy.ndarray, shape = (n_samples,)
+            Predicted classes.
+        """
+        check_is_fitted(self, ['X_', 'C', 'K_', 'y_', 'weights'])
+        X , test_kernels = self._global_kernel_transform(X,
+                                                         views_ind=views_ind,
+                                                         Y=self.X_)
+        check_array(X)
+        C = self.C
+        weights  = self.weights
+        return self.lpMKL_predict(test_kernels, C, weights)
+
+
+    def lpMKL_predict(self, X, C, weights):
+        """
+
+        Parameters
+        ----------
+
+        X : Metriclearn_array of precomputed test kernels
+
+        C : learned solution, as returned by learn_lpMKL
+
+        weights : learned view weights
+
+        Returns
+        -------
+
+        y : numpy.ndarray, shape = (n_samples,)
+            Predicted classes.
+        """
+        views = X.n_views
+        tt = X.shape[0]
+        m = self.K_.shape[0] # self.m_param * n
+
+        #  NO TEST KERNEL APPROXIMATION
+        # kernel = weights[0] * self.data.test_kernel_dict[0]
+        # for v in range(1, views):
+        #     kernel = kernel + weights[v] * self.data.test_kernel_dict[v]
+
+        # TEST KERNEL APPROXIMATION
+        kernel = np.zeros((tt, self.K_.shape[0]))
+        for v in range(0, views):
+            if self.m_param < 1:
+                kernel = kernel + weights[v] * np.dot(np.dot(X.get_view(v)[:, 0:m], self.W_sqrootinv_dict[v]),
+                                                  np.transpose(self.U_dict[v]))
+            else:
+                kernel = kernel + weights[v] * X.get_view(v)
+
+        return np.dot(kernel, C)
+
+
+
diff --git a/multimodal/kernels/mkernel.py b/multimodal/kernels/mkernel.py
new file mode 100644
index 0000000000000000000000000000000000000000..9c4644ce22afc0611d5c846e1cdd42f94cda0e16
--- /dev/null
+++ b/multimodal/kernels/mkernel.py
@@ -0,0 +1,74 @@
+import numpy as np
+import scipy as sp
+from sklearn.metrics.pairwise import pairwise_kernels
+from abc import ABCMeta
+from multimodal.datasets.data_sample import DataSample, Metriclearn_array
+
+
+class MKernel(metaclass=ABCMeta):
+    """
+    Abstract class MKL and MVML should inherit from
+    for methods of transform kernel to/from data
+    """
+
+    def _get_kernel(self, X, Y=None, v=0):
+        if self.kernel_params is not None:
+            if isinstance(self.kernel_params, list):
+                ind = min(v, len(self.kernel_params) - 1)
+                params = self.kernel_params[ind]
+            else:
+                params = self.kernel_params
+        else:
+            params = {}
+        if isinstance(self.kernel, str):
+            met = self.kernel
+        elif isinstance(self.kernel, list):
+            ind = min(v, len(self.kernel) - 1)
+            met = self.kernel[ind]
+        return pairwise_kernels(X, Y, metric=met,
+                                filter_params=True, **params)
+
+    def _global_kernel_transform(self, X, views_ind=None, Y=None):
+        kernel_dict = {}
+
+        X_ = None
+        if Y is None:
+            y = Y
+        if isinstance(X, np.ndarray) and X.ndim == 1:
+            X_= Metriclearn_array(X, views_ind)
+            for v in range(X.shape[0]):
+                if Y is not None:  y = Y.get_view(v) #  y = self._global_check_pairwise(X_, Y, v)
+                kernel_dict[v] = self._get_kernel(X[v], y)
+        elif isinstance(X, dict):
+            X_= Metriclearn_array(X)
+            for v in X.keys():
+                if Y is not None:  y = Y.get_view(v) # y = self._global_check_pairwise(X_, Y, v)
+                kernel_dict[v] = self._get_kernel(X[v], y)
+        elif isinstance(X, np.ndarray) and X.ndim > 1:
+            X_ = Metriclearn_array(X, views_ind)
+            X = X_
+        if isinstance(X, Metriclearn_array):
+            for v in range(X.n_views):
+                if Y is not None:   y = Y.get_view(v) # y = self._global_check_pairwise(X, Y, v)
+                kernel_dict[v] = self._get_kernel(X.get_view(v), y)
+            X_= X
+        if not isinstance(X_, Metriclearn_array):
+            raise TypeError("Input format is not reconized")
+        K_ = Metriclearn_array(kernel_dict)
+        return X_, K_
+
+    def _calc_nystrom(self, kernels, n_approx):
+        # calculates the nyström approximation for all the kernels in the given dictionary
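+        # For each view, with m = n_approx landmark columns E = K[:, :m] and
+        # W = E[:m, :], the factor U is built (through the SVD of W) so that
+        # U U^T = E W^(-1) E^T, i.e. the standard Nyström approximation of K.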
+        self.W_sqrootinv_dict = {}
+        self.U_dict = {}
+        for v in range(kernels.n_views):
+            kernel = kernels.get_view(v)
+            E = kernel[:, 0:n_approx]
+            W = E[0:n_approx, :]
+            Ue, Va, _ = sp.linalg.svd(W)
+            vak = Va[0:n_approx]
+            inVa = np.diag(vak ** (-0.5))
+            U_v = np.dot(E, np.dot(Ue[:, 0:n_approx], inVa))
+            self.U_dict[v] = U_v
+            self.W_sqrootinv_dict[v] = np.dot(Ue[:, 0:n_approx], inVa)
diff --git a/multimodal/kernels/mvml.py b/multimodal/kernels/mvml.py
new file mode 100644
index 0000000000000000000000000000000000000000..d42e9a462863ad20bdfedf2e48b75eed3db91d65
--- /dev/null
+++ b/multimodal/kernels/mvml.py
@@ -0,0 +1,576 @@
+import numpy as np
+import scipy.linalg as spli
+from scipy.sparse.linalg import splu
+from scipy.sparse import csc_matrix
+from sklearn.base import BaseEstimator
+from sklearn.base import ClassifierMixin
+from sklearn.utils.multiclass import unique_labels
+from sklearn.metrics.pairwise import pairwise_kernels
+from sklearn.utils.validation import check_X_y
+from sklearn.utils.validation  import check_array
+from sklearn.metrics.pairwise import check_pairwise_arrays
+from sklearn.utils.validation  import check_is_fitted
+from multimodal.datasets.data_sample import DataSample, Metriclearn_array
+from multimodal.kernels.mkernel import MKernel
+
+"""
+    Copyright (C) 2018  Riikka Huusari
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+
+This file contains algorithms for Multi-View Metric Learning (MVML) as introduced in
+
+Riikka Huusari, Hachem Kadri and Cécile Capponi:
+Multi-View Metric Learning in Vector-Valued Kernel Spaces
+in International Conference on Artificial Intelligence and Statistics (AISTATS) 2018
+
+Usage (see also demo.py for a more detailed example):
+    create a MVML object via:
+        mvml = MVML(lmbda, eta, nystrom_param)
+    learn the model:
+        mvml.fit(kernel_dict, label_vector)
+    the learned model parameters are then available as:
+        A, g, w = mvml.A, mvml.g, mvml.w
+    predict with the model:
+        predictions = mvml.predict(test_kernel_dict)
+
+(parameter names as in the paper)
+
+Code is tested with Python 3.5.2 and numpy 1.12.1
+"""
+
+
+class MVML(MKernel, BaseEstimator, ClassifierMixin):
+    r"""
+    The MVML Classifier
+
+    Parameters
+    ----------
+    lmbda : float, regression regularization parameter (the first regression
+        parameter of the paper)
+
+    eta : float, regularization parameter for learning A (the second regression
+        parameter; not necessary if A is not learned)
+
+    kernel : str or list of str (default: "precomputed")
+             Name(s) of the pairwise kernel function(s) used for each view.
+             If precomputed kernels are passed directly to the fit function,
+             keep the default "precomputed".
+             Example: ['rbf', 'additive_chi2', 'linear'] for functions defined
+             in PAIRWISE_KERNEL_FUNCTIONS
+
+    kernel_params : list of dict (default : None)
+                    Parameters of each kernel, e.g. [{'gamma': 50}, ...];
+                    see KERNEL_PARAMS for the admissible parameters
+
+    nystrom_param: value between 0 and 1 indicating level of nyström approximation; 1 = no approximation
+
+    learn_A :  integer (default 1) choose if A is learned or not: 1 - yes (default);
+               2 - yes, sparse; 3 - no (MVML_Cov); 4 - no (MVML_I)
+
+    learn_w : integer (default 0), 1 if the view weights w are to be learned,
+              0 otherwise (uniform 1/views)
+
+    precision : float (default : 1E-4) precision to stop algorithm
+
+    n_loops : (default 6) number of iterations
+
+
+    Attributes
+    ----------
+    lmbda : float, regression regularization parameter
+
+    eta : float, regularization parameter for learning A
+
+    kernel : str or list of str, name(s) of the pairwise kernel function(s)
+             used for each view (default : "precomputed"),
+             e.g. kernel=['rbf', 'rbf'] for the first two views; see
+             PAIRWISE_KERNEL_FUNCTIONS for the available functions
+
+    kernel_params : list of dict of corresponding kernels params KERNEL_PARAMS
+
+    learn_A : integer, 1 if the metric matrix A is learned (see learn_mvml for
+              the admissible values)
+
+    learn_w : integer, 1 if the view weights w are learned, 0 otherwise
+
+    precision : float (default : 1E-4) precision to stop algorithm
+
+    n_loops : number of iterations
+
+    n_approx : number of samples in approximation, equals n if no approx.
+
+    classes_ : array-like containing the unique class labels
+
+    warning_message : dictionary with warning messages
+
+    X_ : :class:`multimodal.datasets.data_sample.Metriclearn_array` array of input samples
+
+    K_ : :class:`multimodal.datasets.data_sample.Metriclearn_array` array of processed kernels
+
+    y_ : array-like, shape = (n_samples,)
+         Target values (class labels).
+
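+    A minimal usage sketch (hypothetical two-view data ``view0``/``view1`` and
+    labels ``y``, letting the class compute rbf kernels internally)::
+
+        mvml = MVML(lmbda=0.1, eta=1.0, nystrom_param=1.0, kernel=['rbf', 'rbf'])
+        mvml.fit({0: view0, 1: view1}, y)
+        y_pred = mvml.predict({0: view0_test, 1: view1_test})
+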
+    """
+    # r_cond = 10-30
+    def __init__(self, lmbda, eta, nystrom_param, kernel="precomputed",
+                 kernel_params=None,
+                 learn_A=1, learn_w=0, precision=1E-4, n_loops=6):
+
+        # calculate nyström approximation (if used)
+        self.nystrom_param = nystrom_param
+        self.lmbda = lmbda
+        self.eta = eta
+        # self.regression_params = regression_params
+        self.learn_A = learn_A
+        self.learn_w = learn_w
+        self.n_loops = n_loops
+        self.kernel= kernel
+        self.kernel_params = kernel_params
+        self.precision = precision
+        self.warning_message = {}
+
+    def fit(self, X, y= None, views_ind=None):
+        """
+        Fit the MVML classifier
+
+        Parameters
+        ----------
+
+        X : - Metriclearn_array {array-like, sparse matrix}, shape = (n_samples, n_features)
+              Training multi-view input samples; can also be precomputed kernels when the
+              attribute 'kernel' is set to "precomputed"
+            or
+            - Dictionary of {array like} with shape = (n_samples, n_features) for each view.
+            - Array of {array like} with shape = (n_samples, n_features) for each view.
+            - {array like} with shape (n_samples, n_views * n_features), with 'views_ind'
+              different from 'None'
+
+
+        y : array-like, shape = (n_samples,)
+            Target values (class labels).
+            array of length n_samples containing the classification/regression labels
+            for training data
+
+        views_ind : array-like (default=[0, n_features//2, n_features])
+            Parameter specifying how to extract the data views from X:
+
+            - views_ind is a 1-D array of sorted integers, the entries
+              indicate the limits of the slices used to extract the views,
+              where view ``n`` is given by
+              ``X[:, views_ind[n]:views_ind[n+1]]``.
+
+              With this convention each view is therefore a view (in the NumPy
+              sense) of X and no copy of the data is done.
+
+
+        Returns
+        -------
+        self : object
+            Returns self.
+        """
+        # Check that X and y have correct shape
+
+        # Store the classes seen during fit
+
+        self.X_, self.K_= self._global_kernel_transform(X, views_ind=views_ind)
+        check_X_y(self.X_, y)
+
+        self.classes_ = unique_labels(y)
+        self.y_ = y
+
+        # n = X[0].shape[0]
+        n = self.K_.shape[0]
+        self.n_approx = int(np.floor(self.nystrom_param * n))  # number of samples in approximation, equals n if no approx.
+        if self.nystrom_param < 1:
+            self._calc_nystrom(self.K_, self.n_approx)
+        else:
+            self.U_dict = self.K_._todict()
+
+        # Return the classifier
+        self.learn_mvml(learn_A=self.learn_A, learn_w=self.learn_w, n_loops=self.n_loops)
+        if self.warning_message:
+            print("warning appears during fit process", self.warning_message)
+        return self
+
+    def learn_mvml(self, learn_A=1, learn_w=0, n_loops=6):
+        """
+
+        Parameters
+        ----------
+        learn_A: int choose if A is learned or not (default: 1):
+                 1 - yes (default);
+                 2 - yes, sparse;
+                 3 - no (MVML_Cov);
+                 4 - no (MVML_I)
+        learn_w: int choose if w is learned or not (default: 0):
+                 0 - no (uniform 1/views, default setting),
+                 1 - yes
+        n_loops: int maximum number of iterations in MVML, (default: 6)
+                 usually something like default 6 is already converged
+
+        Returns
+        -------
+        tuple (A, g, w) with A (metric matrix - either fixed or learned),
+                             g (solution to learning problem),
+                             w (weights - fixed or learned)
+        """
+        views = len(self.U_dict)
+        n = self.U_dict[0].shape[0]
+        lmbda = self.lmbda
+        if learn_A < 3:
+            eta = self.eta
+
+        # ========= initialize A =========
+
+        # positive definite initialization (with multiplication with the U matrices if using approximation)
+        A = np.zeros((views * self.n_approx, views * self.n_approx))
+        if learn_A < 3:
+            for v in range(views):
+                if self.nystrom_param < 1:
+                    A[v * self.n_approx:(v + 1) * self.n_approx, v * self.n_approx:(v + 1) * self.n_approx] = \
+                        np.dot(np.transpose(self.U_dict[v]), self.U_dict[v])
+                else:
+                    A[v * self.n_approx:(v + 1) * self.n_approx, v * self.n_approx:(v + 1) * self.n_approx] = np.eye(n)
+        # otherwise initialize like this if using MVML_Cov
+        elif learn_A == 3:
+            for v in range(views):
+                for vv in range(views):
+                    if self.nystrom_param < 1:
+                        A[v * self.n_approx:(v + 1) * self.n_approx, vv * self.n_approx:(vv + 1) * self.n_approx] = \
+                            np.dot(np.transpose(self.U_dict[v]), self.U_dict[vv])
+                    else:
+                        A[v * self.n_approx:(v + 1) * self.n_approx, vv * self.n_approx:(vv + 1) * self.n_approx] = \
+                            np.eye(n)
+        # or like this if using MVML_I
+        elif learn_A == 4:
+            for v in range(views):
+                if self.nystrom_param < 1:
+                    A[v * self.n_approx:(v + 1) * self.n_approx, v * self.n_approx:(v + 1) * self.n_approx] = \
+                        np.eye(self.n_approx)
+                else:
+                    # it might be wise to make a dedicated function for MVML_I if using no approximation
+                    # - numerical errors are more probable this way using inverse
+                    A[v * self.n_approx:(v + 1) * self.n_approx, v * self.n_approx:(v + 1) * self.n_approx] = \
+                        spli.pinv(self.U_dict[v])  # U_dict holds whole kernels if no approx
+
+        # ========= initialize w, allocate g =========
+        w = (1 / views) * np.ones((views, 1))
+        g = np.zeros((views * self.n_approx, 1))
+
+        # ========= learn =========
+        loop_counter = 0
+        while True:
+            if loop_counter > 0:
+                g_prev = np.copy(g)
+                A_prev = np.copy(A)
+                w_prev = np.copy(w)
+
+            # ========= update g =========
+
+            # first invert A
+            try:
+                # A_inv = np.linalg.pinv(A + 1e-09 * np.eye(views * self.n_approx))
+                cond_A = np.linalg.cond(A + 1e-08 * np.eye(views * self.n_approx))
+                if cond_A < 10:
+                    A_inv = spli.pinv(A + 1e-8 * np.eye(views * self.n_approx))
+                else:
+                    # A_inv = self._inverse_precond_LU(A + 1e-8 * np.eye(views * self.n_approx), pos="precond_A") # self._inverse_precond_jacobi(A + 1e-8 * np.eye(views * self.n_approx), pos="precond_A")
+                    A_inv = self._inv_best_precond(A + 1e-8 * np.eye(views * self.n_approx), pos="precond_A")
+            except spli.LinAlgError:
+                self.warning_message["LinAlgError"] = self.warning_message.get("LinAlgError", 0) + 1
+                try:
+                    A_inv = spli.pinv(A + 1e-07 * np.eye(views * self.n_approx))
+                except spli.LinAlgError:
+                    try:
+                        A_inv = spli.pinv(A + 1e-06 * np.eye(views * self.n_approx)) # , rcond=self.r_cond*minA
+                    except ValueError:
+                        self.warning_message["ValueError"] = self.warning_message.get("ValueError", 0) + 1
+                        return A_prev, g_prev
+            except ValueError:
+                self.warning_message["ValueError"] = self.warning_message.get("ValueError", 0) + 1
+                return A_prev, g_prev
+            # print("A_inv ",np.sum(A_inv))
+            # then calculate g (block-sparse multiplications in loop) using A_inv
+            for v in range(views):
+                for vv in range(views):
+                    A_inv[v * self.n_approx:(v + 1) * self.n_approx, vv * self.n_approx:(vv + 1) * self.n_approx] = \
+                        w[v] * w[vv] * np.dot(np.transpose(self.U_dict[v]), self.U_dict[vv]) + \
+                        lmbda * A_inv[v * self.n_approx:(v + 1) * self.n_approx,
+                                      vv * self.n_approx:(vv + 1) * self.n_approx]
+                g[v * self.n_approx:(v + 1) * self.n_approx, 0] = np.dot(w[v] * np.transpose(self.U_dict[v]), self.y_)
+            try:
+                # minA_inv = np.min(np.absolute(A_inv)) , rcond=self.r_cond*minA_inv
+                # here A_inv isn't actually inverse of A (changed in above loop)
+                if np.linalg.cond(A_inv) < 10:
+                   g = np.dot(spli.pinv(A_inv), g)
+                else:
+                    # g = np.dot(self._inverse_precond_LU(A_inv, pos="precond_A_1"), g)
+                    g = np.dot(self._inv_best_precond(A_inv, pos="precond_A_1"), g)
+            except spli.LinAlgError:
+                self.warning_message["LinAlgError"] = self.warning_message.get("LinAlgError", 0) + 1
+                g = spli.solve(A_inv, g)
+
+            # ========= check convergence =========
+
+            if learn_A > 2 and learn_w != 1:  # stop at once if only g is to be learned
+                break
+
+            if loop_counter > 0:
+
+                # convergence criteria
+                g_diff = np.linalg.norm(g - g_prev) / np.linalg.norm(g_prev)
+                A_diff = np.linalg.norm(A - A_prev, ord='fro') / np.linalg.norm(A_prev, ord='fro')
+                if g_diff < self.precision and A_diff < self.precision:
+                    break
+
+            if loop_counter >= n_loops:  # failsafe
+                break
+
+            # ========= update A =========
+            if learn_A == 1:
+                A = self._learn_A_func(A, g, lmbda, eta)
+            elif learn_A == 2:
+                A = self._learn_blocksparse_A(A, g, views, self.n_approx, lmbda, eta)
+
+            # ========= update w =========
+            if learn_w == 1:
+                Z = np.zeros((n, views))
+                for v in range(views):
+                    Z[:, v] = np.dot(self.U_dict[v], g[v * self.n_approx:(v + 1) * self.n_approx]).ravel()
+                w = np.dot(spli.pinv(np.dot(np.transpose(Z), Z)), np.dot(np.transpose(Z), self.y_))
+            loop_counter += 1
+        self.g = g
+        self.w = w
+        self.A = A
+        return A, g, w
+
+    def _inv_best_precond(self, A, pos="precond_A"):
+        J_1 = np.diag(1.0/np.diag(A))
+        Pre_J = np.dot(J_1, A)
+        Pm, L, U = spli.lu(A)
+        M = spli.inv(np.dot(L, U))
+        Pre_lu = np.dot(M, A)
+        # print("cond a", np.linalg.cond(A))
+        # print("cond Pre_J", np.linalg.cond(Pre_J))
+        # print("cond Pre_lu", np.linalg.cond(Pre_lu))
+        if np.linalg.cond(A) > np.linalg.cond(Pre_J) and np.linalg.cond(Pre_J) <= np.linalg.cond(Pre_lu):
+            P_inv = spli.pinv(Pre_J)
+            A_inv = np.dot(P_inv,  J_1)
+            self.warning_message[pos] = self.warning_message.get(pos, 0) + 1
+        elif  np.linalg.cond(Pre_lu) < np.linalg.cond(A):
+            P_inv = spli.pinv(Pre_lu)
+            A_inv = np.dot(P_inv,  M)
+            self.warning_message[pos] = self.warning_message.get(pos, 0) + 1
+        else:
+            A_inv = spli.pinv(A)
+        return A_inv
+
+    def _inverse_precond_jacobi(self, A, pos="precond_A"):
+        J_1 = np.diag(1.0/np.diag(A))
+        # J_1 = np.linalg.inv(J)
+        P = np.dot(J_1, A)
+        if np.linalg.cond(A) > np.linalg.cond(P):
+            P_inv = spli.pinv(P)
+            A_inv = np.dot(P_inv,  J_1)
+            self.warning_message[pos] = self.warning_message.get(pos, 0) + 1
+        else:
+            A_inv = self._inverse_precond_LU(A, pos=pos)
+
+        return A_inv
+
+    def _inverse_precond_LU(self, A, pos="precond_A"):
+        """Pseudo-invert A through an LU-based preconditioner when it improves
+        the conditioning; otherwise use a plain pseudo-inverse."""
+        Pm, L, U = spli.lu(A)
+        M = spli.inv(np.dot(L, U))
+        P = np.dot(M, A)
+        if np.linalg.cond(A) > np.linalg.cond(P):
+            P_inv = spli.pinv(P)
+            A_inv = np.dot(P_inv, M)
+            self.warning_message[pos] = self.warning_message.get(pos, 0) + 1
+        else:
+            A_inv = spli.pinv(A)
+        return A_inv
+
+    def predict(self, X, views_ind=None):
+        """
+
+        Parameters
+        ----------
+        X : different formats are supported
+            - Metriclearn_array {array-like, sparse matrix}, shape = (n_samples, n_features)
+              Training multi-view input samples. can be also Kernel where attibute 'kernel'
+              is set to precompute "precomputed"
+
+            - Dictionary of {array like} with shape = (n_samples, n_features)  for multi-view
+              for each view.
+            - Array of {array like} with shape = (n_samples, n_features)  for multi-view
+              for each view.
+            - {array like} with (n_samples, nviews *  n_features) with 'views_ind' diferent to 'None'
+
+        views_ind : array-like (default=[0, n_features//2, n_features])
+            Paramater specifying how to extract the data views from X:
+
+            - views_ind is a 1-D array of sorted integers, the entries
+              indicate the limits of the slices used to extract the views,
+              where view ``n`` is given by
+              ``X[:, views_ind[n]:views_ind[n+1]]``.
+
+              With this convention each view is therefore a view (in the NumPy
+              sense) of X and no copy of the data is done.
+
+        Returns
+        -------
+        y : numpy.ndarray, shape = (n_samples,)
+            Predicted classes.
+        """
+        check_is_fitted(self, ['X_', 'U_dict', 'K_', 'y_'])
+        X, test_kernels = self._global_kernel_transform(X,
+                                                         views_ind=views_ind,
+                                                         Y=self.X_)
+        check_array(X)
+        return self.predict_mvml(test_kernels, self.g, self.w)
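+
+    # Hedged usage sketch (variable names and the call to fit() are assumptions
+    # for illustration, not taken from this file): predict() accepts the same
+    # multi-view formats documented above, e.g. a dictionary of per-view arrays.
+    #
+    #     # est.fit({0: X0_train, 1: X1_train}, y_train)
+    #     # y_pred = est.predict({0: X0_test, 1: X1_test})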
+
+    def predict_mvml(self, test_kernels, g, w):
+        """
+
+        Parameters
+        ----------
+        test_kernels : `Metriclearn_array` of test kernels
+
+        g : learning solution that is learned in learn_mvml
+
+        w : weights for combining the solutions of views, learned in learn_mvml
+
+        Returns
+        -------
+        numpy.ndarray, shape = (n_samples,) of test_kernels
+            Predicted classes.
+
+        """
+        views = len(self.U_dict)
+        # t = test_kernels[0].shape[0]
+        t = test_kernels.shape[0]
+        K = np.zeros((t, views * self.n_approx))
+        for v in range(views):
+            if self.nystrom_param < 1:
+                K[:, v * self.n_approx:(v + 1) * self.n_approx] = w[v] * \
+                                                                  np.dot(test_kernels.get_view(v)[:, 0:self.n_approx],
+                                                                         self.W_sqrootinv_dict[v])
+            else:
+                K[:, v * self.n_approx : (v + 1) * self.n_approx] = w[v] * test_kernels.get_view(v)
+
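+        # The blocks assembled above give K = [w_0 K^(0) | ... | w_{V-1} K^(V-1)],
+        # built from the (possibly Nystrom-compressed) test kernels, so the
+        # product below equals sum_v w_v * K^(v).dot(g^(v)), where g^(v) is the
+        # n_approx-long slice of g belonging to view v.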
+        return np.dot(K, g)
+
+    def _learn_A_func(self, A, g, lmbda, eta):
+        # basic gradient descent
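+        # The fixed-point iteration below implements
+        #     A <- (1 - 2*stepsize*eta) * A + stepsize*lmbda * pinv(A) g g^T pinv(A)
+        # with stepsize*eta kept below 0.5; it stops once the relative change in
+        # A falls below 1e-5 or after 100 loops.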
+        stepsize = 0.5
+        if stepsize*eta >= 0.5:
+            stepsize = 0.9*(1/(2*eta))  # make stepsize*eta < 0.5
+
+        loops = 0
+        not_converged = True
+        while not_converged:
+            A_prev = np.copy(A)
+            # minA = np.min(np.absolute(A)) , rcond=self.r_cond*minA
+            A_pinv = spli.pinv(A)
+            A = (1-2*stepsize*eta)*A + stepsize*lmbda*np.dot(np.dot(A_pinv, g), np.dot(np.transpose(g), A_pinv))
+
+            if loops > 0:
+                prev_diff = diff
+            diff = np.linalg.norm(A - A_prev) / np.linalg.norm(A_prev)
+            if loops > 0 and prev_diff > diff:
+                A = A_prev
+                stepsize = stepsize*0.1
+            if diff < 1e-5:
+                not_converged = False
+            if loops > 100:
+                not_converged = False
+            loops += 1
+
+        return A
+
+    def _learn_blocksparse_A(self, A, g, views, m, lmbda, eta):
+
+        # proximal gradient update method
+        converged = False
+        rounds = 0
+
+        L = lmbda * np.linalg.norm(np.dot(g, g.T))
+        # print("L ", L)
+
+        while not converged and rounds < 100:
+            # no line search - this has worked well enough experimentally
+            A = self._proximal_update(A, views, m, L, g, lmbda, eta)
+
+            # convergence
+            if rounds > 0:
+                A_diff = np.linalg.norm(A - A_prev) / np.linalg.norm(A_prev)
+
+                if A_diff < 1e-3:
+                    converged = True
+            A_prev = np.copy(A)
+            rounds += 1
+
+        return A
+
+    def _proximal_update(self, A_prev, views, m, L, D, lmbda, gamma):
+
+        # proximal update
+
+        # the inverse cannot always be computed reliably; in that case just
+        # return the previous iterate and end the search
+        try:
+            # minA_inv = np.min(np.absolute(A_prev)) , rcond=self.r_cond*minA_inv
+            A_prev_inv = spli.pinv(A_prev)
+        except spli.LinAlgError:
+            try:
+                A_prev_inv = spli.pinv(A_prev + 1e-6 * np.eye(views * m))
+            except spli.LinAlgError:
+                return A_prev
+            except ValueError:
+                return A_prev
+        except ValueError:
+            return A_prev
+
+        if np.any(np.isnan(A_prev_inv)):
+            # just in case the inverse didn't return a proper solution (happened once or twice)
+            return A_prev
+
+        A_tmp = A_prev + (lmbda / L) * np.dot(np.dot(A_prev_inv.T, D), np.dot(np.transpose(D), A_prev_inv.T))
+
+        # if there is one small negative eigenvalue this gets rid of it
+        try:
+            val, vec = spli.eigh(A_tmp)
+        except spli.LinAlgError:
+            return A_prev
+        except ValueError:
+            return A_prev
+        val[val < 0] = 0
+
+        A_tmp = np.dot(vec, np.dot(np.diag(val), np.transpose(vec)))
+        A_new = np.zeros((views*m, views*m))
+
+        # proximal update, group by group (symmetric!)
+        for v in range(views):
+            for vv in range(v + 1):
+                if v != vv:
+                    if np.linalg.norm(A_tmp[v * m:(v + 1) * m, vv * m:(vv + 1) * m]) != 0:
+                        multiplier = 1 - gamma / (2 * np.linalg.norm(A_tmp[v * m:(v + 1) * m, vv * m:(vv + 1) * m]))
+                        if multiplier > 0:
+                            A_new[v * m:(v + 1) * m, vv * m:(vv + 1) * m] = multiplier * A_tmp[v * m:(v + 1) * m,
+                                                                                               vv * m:(vv + 1) * m]
+                            A_new[vv * m:(vv + 1) * m, v * m:(v + 1) * m] = multiplier * A_tmp[vv * m:(vv + 1) * m,
+                                                                                               v * m:(v + 1) * m]
+                else:
+                    if (np.linalg.norm(A_tmp[v * m:(v + 1) * m, v * m:(v + 1) * m])) != 0:
+                        multiplier = 1 - gamma / (np.linalg.norm(A_tmp[v * m:(v + 1) * m, v * m:(v + 1) * m]))
+                        if multiplier > 0:
+                            A_new[v * m:(v + 1) * m, v * m:(v + 1) * m] = multiplier * A_tmp[v * m:(v + 1) * m,
+                                                                                             v * m:(v + 1) * m]
+
+        return A_new
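+
+    # A minimal sketch (illustrative only) of the group soft-thresholding applied
+    # above to each m x m block B of A_tmp: off-diagonal blocks are scaled by
+    # max(0, 1 - gamma / (2 * ||B||_F)) and diagonal blocks by
+    # max(0, 1 - gamma / ||B||_F), which is the proximal step behind the
+    # block-sparsity of A.
+    #
+    #     import numpy as np
+    #     def prox_block(B, gamma, diagonal=False):
+    #         norm = np.linalg.norm(B)
+    #         if norm == 0:
+    #             return np.zeros_like(B)
+    #         scale = 1.0 - gamma / (norm if diagonal else 2.0 * norm)
+    #         return max(scale, 0.0) * B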
diff --git a/multimodal/tests/__init__.py b/multimodal/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/multimodal/tests/__pycache__/__init__.cpython-36.pyc b/multimodal/tests/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1e99420542d93339ad39b73b8ba0b9c48bc020ac
Binary files /dev/null and b/multimodal/tests/__pycache__/__init__.cpython-36.pyc differ
diff --git a/multimodal/tests/data/__init__.py b/multimodal/tests/data/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/multimodal/tests/data/__pycache__/__init__.cpython-36.pyc b/multimodal/tests/data/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e5ff6f0d7034f50a62323bf1f47e69b0ec338479
Binary files /dev/null and b/multimodal/tests/data/__pycache__/__init__.cpython-36.pyc differ
diff --git a/multimodal/tests/data/__pycache__/get_dataset_path.cpython-36.pyc b/multimodal/tests/data/__pycache__/get_dataset_path.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..99a01a1c57ae74bcb18ca8377f9c799fc5fb125a
Binary files /dev/null and b/multimodal/tests/data/__pycache__/get_dataset_path.cpython-36.pyc differ
diff --git a/multimodal/tests/data/dec_iris.npy b/multimodal/tests/data/dec_iris.npy
new file mode 100644
index 0000000000000000000000000000000000000000..8730186b3909d230155373402ce139497d6494ac
Binary files /dev/null and b/multimodal/tests/data/dec_iris.npy differ
diff --git a/multimodal/tests/data/get_dataset_path.py b/multimodal/tests/data/get_dataset_path.py
new file mode 100644
index 0000000000000000000000000000000000000000..f533aa84910b5ee415db222072d88b66620754ba
--- /dev/null
+++ b/multimodal/tests/data/get_dataset_path.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+
+
+from __future__ import print_function, division
+
+import os
+
+
+def get_dataset_path(filename):
+    """Return the absolute path of a reference dataset for tests
+
+    - Input parameter:
+
+    :param str filename: File name of the file containing reference data
+        for tests (which must be in ``skgilearn/tests/datasets/``)
+
+    - Output parameters:
+
+    :returns: The absolute path where the file with name **filename** is stored
+    :rtype: str
+
+    """
+
+    datasets_path = os.path.dirname(os.path.abspath(__file__))
+    return os.path.join(datasets_path, filename)
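+
+
+# Illustrative usage (``dec_iris.npy`` is one of the test data files added in
+# this change; numpy is assumed to be imported as np):
+#     path = get_dataset_path("dec_iris.npy")
+#     data = np.load(path)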
diff --git a/multimodal/tests/datasets/X.npy b/multimodal/tests/datasets/X.npy
new file mode 100644
index 0000000000000000000000000000000000000000..f3d3a8d60d2b64d576bff5e5a8f6df787bbcf27c
Binary files /dev/null and b/multimodal/tests/datasets/X.npy differ
diff --git a/multimodal/tests/datasets/__init__.py b/multimodal/tests/datasets/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/multimodal/tests/datasets/__pycache__/__init__.cpython-36.pyc b/multimodal/tests/datasets/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a2c8d7aba3ba76053fed1475db7036e08f9fc6bd
Binary files /dev/null and b/multimodal/tests/datasets/__pycache__/__init__.cpython-36.pyc differ
diff --git a/multimodal/tests/datasets/__pycache__/get_dataset_path.cpython-36.pyc b/multimodal/tests/datasets/__pycache__/get_dataset_path.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..72e442b9eaa3fe3d85f23eac5c5fcf430c5cedf9
Binary files /dev/null and b/multimodal/tests/datasets/__pycache__/get_dataset_path.cpython-36.pyc differ
diff --git a/multimodal/tests/datasets/a.csv b/multimodal/tests/datasets/a.csv
new file mode 100644
index 0000000000000000000000000000000000000000..06b68a314070820cd1a1a5f2f3d52c8e21e3fc42
--- /dev/null
+++ b/multimodal/tests/datasets/a.csv
@@ -0,0 +1,144 @@
+5.902704967182412332e+01 3.269623505767488059e+00 -1.000498277669793046e+00 2.043168347281115516e+00 -2.224389779746056561e+00 -1.617229398617517688e+00 1.107848428235957305e+00 3.608218193420686393e-01 1.152507950352235286e-02 -4.338289847711910840e-01 -3.401275733838883819e-01 5.689215050599324419e-01 -3.097027278861538280e-01 -2.833415285070417511e-02 -1.040460600700565397e-01 1.668967666699228969e-01 -3.655751626048145235e-03 -6.152690891532206635e-02 -5.079790817042751511e-02 1.036981279680113943e-01 2.487505367640383946e-03 1.864175632519755790e-03 1.337781488808269248e-01 -3.737769550000364510e-02 -2.038035250050124897e-02 3.317323145375998539e-02 1.105579576228833509e-02 -2.141248002026539636e-03 2.045836320046340653e-02 1.007175750097230984e-03 4.439784802908462377e-03 -1.685676718110984754e-02 -4.656950705253447634e-03 1.815515545207396341e-03 1.169220407981105304e-02 1.148060084547841177e-02 -5.564095072554813524e-03 -2.677025192730993902e-03 2.439087679672048183e-03 4.431950574244813632e-03 5.046338254006689852e-03 2.213997132808996540e-03 5.193463389783090946e-03 5.660849797375821536e-03 2.464094766111543135e-03 2.981491338960395952e-03 -5.039407332605000921e-04 1.104232973668697177e-04 -6.429725104459368306e-04 1.360132763429056308e-03 1.775117901592001245e-03 -1.312927549033827924e-03 -1.444024689562493609e-03 8.961119087039794671e-04 -2.864247778127231589e-04 -8.464465279258864307e-04 -9.756760977634742647e-04 -3.647752939562452941e-05 4.278344264430664436e-04 -6.169017101996530960e-04 1.079130082100790552e-03 6.190047939003729977e-04 2.818281733047701311e-05 9.609878685960862848e-04 2.295267939094641546e-04 5.580392661284996371e-04 -5.315420357003356735e-04 3.105891209168311308e-04 -3.727824087911550568e-04 -4.065618679394270886e-04 1.284437493004002675e-04 5.971855691692778257e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+3.269623505767488059e+00 2.873950684877809536e+01 1.371677256754226581e+00 8.482474478023823705e-01 6.717292010840882899e-01 5.897599948694107752e-01 -1.777568801852293656e-01 6.776515627863651803e-01 4.344369957781281655e-01 -2.526889519151093166e-01 8.326195591152033004e-02 -7.520827094709188476e-02 6.943652510527546784e-02 1.362615960812576521e-01 2.427410779932691332e-01 3.179616379126863901e-01 9.336235545642160294e-02 2.218548551035433303e-02 -1.018694147650027332e-01 -1.462219314022686692e-02 -8.515627328232243387e-02 -1.667165495639554018e-02 -6.646628461108314290e-02 1.002835273138996501e-02 -1.038734072738129594e-02 -3.065052472017425894e-02 -1.163394565331549689e-02 4.204954623394343421e-02 -4.170448371326857667e-02 1.087817630512656619e-02 -1.732046916316509104e-03 1.203341128201278372e-02 9.836552638151495765e-04 9.960975155926839533e-03 3.138547620051086642e-03 -7.358525800900077470e-03 1.092238113838667809e-03 -2.239600529002745265e-03 4.752863203526146857e-03 -5.405994964182561820e-03 -8.217651829980714660e-04 -2.819862669663264314e-03 1.845953114194293565e-03 -3.546134342665884214e-03 1.198254889496519352e-03 -1.700863377455956189e-03 -2.232496914427477795e-03 3.259613064902853997e-04 -5.442695775069143630e-04 1.975837796380677802e-03 -2.876295397663526307e-03 -9.324694243844581371e-04 -7.700508081800569362e-04 -4.251240495769562327e-04 2.447128421590097488e-04 1.229124310444356695e-03 1.206556055791778166e-04 -5.982927810114174825e-04 1.164977902888665536e-04 3.934508564326714671e-04 -3.678931779718224424e-04 -8.986558550858034277e-04 6.643350647300518864e-04 -8.422135278248198435e-04 2.382580677210656544e-05 -3.531540171926622791e-04 2.682198201414081164e-04 -6.816781157892096155e-04 5.838583736189928734e-04 8.236515472408407828e-05 -1.659019626392819633e-04 -1.190544512946899207e-04 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-1.000498277669793046e+00 1.371677256754226581e+00 1.237996381768079956e+01 5.630295813963484552e-02 -1.856290480693051359e-01 -3.214854019179514966e-01 -3.286176203232473170e-01 -1.370422069996508341e-01 1.045631150388001002e-01 -3.344536386260400507e-01 1.741287783241217968e-01 -2.664143284336018547e-01 -4.896028240900517081e-02 2.232688657510112362e-02 1.157145018649991253e-01 1.511613113560522081e-01 1.591003650484551848e-02 2.658138484315402880e-02 -5.248662567188168253e-03 3.464263962777509864e-02 -1.232921929371953718e-02 -9.134557837192599714e-03 -2.095143721411957347e-03 -3.577813510545549847e-03 -2.643522848780513121e-02 -9.681710450184304387e-03 -1.403434944757496645e-02 1.104551920852550433e-02 -2.469024459754528070e-02 1.663954052897647848e-02 -5.035082209166950006e-03 5.605064729526872819e-03 -1.215676722061089475e-03 2.807330023336020424e-03 8.827408786928744519e-03 5.929448120624693317e-04 -4.846002188642694968e-04 -2.030774166295079650e-03 4.196060681700150910e-03 -3.109451772478799732e-03 1.415284352986231333e-03 -1.889606872391593915e-03 1.219009408413760124e-03 -1.837120395731124331e-03 -6.057768759081008604e-04 -2.839874995105646149e-04 -1.557658019967413966e-03 5.622254273715385133e-04 -8.241478693521974439e-04 1.502217389674169153e-03 -1.202958423119133329e-03 -6.476197422369884453e-04 -1.368272532333382691e-03 -1.019984835938101114e-03 3.933241409104297720e-04 8.560688600932326144e-04 -5.614850361310474052e-06 -5.169084691654691208e-04 1.654993333653115880e-04 3.046880790726673124e-04 -2.135306201034897398e-04 -5.162774997913627999e-04 4.125334398601202533e-04 -2.122226495076474830e-04 8.274816585958972204e-05 -2.181493946457583211e-04 5.357926619562840943e-05 -2.982760727724785696e-04 3.905792364323644656e-04 -3.053091997332592019e-05 5.285111519227582232e-05 -1.028831685821939240e-04 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+2.043168347281115516e+00 8.482474478023823705e-01 5.630295813963484552e-02 9.096424665111690899e+00 -2.536848762076694408e-01 1.125088407078882607e-01 6.210233011054276941e-01 4.853613861574486710e-01 -1.792176994200362916e-01 2.780445565390611898e-02 6.493542518831344479e-01 -2.386087842797707803e-01 1.225924285017379323e-01 1.439259958948584384e-01 2.265742490552618282e-01 3.395860575355240385e-02 -1.319189431473041449e-03 -1.743981455678999442e-03 -2.460260664839713021e-02 -4.785921068039373366e-02 -1.122643117203653285e-02 -3.568183323302856653e-03 -9.157885447170877047e-02 3.430652428805573001e-02 3.498016579144666745e-03 -3.151761163000381394e-02 -1.202845038385303746e-02 -7.458392146961832780e-03 -3.382011397184808005e-02 2.268132337456118738e-02 -1.953464482207950201e-02 2.246783238280236975e-02 4.576317146986056024e-03 6.584277012313372686e-04 -1.042763728856110385e-02 -6.769232956996613058e-03 -1.376551906797528506e-03 -8.247915704885734394e-04 5.092052409944678587e-03 -4.999359320387679155e-03 -3.325428224714398195e-05 -9.544811823055334332e-05 -1.535916539039981071e-03 -2.922443301953129863e-03 -1.565931142795850021e-03 -1.922747140009082494e-03 -1.587669058724534669e-03 -4.004820107785562349e-04 -1.574875083945025079e-03 1.449883573711475688e-04 -2.811842460202660862e-03 -2.902965304598182153e-04 5.021273242985428795e-04 -2.414618813575055301e-03 1.264376639758373337e-04 1.797333339895381939e-03 1.109398921968313047e-03 -5.955108024460374020e-05 -9.319197974060024274e-06 -2.081231757659247837e-05 -1.166760509179642870e-03 -1.006034125247487997e-03 5.069567595259139806e-04 -5.895327056902730694e-04 -2.587150484819884084e-04 -4.634401566693654908e-04 3.674312314872735631e-04 -2.448136261292540509e-04 6.904644320631980530e-04 4.044698707275740470e-04 -2.093652980207591518e-04 -2.030131803318480489e-04 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-2.224389779746056561e+00 6.717292010840882899e-01 -1.856290480693051359e-01 -2.536848762076694408e-01 4.421270030412231655e+00 3.833489585804445987e-01 -2.165445035046260347e-01 2.731920882388166327e-01 -1.945061705966302790e-02 4.201754769936406203e-02 6.167707962710364256e-02 -1.072760989612006749e-01 1.305190989575184535e-01 2.326801245543231450e-02 1.425708935287473000e-02 3.827659815208730998e-02 1.150418195994163707e-02 2.496776811789163872e-02 -2.123331875180941436e-02 -2.949796384868130436e-02 -2.769334623211424512e-02 -2.084246841584775495e-02 -5.449407848789514924e-02 4.669923561202599452e-03 -5.450203698759430344e-03 -3.004664780039125512e-03 -6.122062869122686310e-03 6.233805808468058374e-03 -1.059148907796189122e-02 -5.415285995096942064e-03 -1.110247762279015400e-02 8.572812394065364508e-03 7.128708546968626518e-04 2.996796720101171322e-03 -1.741763489132111238e-03 -6.088494511819885803e-03 4.008696944312153607e-03 1.222558514807537636e-03 -2.218909087883145451e-03 -3.208002769059118634e-03 -4.632979544627331800e-04 -1.617040938609345772e-03 -1.546519078914351504e-03 -3.049026376504262482e-03 -2.268542944204944393e-03 -1.399779265698711380e-03 -7.249757762839474868e-04 -1.886377365029176978e-04 -4.308194383398527335e-04 6.515212322346771931e-04 -1.402266046738543043e-03 4.319301770724475451e-04 1.020509572499054277e-04 -6.796400945814878424e-04 4.611333450761655769e-04 7.396731215819471374e-04 6.127183030049979887e-04 -3.875612569340887762e-04 -1.046716921732588962e-04 3.382293471064566994e-04 -6.319629589999666962e-04 -5.205848245358031751e-04 -2.277651550409889435e-04 -3.012706984038758767e-04 -1.229826903864710236e-04 -3.414273330987119119e-04 2.273362831969488219e-04 -1.444089206042048783e-04 3.502878522383107000e-04 2.218242144966101937e-04 -4.713838284201491395e-05 -8.863960017866568026e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-1.617229398617517688e+00 5.897599948694107752e-01 -3.214854019179514966e-01 1.125088407078882607e-01 3.833489585804445987e-01 1.913695862890308907e+00 -2.913760334358446724e-01 4.157229567063065456e-01 -5.666401363447104433e-02 -6.696079659257146884e-03 4.842902154225438294e-02 -1.161629731783638403e-01 8.263368302807722054e-02 4.732740217156439916e-02 1.202511649782939673e-01 1.749076915000560145e-02 3.586971688656934165e-02 8.134442904583346137e-03 -5.452149313153071067e-03 -1.826690131046378868e-02 -2.549651563684083438e-02 1.519359913114586838e-03 -4.819508472752650347e-02 6.803095351806253722e-03 1.413744038755732112e-03 -1.267078478199945221e-02 -8.275060309749398754e-03 1.087132752046652759e-02 -1.981364156951569491e-02 5.835644220262561736e-03 -3.886638133343893204e-03 7.079470819375142422e-03 -2.979997323784594368e-04 3.454862717470914011e-04 2.063169313516587263e-04 -3.435342426303449222e-03 2.463836144642395781e-03 -1.790756573481944343e-04 1.550999229382867906e-03 -3.578119726556800665e-03 -2.335462239830079795e-03 -1.556059883643280987e-03 -9.883812527745642826e-04 -3.081589754149671560e-03 -3.771722094229367286e-04 -1.269988977558030724e-03 -5.734387254282873264e-04 3.694338794210490153e-04 -3.640846215850657462e-05 2.717591615092382188e-04 -1.116892764687757314e-03 8.581806776768562540e-05 -2.404760466676691521e-04 -5.551853017720563771e-04 1.994072185424956747e-04 6.421932874547304788e-04 2.268459615907873707e-04 -2.223383411733124862e-04 -1.154801572722961508e-04 3.883024271859300590e-04 -3.930092242162990554e-04 -4.734285813143009167e-04 2.751146519578616346e-04 -5.347046413702953018e-04 -7.542974327072151297e-06 -2.967869383235858236e-04 2.297955253836822293e-04 -3.230854200382767580e-04 2.919044781840080178e-04 8.333152384976322876e-05 -1.263008079194895969e-05 -5.500167373470180108e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.107848428235957305e+00 -1.777568801852293656e-01 -3.286176203232473170e-01 6.210233011054276941e-01 -2.165445035046260347e-01 -2.913760334358446724e-01 1.565842740555137391e+00 -2.718110954606955398e-01 -2.064771741008611730e-02 1.499066463870979693e-01 1.207251220858855147e-02 -1.002104840612136295e-03 5.285108954674297248e-03 -3.342228005487063967e-03 -5.271049712093802547e-02 -4.156031192112023126e-02 1.486084946736747001e-02 -1.360019803585902536e-03 2.434158737318100826e-02 -1.225357831199443527e-03 1.885404382326395095e-02 -5.423311931582636657e-03 4.046117822688864198e-05 -7.640766951509455653e-03 -6.524746431913095238e-03 5.801337548866239945e-03 -1.968349866307412838e-03 -1.804186365578614240e-02 7.183562127396692261e-03 -3.678048081015129289e-05 -7.442906987078637578e-03 -6.222918040796957255e-05 -1.296914580242981624e-03 -3.988051704029558879e-03 -3.892401569001677241e-04 2.757588195258237149e-03 1.897077921357448612e-04 9.668611821740146343e-06 -6.229138107702427221e-04 1.064672622853200996e-04 2.389085342522498042e-04 5.701075533514868461e-04 -1.204375879828245132e-03 -1.304031393580420788e-04 -1.767009884975181185e-03 1.728603701557353224e-04 9.973754860486884640e-05 4.116716833139327900e-05 -4.365667882573547823e-04 -3.607238173240592577e-04 3.813165935074124663e-04 3.407847379157533585e-04 -1.415973275824117239e-04 -7.524497350096670404e-04 2.315153918241995594e-04 1.385093346708970183e-04 2.544403137931774061e-04 -1.830521657478542726e-05 -1.177644575281913038e-05 2.446907488225337826e-05 -2.110517031258192554e-04 1.737038247731891851e-05 -1.930093891928036170e-04 2.420012816257650695e-04 -4.332019741084994040e-05 -6.412510133169684490e-05 -2.786727367576333878e-05 1.953436422973198801e-04 4.301171552185920234e-05 5.206229048447722196e-05 8.274412408941154951e-05 -3.448450156663372961e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+3.608218193420686393e-01 6.776515627863651803e-01 -1.370422069996508341e-01 4.853613861574486710e-01 2.731920882388166327e-01 4.157229567063065456e-01 -2.718110954606955398e-01 1.249130796746510175e+00 -4.557374147472958992e-02 -4.045135956455547799e-02 1.252956148898498323e-01 -3.542441939939956763e-02 7.001846806117750788e-02 3.342719240190829971e-02 1.116010369903142480e-01 8.879749127415932988e-03 2.480908584158345725e-02 4.912849170366320739e-02 -1.446677289120288680e-02 -1.876475144628519595e-02 -2.408332006783253307e-02 -6.151214655099674494e-03 -5.601706627653994797e-02 9.758737937134668444e-03 -5.845510263256864167e-03 -1.054801834272182069e-02 -5.383678839042574382e-03 1.231971470958758037e-02 -1.825257206607684701e-02 1.039542962707836480e-03 -9.027567607426081886e-03 8.194722878461764906e-03 6.095460550841439330e-04 1.772332844573762650e-03 -3.216541433414988205e-04 -4.625688100176229486e-03 3.821375270969237185e-03 3.477935149882801515e-04 2.427763078060738637e-04 -3.925613343147412067e-03 -2.159590273600335824e-03 -1.742340975166559907e-03 -1.190629586118678671e-03 -3.588489929163733196e-03 -9.064525775506965862e-04 -1.614354989351932980e-03 -9.737596646597454671e-04 3.315852197161462279e-04 -3.279326353363606330e-04 5.918975895713998129e-04 -1.426996630759077818e-03 1.756683960359683148e-04 -1.933313376884351128e-04 -7.321484812386399319e-04 3.328794623415409293e-04 8.212394439028473979e-04 4.343489815629223414e-04 -3.500987158077584118e-04 -1.130116428412040075e-04 4.087422435621750646e-04 -5.704366919558491846e-04 -5.868538177124427071e-04 1.373423637410903220e-04 -5.062969973488864669e-04 -5.608414806902770175e-05 -3.675312950472417266e-04 2.587669698167485645e-04 -3.000441450680023858e-04 3.837449460455872913e-04 1.509100524129397417e-04 -3.152033391106525063e-05 -8.601380473427661653e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.152507950352235286e-02 4.344369957781281655e-01 1.045631150388001002e-01 -1.792176994200362916e-01 -1.945061705966302790e-02 -5.666401363447104433e-02 -2.064771741008611730e-02 -4.557374147472958992e-02 4.637145342907503243e-01 5.150007871198930554e-03 -1.285401762570439685e-02 1.721261163546915218e-02 -1.112590841527301422e-02 -6.596789770022610383e-03 -1.828716004000847245e-02 1.554972873255434235e-02 -3.812996928884731965e-04 -4.432512306554896482e-03 -6.739088913497015727e-03 -1.999037351447509065e-03 3.066288581407881388e-03 4.459711615308573074e-03 6.445268259478287472e-03 2.340847144843778097e-03 4.552451457637894487e-03 -1.678123241249541509e-03 3.280729926455470524e-03 2.626359832329390725e-03 3.502283021502295736e-03 -1.059984577356706207e-03 4.138797813790392327e-03 -1.894362267501049652e-03 7.793195996889062775e-04 8.079623681966091207e-04 -1.378566053456487840e-03 -1.697646610893527037e-04 -8.551983336113985366e-04 -1.697483782648856975e-05 -5.964808838322232642e-05 1.146886932017502411e-03 1.270617692901698510e-04 3.995590159973253864e-04 5.505907578660442251e-04 1.050607567543126704e-03 8.732580326638342228e-04 2.639236607369127205e-04 2.572116940188130959e-04 -1.619982327604379342e-04 2.312364450064562788e-04 -1.910278405466634126e-04 2.209769696867038241e-04 -1.431669959726685599e-04 2.596861749881037261e-04 4.178139323991092078e-04 -2.066262633278977189e-04 -2.548774159884048345e-04 -1.454051844275693344e-04 1.532391806602044623e-04 1.424301882282700711e-05 -1.652235397087890671e-04 1.773999476661630855e-04 1.423947592396369595e-04 1.785027403686350716e-05 4.638409297047827546e-05 7.346248575918590066e-06 1.186915534952127222e-04 -4.531752882718998512e-05 2.966063375459098217e-05 -1.199678218197835426e-04 -3.192562675352125106e-05 -4.080204655220601227e-05 3.304973508223608302e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-4.338289847711910840e-01 -2.526889519151093166e-01 -3.344536386260400507e-01 2.780445565390611898e-02 4.201754769936406203e-02 -6.696079659257146884e-03 1.499066463870979693e-01 -4.045135956455547799e-02 5.150007871198930554e-03 3.436179558384725441e-01 1.432219079038020253e-02 -3.961171148952239741e-03 8.678947118464454616e-03 -2.953911262524450826e-02 -1.626865111680820403e-02 -3.699919738645729428e-02 -5.432211920875000761e-04 -5.135710204460787638e-03 2.231278768444570730e-03 -1.697376073194466900e-02 8.317856024104265666e-03 5.182299787189210104e-03 1.800238640603106814e-03 5.114547478684042595e-03 9.445036776675377194e-03 -3.832992557475448092e-03 4.779714928986471961e-03 -3.913517752860052983e-03 5.217861035799904561e-03 -1.170680844491593956e-03 3.138880234090649351e-03 -1.142582302888416447e-03 7.919725769418088342e-04 -1.613149844466397554e-03 -3.805846766344923599e-03 -1.225351861585471599e-04 -1.036607601927765597e-03 3.018486008669335344e-04 -1.570362131730773539e-04 1.106434334517699021e-03 -6.714113394380810630e-04 7.806432925114898834e-04 -3.132000358591949772e-04 8.115175016731108115e-04 6.459289791180008013e-04 1.430034794857577385e-04 5.527219489132634452e-04 -1.286426693722703596e-04 3.320717201368599469e-04 -7.345176618270439755e-04 3.675661138048835373e-04 9.318588992216788160e-05 5.430124259517040818e-04 2.751264187707396511e-04 -2.479188002071436238e-04 -2.713262949381180472e-04 -4.173524619451913662e-05 2.734508069355598816e-04 -5.008977114734306516e-05 -1.550726952441152035e-04 8.576108740456644822e-05 1.769664599625240956e-04 6.358159169554963250e-06 1.949052086431426307e-05 -2.164654935748751868e-05 9.583075395224556134e-05 -1.400937237103345347e-05 7.498769918492608052e-05 -1.365032371695977130e-04 4.579691092921398039e-06 -3.923515742795041572e-05 3.715300410740473907e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-3.401275733838883819e-01 8.326195591152033004e-02 1.741287783241217968e-01 6.493542518831344479e-01 6.167707962710364256e-02 4.842902154225438294e-02 1.207251220858855147e-02 1.252956148898498323e-01 -1.285401762570439685e-02 1.432219079038020253e-02 2.902762716067964766e-01 -8.438899359980883697e-02 4.628031624620187329e-02 4.927628912603573136e-02 4.277614148639907743e-02 1.922913982762273523e-05 -6.468563172826152785e-03 2.685242336957238164e-04 -1.094583357494958824e-02 -1.321465060076825945e-02 -5.700953812953500448e-03 -9.283845036390253212e-04 -2.407251378757491178e-02 1.102772741750784012e-02 1.468284387003779322e-03 -7.437651582550317865e-03 -2.467034554263061990e-03 1.165490373517529118e-03 -7.737024968229888958e-03 3.935813628679790330e-03 -3.847482166206612000e-03 5.254733595653720071e-03 1.862725404281342315e-03 7.961596556436208345e-04 -3.381147508871534466e-03 -2.797947901200110478e-03 -2.662110429142099219e-04 1.469506847186703203e-04 7.935364213870098804e-04 -1.038256781440506475e-03 3.189870671397571604e-05 -1.062239593747478598e-04 -3.595895568218276895e-04 -4.783972182669784731e-04 -3.436107147687976959e-04 -4.973829027087110473e-04 -3.086822674090106350e-04 -2.510151805379643472e-04 -2.977317871531469438e-04 4.615267774400981374e-05 -7.656863575997993956e-04 -9.786264130944935806e-05 3.274027618828997260e-04 -4.197429973490441387e-04 -5.012709582171678201e-05 4.080012368100587711e-04 2.906118615715132161e-04 2.564591034887766358e-05 -1.700797368805104972e-05 -6.726275318846428248e-05 -2.809209171415647205e-04 -2.464440007424115175e-04 8.286101392258520351e-05 -1.554134880401983939e-04 -7.932729298327602168e-05 -8.974607311011348083e-05 1.002567567476807784e-04 -5.748580610106335069e-05 1.441556942788149449e-04 1.175654119172519119e-04 -8.561841165571346991e-05 -4.330943989420773821e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+5.689215050599324419e-01 -7.520827094709188476e-02 -2.664143284336018547e-01 -2.386087842797707803e-01 -1.072760989612006749e-01 -1.161629731783638403e-01 -1.002104840612136295e-03 -3.542441939939956763e-02 1.721261163546915218e-02 -3.961171148952239741e-03 -8.438899359980883697e-02 1.768430324280996779e-01 -3.899251963635186335e-02 -3.414773119987830896e-02 -3.217796380248081484e-02 -2.823397722306164592e-03 3.152655878656152478e-04 -1.257949671362645558e-04 3.269201407881622366e-04 5.336352268570332576e-03 6.368852743297139483e-03 1.705485984162050756e-03 1.768668422418588626e-02 -2.712575607967017497e-03 2.308675312508146581e-03 3.729649370726399477e-03 4.157051336719028677e-03 8.902382493743725075e-04 6.471736187121623386e-03 -3.513027018854808503e-03 4.326282700659268322e-03 -3.906259042023865757e-03 -2.457840888164765565e-04 1.775996784108843185e-04 8.048355178005666940e-04 1.346486239837061056e-03 -4.664427625857726499e-04 -2.892238503284062278e-06 -5.856131947498137338e-04 1.304291284961128653e-03 8.801562947264802755e-05 3.750214757501575983e-04 5.394567568466616296e-04 9.541493320946346172e-04 7.835072610342217088e-04 4.315810209107502366e-04 2.918155046263512954e-04 2.491262395017059157e-05 2.762509129655807590e-04 -1.267070661924935080e-04 5.384795007120238373e-04 -3.271117092449631671e-05 1.321407559272677948e-05 4.970276305833691956e-04 -1.106648806819851522e-04 -3.797680244812201111e-04 -2.339368899992294972e-04 8.159629996139466761e-05 1.726961695952565002e-05 -7.952983547860629154e-05 2.573933471591039477e-04 2.230769955928124323e-04 -4.145263907774513271e-05 1.177414519606735405e-04 4.246329822297799291e-05 1.239391176102544554e-04 -8.385444860698813255e-05 5.527428574179398175e-05 -1.516473257118092558e-04 -7.963193715660975822e-05 1.695685455514628240e-05 4.318576738557640984e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-3.097027278861538280e-01 6.943652510527546784e-02 -4.896028240900517081e-02 1.225924285017379323e-01 1.305190989575184535e-01 8.263368302807722054e-02 5.285108954674297248e-03 7.001846806117750788e-02 -1.112590841527301422e-02 8.678947118464454616e-03 4.628031624620187329e-02 -3.899251963635186335e-02 7.222433770811745313e-02 2.565862624801900499e-02 1.678789670491558778e-02 -5.233032472961582533e-03 5.311850598124766851e-04 3.407035518444877838e-03 7.452372437977786710e-04 -4.883406110288172550e-03 -5.254073264472882560e-03 -2.237504799997298843e-03 -1.491241780836166306e-02 1.310488642117836828e-03 -2.995801584065712220e-03 -9.868457998174734024e-04 -3.154848816802627298e-03 -9.980837406088067902e-04 -3.693356072824367266e-03 1.172514931775789056e-03 -3.919903763877725586e-03 2.818155249548921597e-03 1.330698958537328942e-04 -1.520044735293811153e-04 -4.614322867348142330e-04 -1.203051260551521668e-03 7.955879039841102758e-04 1.989652585234439209e-04 -2.457396359884059548e-05 -1.073323093580156587e-03 -1.103409000641131292e-04 -3.434447021190476759e-04 -5.991738730812662915e-04 -8.629054732455797338e-04 -8.215441441424524297e-04 -3.644333711806019326e-04 -2.048706643014099505e-04 -1.675822531678407452e-05 -2.125490400953882733e-04 1.262041168006230606e-04 -3.920820873962633808e-04 1.044093377326587571e-04 -1.715927866854435196e-05 -3.789489734872249584e-04 1.284930642570246732e-04 2.851203361386337267e-04 2.076412641980796188e-04 -9.178673244065273023e-05 -2.730721226785900552e-05 9.161435896423319634e-05 -2.163422852775889966e-04 -1.732638640541950530e-04 -2.232957947620680493e-05 -8.050780145039284730e-05 -3.664437981076439262e-05 -1.101407510512106974e-04 6.845626704766919846e-05 -3.261142130610199464e-05 1.191873877090309593e-04 6.758111595659033351e-05 -3.869676706766434773e-06 -3.395330272608326056e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-2.833415285070417511e-02 1.362615960812576521e-01 2.232688657510112362e-02 1.439259958948584384e-01 2.326801245543231450e-02 4.732740217156439916e-02 -3.342228005487063967e-03 3.342719240190829971e-02 -6.596789770022610383e-03 -2.953911262524450826e-02 4.927628912603573136e-02 -3.414773119987830896e-02 2.565862624801900499e-02 6.836746802291424063e-02 1.419532268498854485e-02 2.721940245984399466e-03 -2.167948009631868057e-03 -1.683975365558925243e-03 -1.480526137990068394e-03 1.434582774241320796e-03 -6.308783566505484115e-03 -1.763572665396114869e-03 -9.956831208208730827e-03 2.523305760618918645e-04 -4.582001305302226685e-03 6.552578009697521035e-05 -3.390383489985308255e-03 -3.887910968207112638e-04 -3.806534210614214743e-03 1.480662272274278204e-03 -3.656522318157191722e-03 2.627394097146271406e-03 7.497098243030908332e-05 9.340403032652623170e-05 3.092930730160475621e-04 -8.542441700414416363e-04 5.048392763196367502e-04 1.051826620809544026e-04 1.914005644956672143e-04 -1.012554869924134826e-03 3.510788574865323025e-04 -3.174650742862805881e-04 -3.360360859534597421e-04 -7.388312323848840284e-04 -8.453221026733371939e-04 -2.969801118790762811e-04 -3.093626214115018287e-04 -3.043075405758708762e-05 -2.809577030003918138e-04 2.645706306226516965e-04 -4.229057390909805869e-04 4.047941292383729067e-05 -1.015083499359514634e-04 -3.875181483215191821e-04 1.566389073083454932e-04 3.055058350044929818e-04 1.929761524776394592e-04 -1.284935877549385551e-04 1.151653329240882028e-05 8.610784700552452273e-05 -1.942009767699294563e-04 -1.701672803557519997e-04 -2.339445347895309526e-05 -4.502432519145212989e-05 -3.144613366287266401e-05 -1.013235960799303072e-04 4.898765424359943798e-05 -2.586770301034950381e-05 1.313762786545367523e-04 5.956652853712097594e-05 -2.209112295969237752e-06 -3.887832644184033862e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-1.040460600700565397e-01 2.427410779932691332e-01 1.157145018649991253e-01 2.265742490552618282e-01 1.425708935287473000e-02 1.202511649782939673e-01 -5.271049712093802547e-02 1.116010369903142480e-01 -1.828716004000847245e-02 -1.626865111680820403e-02 4.277614148639907743e-02 -3.217796380248081484e-02 1.678789670491558778e-02 1.419532268498854485e-02 7.170629998521166337e-02 1.675986931908048441e-02 8.890097930257447045e-03 5.583956898578970943e-03 -2.506484762942387447e-03 -3.131062199809471480e-03 -6.812197597610415885e-03 1.604774149897017976e-03 -1.225172775533825940e-02 3.189014178565000891e-03 -4.200111374742632837e-04 -4.625548605118727855e-03 -2.326149180753861109e-03 3.021016440910324809e-03 -6.881193918559826150e-03 3.485438309444381568e-03 -1.074336088082343890e-03 2.281230461455415522e-03 1.401400241086509988e-04 2.409052822526026362e-04 -9.901314602646511234e-06 -8.603542599898057492e-04 2.230924985282982983e-04 -1.993433369347716603e-04 9.891698183537053594e-04 -1.039702984742725718e-03 -5.366925073934080683e-04 -3.711606177586931486e-04 -9.169001515543528144e-05 -7.224265795527657751e-04 1.268031949826734926e-04 -3.062120751659442577e-04 -2.281221370976299428e-04 1.275194464743666838e-04 -6.342131327856133773e-05 1.245803107783006395e-04 -3.682204463024329934e-04 -8.712116724412810066e-05 -1.027671751514216465e-04 -2.014033601247824348e-04 1.540805575801159984e-05 2.143513887402765088e-04 4.442715404391582714e-05 -4.514887168860532579e-05 -1.559956736529611996e-05 6.997906267666731146e-05 -9.858476835379464140e-05 -1.547083441551266078e-04 1.505219805254211696e-04 -1.638680526033623234e-04 3.011594955519207173e-06 -7.304101701076040230e-05 6.258740786674220011e-05 -1.082117074699512452e-04 9.118261053711964898e-05 1.663750071311323125e-05 -1.094373204478781997e-05 -1.840132810766053652e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.668967666699228969e-01 3.179616379126863901e-01 1.511613113560522081e-01 3.395860575355240385e-02 3.827659815208730998e-02 1.749076915000560145e-02 -4.156031192112023126e-02 8.879749127415932988e-03 1.554972873255434235e-02 -3.699919738645729428e-02 1.922913982762273523e-05 -2.823397722306164592e-03 -5.233032472961582533e-03 2.721940245984399466e-03 1.675986931908048441e-02 4.503472989581848440e-02 6.436044456347588330e-03 -6.755299561612848223e-04 -6.207507709105157534e-03 1.820102718105364760e-03 -4.766172632646627737e-03 4.779476608990755886e-04 5.394213391090120274e-04 4.430959170703384093e-04 3.013932140130247761e-05 -1.774368536331197895e-03 -4.875248018847493331e-04 3.588395787761624461e-03 -2.642715777734656966e-03 1.273570503250886620e-03 1.061210958372956229e-03 1.798694274767937719e-04 1.353971587808432757e-05 9.361344841736540020e-04 5.487643804992537204e-04 -2.631873942788620678e-04 -2.487929698910111586e-04 -2.255091276659282155e-04 5.104426009867599808e-04 -1.440655346240990572e-04 9.297025633720107146e-05 -1.548957133294823966e-04 4.084964146682874462e-04 3.954494801942396923e-05 4.083560853243847289e-04 3.803768830954258548e-06 -1.229589706334976735e-04 2.148386022113897762e-05 1.074226820892733201e-05 1.668625787538440192e-04 -1.207919522923245212e-04 -1.487183137346915682e-04 -8.896790069370060275e-05 7.438771471328983059e-05 -2.903357740954307925e-05 1.522961526926271581e-05 -6.819667277899345422e-05 -1.976404985515583046e-05 2.398217858936560807e-05 -1.209882720752283555e-05 4.729272065444615949e-05 -2.952024135095532626e-05 8.400983156904637120e-05 -4.807114097661961390e-05 1.746409068140498570e-05 1.103481550083112451e-05 -8.262883536709441980e-08 -5.452579764039189135e-05 1.031574862174494287e-05 -1.967508694630980544e-05 -1.102828314044841943e-05 1.009513262184747910e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-3.655751626048145235e-03 9.336235545642160294e-02 1.591003650484551848e-02 -1.319189431473041449e-03 1.150418195994163707e-02 3.586971688656934165e-02 1.486084946736747001e-02 2.480908584158345725e-02 -3.812996928884731965e-04 -5.432211920875000761e-04 -6.468563172826152785e-03 3.152655878656152478e-04 5.311850598124766851e-04 -2.167948009631868057e-03 8.890097930257447045e-03 6.436044456347588330e-03 1.676764597538559984e-02 4.628320136178269185e-03 1.278830070889204126e-03 7.067037132786664773e-05 -1.971822341340464385e-03 -1.799382333123832316e-04 -2.675055164073589752e-03 -5.978315481268490876e-04 -1.011843909059047556e-03 -5.392419537127347449e-04 -7.055094794564287664e-04 1.043169492387162984e-03 -1.329088400733842348e-03 1.902555318144708116e-04 -3.808421974609921073e-04 1.033262481102144005e-04 -2.415749168699570610e-04 1.171321568730126736e-05 5.825800671459812350e-04 2.312927810437176782e-05 3.388654338747912808e-04 -8.490971891275008701e-05 1.106784242145971250e-04 -2.957664017947809728e-04 -2.725903336230746593e-04 -1.912387058494055504e-04 -4.037304409865920089e-05 -2.888266145771641188e-04 5.079914324192425695e-06 -7.966098745478557535e-05 -7.081656011143034821e-05 9.178877782970832646e-05 2.385567057563616475e-06 5.894932964829071426e-05 -3.980702707595470551e-05 3.659911184103275777e-07 -1.030581862258247744e-04 -3.754814963187983075e-05 3.522918891502022152e-05 3.760780923906276461e-05 -9.590977309758241776e-06 -3.799800755075941999e-05 -7.286690988683969754e-06 4.740544522842580718e-05 -1.350012440037236598e-05 -3.381407687621824644e-05 2.918444484219841353e-05 -3.818976764690451472e-05 1.071219150162841299e-05 -2.412684349136371963e-05 1.253491280423375008e-05 -3.167150562663330430e-05 2.138650756828288323e-05 -6.464195128284297281e-06 1.125386429729200394e-05 -3.279097190529021467e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-6.152690891532206635e-02 2.218548551035433303e-02 2.658138484315402880e-02 -1.743981455678999442e-03 2.496776811789163872e-02 8.134442904583346137e-03 -1.360019803585902536e-03 4.912849170366320739e-02 -4.432512306554896482e-03 -5.135710204460787638e-03 2.685242336957238164e-04 -1.257949671362645558e-04 3.407035518444877838e-03 -1.683975365558925243e-03 5.583956898578970943e-03 -6.755299561612848223e-04 4.628320136178269185e-03 1.453499079137124221e-02 2.645668646588169616e-03 -7.595249498148268880e-05 -1.815142769810023076e-03 -1.760792760329558601e-03 -5.518538694702808216e-03 -7.072284785392206861e-04 -2.403526837502806702e-03 2.637142530718321700e-04 -7.680007744375171724e-04 4.270266984178131670e-04 -1.085427198344034042e-03 -6.897226859164118113e-04 -1.729604533562058722e-03 5.354239008036794107e-04 -2.525686083427757292e-04 1.749127824341579460e-05 6.264388372001468766e-04 -1.006499877423837255e-04 7.516385072223825872e-04 4.245931484822043341e-05 -2.014981133876685994e-04 -4.801839307881957254e-04 -2.633832752406230541e-04 -2.671848709918102057e-04 -2.155108571851086484e-04 -5.438386911816651178e-04 -2.862664772099848256e-04 -1.812230952300036206e-04 -1.349250488998367486e-04 9.661214670752052425e-05 -6.280677139127239974e-05 1.034749074276622324e-04 -9.893362359739492318e-05 7.103930789682153218e-05 -1.150976633024325917e-04 -1.261930131866746555e-04 9.829875362053884394e-05 9.433849002991393493e-05 5.329500197236881215e-05 -7.905207434952274208e-05 -1.170912993767467181e-05 8.321222128147629904e-05 -7.262269616274195211e-05 -6.338714832564684416e-05 -1.357777875004645048e-05 -3.424479931692787205e-05 -6.271314874292827288e-07 -5.488623407373613178e-05 2.392357020591055552e-05 -2.369261888117398920e-05 5.029700999785894169e-05 1.202569628670462827e-05 1.631703500261672758e-05 -1.225753753039942129e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-5.079790817042751511e-02 -1.018694147650027332e-01 -5.248662567188168253e-03 -2.460260664839713021e-02 -2.123331875180941436e-02 -5.452149313153071067e-03 2.434158737318100826e-02 -1.446677289120288680e-02 -6.739088913497015727e-03 2.231278768444570730e-03 -1.094583357494958824e-02 3.269201407881622366e-04 7.452372437977786710e-04 -1.480526137990068394e-03 -2.506484762942387447e-03 -6.207507709105157534e-03 1.278830070889204126e-03 2.645668646588169616e-03 8.560390276644331761e-03 1.966031177283113999e-03 1.007810187141859777e-03 -4.258125942002810013e-05 -3.300447252448150220e-05 -1.695399202214003349e-03 -1.820325268481104777e-03 1.218509142105119337e-03 -6.466875669763548192e-04 -1.504976777683909341e-03 4.284783580182444855e-04 -1.772250824145480980e-04 -1.007720168071118798e-03 -2.907968938133322693e-06 -3.871385589746427014e-04 -5.607091834858962372e-04 6.773889758014666612e-04 4.417733801215549061e-04 3.283306969011225802e-04 6.436355465701951057e-06 -7.004319966350164067e-05 -2.041786331484989709e-04 -5.143860647685531961e-05 -7.463439813510775822e-05 -1.901135084538398774e-04 -2.812374416427710646e-04 -2.971949086587208312e-04 -3.903524450908422538e-05 -3.784454396056382680e-05 8.433634513163666257e-05 -4.484308341650900171e-05 1.613424785838553404e-05 3.593033501757885383e-05 7.823192938571150079e-05 -1.132490445764535711e-04 -1.110141165884808990e-04 8.278639553576999308e-05 3.548016452425954448e-05 2.395673682887788938e-05 -4.825688537135270278e-05 -2.014119064777501315e-06 6.273269034200364971e-05 -3.272787576133775707e-05 -1.115510852775361141e-05 -2.158586750201944908e-05 1.101095964996979488e-05 4.113848447189669721e-06 -2.961329832319939138e-05 1.677888984873861993e-06 6.872670257777988186e-06 2.245775810930749321e-05 -2.641726103323011439e-07 2.506983046276114026e-05 -6.491311546372692975e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.036981279680113943e-01 -1.462219314022686692e-02 3.464263962777509864e-02 -4.785921068039373366e-02 -2.949796384868130436e-02 -1.826690131046378868e-02 -1.225357831199443527e-03 -1.876475144628519595e-02 -1.999037351447509065e-03 -1.697376073194466900e-02 -1.321465060076825945e-02 5.336352268570332576e-03 -4.883406110288172550e-03 1.434582774241320796e-03 -3.131062199809471480e-03 1.820102718105364760e-03 7.067037132786664773e-05 -7.595249498148268880e-05 1.966031177283113999e-03 5.768507895115459159e-03 -3.243759038963192945e-04 -1.645840879193126536e-04 3.012127287295433831e-03 -1.952999875366275961e-03 -1.870635703399381325e-03 1.473579017099631424e-03 -5.768348737119198977e-04 -3.122883498363335968e-04 2.606828062173910062e-04 -2.423600933522951565e-05 -2.684856217439798159e-04 -3.249508094280805621e-04 -3.263483673991327233e-04 -8.846850483923994533e-05 8.716505710515635302e-04 4.184983045758587324e-04 8.983929827779942392e-05 -4.597176197375354089e-05 -7.614329869236398344e-06 -5.207835632181696031e-05 1.465784292608470743e-04 -5.946312603184241579e-05 2.936785822407788271e-05 -5.439459020052170974e-05 -1.356407150257626611e-04 3.928966127838070280e-05 -4.329783396901574187e-05 4.277845936663848055e-05 -3.856222133391050373e-05 8.402171174543566069e-05 3.696345502416460618e-05 6.785670635266622666e-06 -1.169612546560399284e-04 -3.445169180452193865e-05 5.045860131227750123e-05 6.071890491009686422e-06 -1.213815566567590402e-05 -4.127316819706257665e-05 1.297918473183293150e-05 2.837607269731748960e-05 8.331262365369322380e-06 1.730804189942077524e-06 -1.358537293691424296e-05 2.409725031316143024e-05 8.839937295179304308e-06 -6.206150197116174429e-06 -1.099085940953485781e-05 4.205784704953239896e-06 7.280412361667163857e-06 -1.059320910487804246e-05 1.762029255048584790e-05 -2.856760980796503435e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+2.487505367640383946e-03 -8.515627328232243387e-02 -1.232921929371953718e-02 -1.122643117203653285e-02 -2.769334623211424512e-02 -2.549651563684083438e-02 1.885404382326395095e-02 -2.408332006783253307e-02 3.066288581407881388e-03 8.317856024104265666e-03 -5.700953812953500448e-03 6.368852743297139483e-03 -5.254073264472882560e-03 -6.308783566505484115e-03 -6.812197597610415885e-03 -4.766172632646627737e-03 -1.971822341340464385e-03 -1.815142769810023076e-03 1.007810187141859777e-03 -3.243759038963192945e-04 4.522439727216282478e-03 7.014519131897985899e-04 2.835611973276760001e-03 2.016493938344990366e-04 1.252382410139099567e-03 1.800026020879419647e-04 8.151225722083059585e-04 -8.851627178089273079e-04 1.368946266513604597e-03 -1.653631857346187341e-04 7.611629325203054554e-04 -5.712318153414074062e-04 8.057533125204805586e-05 -1.964594885194169871e-04 -3.484407694893896243e-04 2.662861527330279577e-04 -2.536132134073926410e-04 -6.260381157028321778e-06 -4.352107442386669259e-05 3.312129948334918976e-04 1.486535961937916430e-05 1.632459050684923331e-04 4.918599061060245571e-05 2.744670848652552039e-04 1.382401756396188747e-04 1.016887591901071081e-04 1.012100033971225914e-04 -2.616523859577740360e-05 4.835642165239822769e-05 -1.020011365048926094e-04 1.162559982472377779e-04 -5.391923506097276152e-06 5.908384257914070531e-05 6.948687280706797157e-05 -4.775879807954449348e-05 -7.364840423994678390e-05 -3.183819260697743454e-05 4.736178007677883028e-05 -1.739022101393012079e-06 -3.874307642333332422e-05 4.044977077169832294e-05 4.839918593244457295e-05 -4.694375939188052714e-06 2.817619471277102867e-05 1.706415187433887983e-06 2.968321568296398508e-05 -1.498008435521469495e-05 2.182520979866108807e-05 -3.565504276687142781e-05 -9.019950713535521624e-06 -1.927180635361201216e-06 8.738700086174951658e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.864175632519755790e-03 -1.667165495639554018e-02 -9.134557837192599714e-03 -3.568183323302856653e-03 -2.084246841584775495e-02 1.519359913114586838e-03 -5.423311931582636657e-03 -6.151214655099674494e-03 4.459711615308573074e-03 5.182299787189210104e-03 -9.283845036390253212e-04 1.705485984162050756e-03 -2.237504799997298843e-03 -1.763572665396114869e-03 1.604774149897017976e-03 4.779476608990755886e-04 -1.799382333123832316e-04 -1.760792760329558601e-03 -4.258125942002810013e-05 -1.645840879193126536e-04 7.014519131897985899e-04 2.643921681801679382e-03 1.866551169707488878e-03 5.270088914823480453e-04 1.140549523275584138e-03 -4.753844223790174234e-04 4.864565094716186665e-04 2.200168863650032200e-04 3.111607113208349662e-04 2.938636070302206335e-04 8.916597929282378760e-04 -3.026806718251334976e-04 8.306296523422155567e-05 -6.025628425052561069e-05 -2.131203670656386191e-04 7.717178147406601079e-05 -2.443929317798967439e-04 -2.526495568213821407e-05 1.453327404881754126e-04 1.653079834617696576e-04 -1.859113722987878231e-05 8.767632565389094404e-05 8.749737287264133865e-05 1.680975742098546593e-04 1.919891459958841589e-04 5.125034090298781058e-05 5.636830222826259840e-05 -5.371415009717788067e-06 5.107349213937666560e-05 -5.548855601551589084e-05 5.220392213886009810e-05 -2.799592446795981098e-05 3.117511928153476902e-05 6.355389627764696859e-05 -3.982260448260355889e-05 -4.575513403626815990e-05 -3.513220848263571829e-05 3.216209297280147640e-05 2.351611783131146252e-06 -2.410416773984527317e-05 3.634853596056908945e-05 2.700482553860230595e-05 2.097276772660508921e-05 4.997823791640521698e-07 4.813994611635240856e-06 2.064771220062671449e-05 -7.835524427522651912e-06 7.880189997979381831e-07 -2.155250999479986628e-05 -9.878143351485325893e-06 -4.014266191500382714e-06 6.539603459619405530e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.337781488808269248e-01 -6.646628461108314290e-02 -2.095143721411957347e-03 -9.157885447170877047e-02 -5.449407848789514924e-02 -4.819508472752650347e-02 4.046117822688864198e-05 -5.601706627653994797e-02 6.445268259478287472e-03 1.800238640603106814e-03 -2.407251378757491178e-02 1.768668422418588626e-02 -1.491241780836166306e-02 -9.956831208208730827e-03 -1.225172775533825940e-02 5.394213391090120274e-04 -2.675055164073589752e-03 -5.518538694702808216e-03 -3.300447252448150220e-05 3.012127287295433831e-03 2.835611973276760001e-03 1.866551169707488878e-03 1.086070441731627582e-02 -1.020557489796180455e-03 1.578806165229580541e-03 1.042052333721634654e-03 1.347019268385230787e-03 -2.252088641628014463e-04 2.412673899301134156e-03 -2.362257444240291023e-04 2.263519249156990907e-03 -1.569447502422650648e-03 -8.306416692962121122e-05 -5.507521922215945188e-05 1.381775605523132683e-04 6.514477746111904969e-04 -6.207548256840998138e-04 -8.647535977216263777e-05 4.267909102033757760e-05 6.502432108773307899e-04 2.201906802068381657e-04 2.449535564438783987e-04 3.045463289711690933e-04 5.961804649219777348e-04 3.709380092418532323e-04 2.532840156431448702e-04 1.569126502620466737e-04 -3.056605883515924320e-05 1.089835155161093165e-04 -8.535787821331027687e-05 2.383773424764495821e-04 -5.748065438169394495e-05 2.464557817347243888e-05 2.008320983299538435e-04 -7.815817313063548523e-05 -1.663406516150708848e-04 -1.111370021819231704e-04 6.222884794804520896e-05 1.737811944582189113e-05 -6.551548653357978046e-05 1.249595678743513371e-04 1.047970812976505299e-04 1.359872113459600767e-06 6.111366215037772331e-05 1.749185054416228763e-05 6.893237602102621588e-05 -4.346491983334417299e-05 3.011062711617032802e-05 -7.396075841911611392e-05 -3.665168900447145645e-05 2.482641239272415903e-06 1.938933017776950794e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-3.737769550000364510e-02 1.002835273138996501e-02 -3.577813510545549847e-03 3.430652428805573001e-02 4.669923561202599452e-03 6.803095351806253722e-03 -7.640766951509455653e-03 9.758737937134668444e-03 2.340847144843778097e-03 5.114547478684042595e-03 1.102772741750784012e-02 -2.712575607967017497e-03 1.310488642117836828e-03 2.523305760618918645e-04 3.189014178565000891e-03 4.430959170703384093e-04 -5.978315481268490876e-04 -7.072284785392206861e-04 -1.695399202214003349e-03 -1.952999875366275961e-03 2.016493938344990366e-04 5.270088914823480453e-04 -1.020557489796180455e-03 1.733687698230787456e-03 1.317502213999072495e-03 -1.009976387742404043e-03 3.754856065583937362e-04 4.741209975660040524e-04 -2.917397276640198121e-04 2.330453814807570607e-04 4.442144705206396618e-04 1.277427973101712794e-04 2.456737936508177175e-04 1.233984625964894483e-04 -5.201633738084890726e-04 -2.309349773713467193e-04 -1.672404020777632058e-04 2.486646315936503789e-06 8.877956339960775404e-05 7.622196346345109738e-05 -5.787701319750750096e-05 5.402237051788644472e-05 4.480290040601607254e-05 1.042813581497493866e-04 1.562459086809760373e-04 -8.883868886529052944e-07 2.825818207777050354e-05 -3.145428601218939677e-05 2.889247171469957936e-05 -4.568187801893168134e-05 -1.899572236031351981e-05 -2.798861798288737056e-05 7.107510720990766536e-05 3.972918025309603757e-05 -4.472319138962333222e-05 -1.238032433542634784e-05 -5.728314305330774386e-06 3.218844963970522033e-05 -3.801764942571860164e-06 -2.860069369014079827e-05 7.964801404673995096e-06 3.425780415251275803e-06 1.940351985619487247e-05 -1.457526547360924839e-05 -3.235758777958224096e-06 1.112598383396138746e-05 3.418760597799435933e-06 -4.682519895680275368e-06 -9.269779821215523431e-06 2.537986766628896710e-06 -1.283994696546267278e-05 3.087536976312557659e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-2.038035250050124897e-02 -1.038734072738129594e-02 -2.643522848780513121e-02 3.498016579144666745e-03 -5.450203698759430344e-03 1.413744038755732112e-03 -6.524746431913095238e-03 -5.845510263256864167e-03 4.552451457637894487e-03 9.445036776675377194e-03 1.468284387003779322e-03 2.308675312508146581e-03 -2.995801584065712220e-03 -4.582001305302226685e-03 -4.200111374742632837e-04 3.013932140130247761e-05 -1.011843909059047556e-03 -2.403526837502806702e-03 -1.820325268481104777e-03 -1.870635703399381325e-03 1.252382410139099567e-03 1.140549523275584138e-03 1.578806165229580541e-03 1.317502213999072495e-03 2.613580561006286895e-03 -9.242169073291558751e-04 9.513275078688447108e-04 4.244134328933859093e-04 5.456567998809008216e-04 9.369353315822943776e-05 1.292838730805615823e-03 -4.202281474412479880e-04 2.468972082513710541e-04 1.151467664414329014e-04 -6.490093879215159609e-04 -6.975650736620711328e-05 -3.492772361914492081e-04 -2.513055918995981420e-05 7.751377562063401839e-05 3.189773304073738433e-04 -7.114987680563050765e-05 1.406778550751632581e-04 1.332419548602751657e-04 3.179751575854486683e-04 3.306482775362179212e-04 7.939591175394494793e-05 1.025846833687811782e-04 -4.412278691333452454e-05 8.370737410337935453e-05 -1.008655802261642384e-04 6.883436391737588758e-05 -4.760171871408830509e-05 1.023698271592309023e-04 1.234103292312075761e-04 -8.587398173328237823e-05 -7.741443824593154316e-05 -4.609227082655975816e-05 6.518161996065522561e-05 -3.883459328998760283e-06 -5.708364169165551608e-05 5.195896048986034174e-05 4.068185142951697043e-05 2.377769038147314323e-05 -3.279271768332279236e-07 2.182149862595999134e-06 3.691953320679626505e-05 -8.627071711881909884e-06 3.667819974991981131e-06 -3.957574539235436629e-05 -9.487116763320696108e-06 -1.452514558753619001e-05 1.141352144914724086e-05 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+3.317323145375998539e-02 -3.065052472017425894e-02 -9.681710450184304387e-03 -3.151761163000381394e-02 -3.004664780039125512e-03 -1.267078478199945221e-02 5.801337548866239945e-03 -1.054801834272182069e-02 -1.678123241249541509e-03 -3.832992557475448092e-03 -7.437651582550317865e-03 3.729649370726399477e-03 -9.868457998174734024e-04 6.552578009697521035e-05 -4.625548605118727855e-03 -1.774368536331197895e-03 -5.392419537127347449e-04 2.637142530718321700e-04 1.218509142105119337e-03 1.473579017099631424e-03 1.800026020879419647e-04 -4.753844223790174234e-04 1.042052333721634654e-03 -1.009976387742404043e-03 -9.242169073291558751e-04 1.129453410017139479e-03 -1.386469166063106012e-04 -4.917645446851103865e-04 5.426792043418046663e-04 -3.750269885534458455e-04 -3.187637934744245766e-04 -1.611356953906554264e-04 -1.401848394672955476e-04 -7.677944481678048707e-05 3.257932243362044551e-04 1.665054814051381980e-04 1.149044669282406702e-04 2.473960366990284472e-05 -1.261968638958663787e-04 -7.151887473172537076e-06 7.574018796592448541e-05 -1.872220081018962807e-05 -3.178730654083734982e-05 -3.525049883731141992e-05 -1.278092534021466737e-04 1.253210738391748749e-05 -1.046129649571480581e-05 9.841713067873026068e-06 -2.097864240534080431e-05 2.919133673579400899e-05 2.471566555353368183e-05 2.425414315825142952e-05 -3.952972067252586764e-05 -1.985209863670846320e-05 2.984916631664480448e-05 1.346281417309728781e-06 5.959694830935224293e-06 -2.029395041366274528e-05 2.541802357936133121e-06 1.486842645720343737e-05 -3.664472364811013231e-06 3.117057916943555314e-06 -2.359239972260627580e-05 1.848159946157711304e-05 8.524233667140363205e-07 -4.735025456121083780e-06 -4.700024548426196912e-06 9.354806367504218072e-06 2.780785246132303761e-06 -1.057715179344046737e-06 8.583475687027740820e-06 -1.659893884481251963e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.105579576228833509e-02 -1.163394565331549689e-02 -1.403434944757496645e-02 -1.202845038385303746e-02 -6.122062869122686310e-03 -8.275060309749398754e-03 -1.968349866307412838e-03 -5.383678839042574382e-03 3.280729926455470524e-03 4.779714928986471961e-03 -2.467034554263061990e-03 4.157051336719028677e-03 -3.154848816802627298e-03 -3.390383489985308255e-03 -2.326149180753861109e-03 -4.875248018847493331e-04 -7.055094794564287664e-04 -7.680007744375171724e-04 -6.466875669763548192e-04 -5.768348737119198977e-04 8.151225722083059585e-04 4.864565094716186665e-04 1.347019268385230787e-03 3.754856065583937362e-04 9.513275078688447108e-04 -1.386469166063106012e-04 7.668065814521060576e-04 1.389206679780682625e-04 6.562319483012264568e-04 -2.639939418227045336e-04 6.259585747634654725e-04 -3.179651379633020468e-04 9.714306432827830135e-05 2.813111952751205609e-05 -2.299045177790063970e-04 2.828722425977193839e-05 -1.374268742767797784e-04 1.197051589022076795e-05 -2.711255147364069983e-05 1.835971036390868390e-04 -1.977657512414242061e-06 7.951262341270437376e-05 6.625894623680233392e-05 1.485683823090772906e-04 1.344479138687552694e-04 4.152071391489673839e-05 4.761928036796589663e-05 -1.708000584329017916e-05 4.052025271379717966e-05 -4.575300914646652335e-05 4.852215881495333568e-05 -1.082525169705217294e-05 4.323715198092023933e-05 6.470867439153597377e-05 -3.107127595486874574e-05 -4.452914318490555553e-05 -2.265310399131617344e-05 2.569316755656437336e-05 3.096820590687096804e-07 -2.216039283795443821e-05 2.742509308260049841e-05 2.689783546719265786e-05 4.066557742105795237e-07 8.829015911065418360e-06 1.209951174471548300e-06 1.804907980220542488e-05 -7.524772052778355714e-06 7.204325110679516539e-06 -2.012872919268360905e-05 -5.363924043127883849e-06 -4.734013888028378455e-06 5.646142497610599296e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-2.141248002026539636e-03 4.204954623394343421e-02 1.104551920852550433e-02 -7.458392146961832780e-03 6.233805808468058374e-03 1.087132752046652759e-02 -1.804186365578614240e-02 1.231971470958758037e-02 2.626359832329390725e-03 -3.913517752860052983e-03 1.165490373517529118e-03 8.902382493743725075e-04 -9.980837406088067902e-04 -3.887910968207112638e-04 3.021016440910324809e-03 3.588395787761624461e-03 1.043169492387162984e-03 4.270266984178131670e-04 -1.504976777683909341e-03 -3.122883498363335968e-04 -8.851627178089273079e-04 2.200168863650032200e-04 -2.252088641628014463e-04 4.741209975660040524e-04 4.244134328933859093e-04 -4.917645446851103865e-04 1.389206679780682625e-04 1.126916973243353038e-03 -4.554882858263551381e-04 4.354474400235718126e-05 4.150768015567073556e-04 -2.223572389449304438e-05 6.626174924332953685e-05 2.178288173858740351e-04 -1.078411713797125247e-05 -1.288240107584725754e-04 -3.074396538955436912e-05 -1.827040935261651087e-05 6.102562724020901082e-05 -2.909156527445239891e-06 -4.691672478553635280e-05 -2.798826795291091106e-05 7.569840298381625820e-05 1.992168040127275958e-05 1.317244639019954052e-04 -9.884881462045714255e-06 -7.601524173214015897e-06 1.239373746393535050e-07 2.434015192715054498e-05 1.459656998404559716e-05 -1.906435424985897559e-05 -2.644753046596944903e-05 7.320998379218508268e-06 4.146182199234314423e-05 -1.710117236045339509e-05 -8.669988289556429683e-06 -1.671470170324210506e-05 3.995331409053667589e-06 9.978083517501553669e-07 -6.236158305946444497e-06 1.329767618899083223e-05 -1.532151181107127183e-06 1.701090009507083236e-05 -1.523210546967050076e-05 3.092951924829124510e-06 4.858289063490108892e-06 1.514019671119549726e-06 -1.236184731011569861e-05 -3.006006897108690873e-06 -3.706203189320364598e-06 -5.183400175509628066e-06 2.083226006221070852e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+2.045836320046340653e-02 -4.170448371326857667e-02 -2.469024459754528070e-02 -3.382011397184808005e-02 -1.059148907796189122e-02 -1.981364156951569491e-02 7.183562127396692261e-03 -1.825257206607684701e-02 3.502283021502295736e-03 5.217861035799904561e-03 -7.737024968229888958e-03 6.471736187121623386e-03 -3.693356072824367266e-03 -3.806534210614214743e-03 -6.881193918559826150e-03 -2.642715777734656966e-03 -1.329088400733842348e-03 -1.085427198344034042e-03 4.284783580182444855e-04 2.606828062173910062e-04 1.368946266513604597e-03 3.111607113208349662e-04 2.412673899301134156e-03 -2.917397276640198121e-04 5.456567998809008216e-04 5.426792043418046663e-04 6.562319483012264568e-04 -4.554882858263551381e-04 1.313741065287420866e-03 -5.008074969355836431e-04 4.946774300674776229e-04 -5.033657027714170994e-04 1.292665775410111890e-05 -7.141924153938437479e-05 -9.929089130586301150e-05 1.678300877207874455e-04 -1.045324932552046742e-04 2.533728377613676965e-05 -1.187888524722009043e-04 2.328068611839607656e-04 5.044112881382639327e-05 9.425338115027374409e-05 4.051813201752389374e-05 1.737517742752643570e-04 5.930101093216064032e-05 6.740852065398805163e-05 6.179734244485218353e-05 -1.779741568398513350e-05 3.287561375902811091e-05 -4.704632411754642413e-05 8.430042972964706528e-05 5.622448629487225905e-06 2.935383087498631062e-05 5.842537760117746194e-05 -1.964316983865507357e-05 -5.417032141895257490e-05 -2.161438175340583750e-05 2.146330299452465351e-05 1.097472540727606803e-06 -1.981844847869940077e-05 2.980423179699647133e-05 3.609775553776042401e-05 -1.673640655910415894e-05 2.611831632897302510e-05 1.400950562742646336e-06 1.959476315135822108e-05 -1.268797995598120692e-05 1.782002123857340893e-05 -2.426534125996547553e-05 -6.775574730600470551e-06 6.160655990713519977e-07 5.822975947004847651e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.007175750097230984e-03 1.087817630512656619e-02 1.663954052897647848e-02 2.268132337456118738e-02 -5.415285995096942064e-03 5.835644220262561736e-03 -3.678048081015129289e-05 1.039542962707836480e-03 -1.059984577356706207e-03 -1.170680844491593956e-03 3.935813628679790330e-03 -3.513027018854808503e-03 1.172514931775789056e-03 1.480662272274278204e-03 3.485438309444381568e-03 1.273570503250886620e-03 1.902555318144708116e-04 -6.897226859164118113e-04 -1.772250824145480980e-04 -2.423600933522951565e-05 -1.653631857346187341e-04 2.938636070302206335e-04 -2.362257444240291023e-04 2.330453814807570607e-04 9.369353315822943776e-05 -3.750269885534458455e-04 -2.639939418227045336e-04 4.354474400235718126e-05 -5.008074969355836431e-04 5.377446522531280750e-04 3.321461039025902544e-05 1.369683777958976005e-04 1.801810130702841890e-05 -1.799852237604125261e-06 -4.493277896854808818e-05 -2.486582679135781668e-05 -8.012493248162752481e-05 -3.819951936765670166e-05 1.224522666169458867e-04 -3.637012028608596815e-05 -6.689758556449086598e-06 -2.826085288630150085e-06 1.208227343569248989e-05 8.560396033962525605e-06 2.973134430871156067e-05 2.591846283129491195e-06 -3.580467502520106946e-06 -7.253828185990158829e-07 -4.965901556143198011e-06 1.088706128533796189e-06 -1.809853748366374533e-05 -1.798222797820642365e-05 -1.087555307562278207e-06 -1.190211285508807330e-05 -9.709462844534395056e-06 1.039659930702087835e-05 -1.408013707447572600e-06 5.163930837194974878e-06 3.452036096618240679e-07 -4.923936818311940982e-06 -1.183189705351824860e-06 -7.628323280116127415e-06 1.578285756380876529e-05 -8.598321084319601780e-06 5.708194263860368086e-07 -1.404691564408958912e-07 2.422721831154066190e-06 -6.089931177946056856e-06 2.609179499638542356e-06 -9.517390056273872338e-08 -1.935110762279943310e-06 -5.571924755037483409e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+4.439784802908462377e-03 -1.732046916316509104e-03 -5.035082209166950006e-03 -1.953464482207950201e-02 -1.110247762279015400e-02 -3.886638133343893204e-03 -7.442906987078637578e-03 -9.027567607426081886e-03 4.138797813790392327e-03 3.138880234090649351e-03 -3.847482166206612000e-03 4.326282700659268322e-03 -3.919903763877725586e-03 -3.656522318157191722e-03 -1.074336088082343890e-03 1.061210958372956229e-03 -3.808421974609921073e-04 -1.729604533562058722e-03 -1.007720168071118798e-03 -2.684856217439798159e-04 7.611629325203054554e-04 8.916597929282378760e-04 2.263519249156990907e-03 4.442144705206396618e-04 1.292838730805615823e-03 -3.187637934744245766e-04 6.259585747634654725e-04 4.150768015567073556e-04 4.946774300674776229e-04 3.321461039025902544e-05 1.047822271094917034e-03 -4.536775509337888108e-04 9.244820210055634889e-05 8.068729862481372850e-05 -2.124169805017258933e-04 6.029472054120667903e-05 -2.482803510501307249e-04 -3.009361838847046027e-05 5.935424103050175181e-05 2.320810052954278360e-04 -1.196679841246278841e-05 8.747675973652503461e-05 1.219937052635580667e-04 2.301777524720061748e-04 2.314217095811989667e-04 7.253743679030753659e-05 6.716102478518191216e-05 -1.966035039756706929e-05 6.027469854297206171e-05 -5.147021234027474452e-05 6.533743387235991596e-05 -3.535956176246084036e-05 4.370083191495560947e-05 9.436133224870364711e-05 -5.189646719407638674e-05 -6.224839936889071826e-05 -4.309900716834277131e-05 3.748944313615323285e-05 1.252191167092071518e-06 -3.440682084594948235e-05 4.626570801835959678e-05 3.380181836686178192e-05 1.508575400122563936e-05 5.614313206646561022e-06 5.205934084507476042e-06 2.756501480916861564e-05 -1.040127265533171532e-05 2.674628025521483561e-06 -2.949137234165936239e-05 -1.164242206705045290e-05 -6.115248721708328273e-06 8.537378672080554018e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-1.685676718110984754e-02 1.203341128201278372e-02 5.605064729526872819e-03 2.246783238280236975e-02 8.572812394065364508e-03 7.079470819375142422e-03 -6.222918040796957255e-05 8.194722878461764906e-03 -1.894362267501049652e-03 -1.142582302888416447e-03 5.254733595653720071e-03 -3.906259042023865757e-03 2.818155249548921597e-03 2.627394097146271406e-03 2.281230461455415522e-03 1.798694274767937719e-04 1.033262481102144005e-04 5.354239008036794107e-04 -2.907968938133322693e-06 -3.249508094280805621e-04 -5.712318153414074062e-04 -3.026806718251334976e-04 -1.569447502422650648e-03 1.277427973101712794e-04 -4.202281474412479880e-04 -1.611356953906554264e-04 -3.179651379633020468e-04 -2.223572389449304438e-05 -5.033657027714170994e-04 1.369683777958976005e-04 -4.536775509337888108e-04 3.557930968344109055e-04 6.309799251859507265e-06 -7.857526065492956814e-06 7.588188649111209760e-06 -1.069139331671348030e-04 8.558550529614802373e-05 1.325817266919849309e-05 1.823539457239775581e-05 -1.341040487561334508e-04 1.362598934502869733e-06 -4.195485712262068002e-05 -5.314380876304060407e-05 -1.169202702145882660e-04 -8.987423339666121048e-05 -4.654430341167410169e-05 -3.734457756489318291e-05 4.579091392974890907e-06 -2.870753391864871841e-05 2.315450287848702662e-05 -5.125594161664319464e-05 1.069229654326895493e-05 -1.012426230431816701e-05 -4.705395074426423242e-05 2.000601588891236926e-05 3.651256218174141129e-05 2.347264212339972837e-05 -1.530876930630510477e-05 -4.467412858776474094e-07 1.434357932981531756e-05 -2.534332939671674192e-05 -2.086505980371535947e-05 -5.988971778615703879e-07 -9.637710714554720788e-06 -3.580186081461951709e-06 -1.411808669071131051e-05 7.469338312344688899e-06 -4.893387598911064119e-06 1.674535861864697502e-05 7.429608291530769151e-06 -5.540345183124908851e-08 -4.516055978757111959e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-4.656950705253447634e-03 9.836552638151495765e-04 -1.215676722061089475e-03 4.576317146986056024e-03 7.128708546968626518e-04 -2.979997323784594368e-04 -1.296914580242981624e-03 6.095460550841439330e-04 7.793195996889062775e-04 7.919725769418088342e-04 1.862725404281342315e-03 -2.457840888164765565e-04 1.330698958537328942e-04 7.497098243030908332e-05 1.401400241086509988e-04 1.353971587808432757e-05 -2.415749168699570610e-04 -2.525686083427757292e-04 -3.871385589746427014e-04 -3.263483673991327233e-04 8.057533125204805586e-05 8.306296523422155567e-05 -8.306416692962121122e-05 2.456737936508177175e-04 2.468972082513710541e-04 -1.401848394672955476e-04 9.714306432827830135e-05 6.626174924332953685e-05 1.292665775410111890e-05 1.801810130702841890e-05 9.244820210055634889e-05 6.309799251859507265e-06 6.727894171416686723e-05 3.368141983006110843e-05 -1.150020872594138138e-04 -4.874418156942134247e-05 -3.562545393079515223e-05 2.474011551095587269e-06 5.844024674533445054e-06 2.868320340117940670e-05 -2.300261900997253171e-07 1.521254618324511859e-05 1.196192945374381583e-05 3.282073924231119198e-05 2.843469129066405118e-05 4.064226591516367788e-06 7.977232498512331669e-06 -9.517881704531060521e-06 5.468361072748906989e-06 -8.860707590965827979e-06 -1.939324212113316872e-06 -6.395155202141345346e-06 1.697192331549514672e-05 1.020880550852967391e-05 -1.022802388207869547e-05 -3.924677753713201706e-06 -1.078125966336999745e-06 7.345126866473323740e-06 -5.968847734853612985e-07 -7.535596496274431370e-06 2.394867082775297257e-06 1.788177715733127806e-06 1.949078437102915484e-06 -8.655682791416388824e-07 -9.546483302030205814e-07 3.247663731768738397e-06 1.239047338506378742e-07 3.375959516802242957e-07 -2.758980642931760011e-06 6.641776865761746599e-07 -2.937681147608335843e-06 7.485383834480511158e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.815515545207396341e-03 9.960975155926839533e-03 2.807330023336020424e-03 6.584277012313372686e-04 2.996796720101171322e-03 3.454862717470914011e-04 -3.988051704029558879e-03 1.772332844573762650e-03 8.079623681966091207e-04 -1.613149844466397554e-03 7.961596556436208345e-04 1.775996784108843185e-04 -1.520044735293811153e-04 9.340403032652623170e-05 2.409052822526026362e-04 9.361344841736540020e-04 1.171321568730126736e-05 1.749127824341579460e-05 -5.607091834858962372e-04 -8.846850483923994533e-05 -1.964594885194169871e-04 -6.025628425052561069e-05 -5.507521922215945188e-05 1.233984625964894483e-04 1.151467664414329014e-04 -7.677944481678048707e-05 2.813111952751205609e-05 2.178288173858740351e-04 -7.141924153938437479e-05 -1.799852237604125261e-06 8.068729862481372850e-05 -7.857526065492956814e-06 3.368141983006110843e-05 9.427218482550665996e-05 -3.424154349964921625e-05 -4.921050670444001478e-05 -1.145847428286707899e-05 -4.441628923635466940e-06 -1.912138841388181700e-06 1.426309096792489457e-05 5.712277726107419677e-07 -3.589518608259397332e-06 2.285463648667794326e-05 2.319104224306618080e-05 3.079039352591044789e-05 2.643696290875509855e-06 5.046959061241534157e-07 -7.017391103219607219e-06 3.867305401593752115e-06 5.103238724525191747e-06 -5.262919842512270090e-06 -9.244604838003954317e-06 7.136541731530771237e-06 1.214625130173310566e-05 -7.308100544518531364e-06 -2.741621726529121970e-06 -3.102991560853553051e-06 2.837240748603816887e-06 6.291754235926314919e-08 -5.514026326871582559e-06 3.404384745888746358e-06 -3.701625436715135731e-07 2.294900239893299961e-06 -1.913759436627111031e-06 3.028951594111010033e-08 2.503808698320056548e-06 1.382027345221392177e-07 -1.866643085103470539e-06 -1.787551810868550063e-06 -3.013488480233703126e-07 -2.256296329196521833e-06 5.835247193119556621e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.169220407981105304e-02 3.138547620051086642e-03 8.827408786928744519e-03 -1.042763728856110385e-02 -1.741763489132111238e-03 2.063169313516587263e-04 -3.892401569001677241e-04 -3.216541433414988205e-04 -1.378566053456487840e-03 -3.805846766344923599e-03 -3.381147508871534466e-03 8.048355178005666940e-04 -4.614322867348142330e-04 3.092930730160475621e-04 -9.901314602646511234e-06 5.487643804992537204e-04 5.825800671459812350e-04 6.264388372001468766e-04 6.773889758014666612e-04 8.716505710515635302e-04 -3.484407694893896243e-04 -2.131203670656386191e-04 1.381775605523132683e-04 -5.201633738084890726e-04 -6.490093879215159609e-04 3.257932243362044551e-04 -2.299045177790063970e-04 -1.078411713797125247e-05 -9.929089130586301150e-05 -4.493277896854808818e-05 -2.124169805017258933e-04 7.588188649111209760e-06 -1.150020872594138138e-04 -3.424154349964921625e-05 2.901986687961608967e-04 8.735939071526511567e-05 8.436460426683583549e-05 -5.458991964790919369e-06 -9.680512493903198224e-06 -7.241027204875522016e-05 1.394798053822659278e-05 -4.207441328361416992e-05 -1.624936670815752245e-05 -7.833593538346316442e-05 -6.428167515886386491e-05 -1.181551196627063655e-05 -2.587937865140625107e-05 2.089749305718938295e-05 -1.507407638101638670e-05 3.017862134695004859e-05 -3.934433804098296680e-06 1.113500339441221572e-05 -4.061302338855376083e-05 -2.239001785294139908e-05 2.431184592572230771e-05 1.204083302239302892e-05 2.867027949945631701e-06 -1.927756400207750220e-05 2.293113640873436705e-06 1.768219890474673252e-05 -5.878703003389204148e-06 -6.112934713502807634e-06 -4.217305006095327628e-06 1.156119728519702807e-06 2.152052358296868830e-06 -7.905891003495266577e-06 -5.583867029207058747e-08 -1.948489579976874265e-06 7.879491266596105349e-06 -1.232446194142769427e-06 6.034361051763264398e-06 -2.066701051634804764e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.148060084547841177e-02 -7.358525800900077470e-03 5.929448120624693317e-04 -6.769232956996613058e-03 -6.088494511819885803e-03 -3.435342426303449222e-03 2.757588195258237149e-03 -4.625688100176229486e-03 -1.697646610893527037e-04 -1.225351861585471599e-04 -2.797947901200110478e-03 1.346486239837061056e-03 -1.203051260551521668e-03 -8.542441700414416363e-04 -8.603542599898057492e-04 -2.631873942788620678e-04 2.312927810437176782e-05 -1.006499877423837255e-04 4.417733801215549061e-04 4.184983045758587324e-04 2.662861527330279577e-04 7.717178147406601079e-05 6.514477746111904969e-04 -2.309349773713467193e-04 -6.975650736620711328e-05 1.665054814051381980e-04 2.828722425977193839e-05 -1.288240107584725754e-04 1.678300877207874455e-04 -2.486582679135781668e-05 6.029472054120667903e-05 -1.069139331671348030e-04 -4.874418156942134247e-05 -4.921050670444001478e-05 8.735939071526511567e-05 9.802291567718069039e-05 -1.347911256022050393e-05 -8.386957807217726709e-06 -3.137598864033839118e-07 2.386081729973116696e-05 8.544928170347654970e-06 8.317825367927256727e-06 6.647249561691967649e-06 1.368674940885488690e-05 1.157280784111554328e-06 1.385755934463472669e-05 5.212955677735263505e-06 6.482834458326142685e-06 2.438304961354058196e-06 -2.488935798930191309e-06 1.934408015085754568e-05 2.257333160928001229e-06 -1.102687116430676679e-05 3.333898583051274303e-06 2.476749362949500278e-06 -7.987003688511803322e-06 -6.082712743557790222e-06 -7.144381671121075155e-07 1.180266878730879184e-06 1.445941706861449538e-06 5.866293946541508920e-06 5.928175575070123599e-06 -1.344823803488316267e-06 5.077762950940495802e-06 1.788502990387381573e-06 1.957469775509698313e-06 -2.963416202114559072e-06 2.285544417533018403e-06 -2.908806574047764107e-06 -2.860501407617433755e-06 2.653816167222156197e-06 6.942865891642761306e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-5.564095072554813524e-03 1.092238113838667809e-03 -4.846002188642694968e-04 -1.376551906797528506e-03 4.008696944312153607e-03 2.463836144642395781e-03 1.897077921357448612e-04 3.821375270969237185e-03 -8.551983336113985366e-04 -1.036607601927765597e-03 -2.662110429142099219e-04 -4.664427625857726499e-04 7.955879039841102758e-04 5.048392763196367502e-04 2.230924985282982983e-04 -2.487929698910111586e-04 3.388654338747912808e-04 7.516385072223825872e-04 3.283306969011225802e-04 8.983929827779942392e-05 -2.536132134073926410e-04 -2.443929317798967439e-04 -6.207548256840998138e-04 -1.672404020777632058e-04 -3.492772361914492081e-04 1.149044669282406702e-04 -1.374268742767797784e-04 -3.074396538955436912e-05 -1.045324932552046742e-04 -8.012493248162752481e-05 -2.482803510501307249e-04 8.558550529614802373e-05 -3.562545393079515223e-05 -1.145847428286707899e-05 8.436460426683583549e-05 -1.347911256022050393e-05 9.303139122309046067e-05 9.822195311960713442e-06 -3.016273890743084401e-05 -6.255770385218326058e-05 -1.213088466033839053e-05 -3.053081655978904466e-05 -3.176523878212858282e-05 -6.821081315478550290e-05 -5.548510108120160408e-05 -2.149398868392671597e-05 -1.727673735808113337e-05 8.936707056237777922e-06 -1.180051189404065174e-05 1.455375776676027369e-05 -1.396606707117246489e-05 1.093698129557466111e-05 -1.450352502026317552e-05 -2.026768107366224783e-05 1.462629232776474269e-05 1.392484503939799502e-05 9.275560884924556533e-06 -1.105009309962866440e-05 -9.442254103996122026e-07 1.094873355643614071e-05 -1.081791504272099396e-05 -8.197258708627354093e-06 -4.520218891688836376e-06 -2.188892631384336935e-06 -7.121247743544521665e-07 -7.400343796500760958e-06 2.754419246429198776e-06 -1.478233467050837911e-06 7.102630811025131088e-06 2.286482093831679833e-06 2.207581058065775442e-06 -1.928715511090751001e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-2.677025192730993902e-03 -2.239600529002745265e-03 -2.030774166295079650e-03 -8.247915704885734394e-04 1.222558514807537636e-03 -1.790756573481944343e-04 9.668611821740146343e-06 3.477935149882801515e-04 -1.697483782648856975e-05 3.018486008669335344e-04 1.469506847186703203e-04 -2.892238503284062278e-06 1.989652585234439209e-04 1.051826620809544026e-04 -1.993433369347716603e-04 -2.255091276659282155e-04 -8.490971891275008701e-05 4.245931484822043341e-05 6.436355465701951057e-06 -4.597176197375354089e-05 -6.260381157028321778e-06 -2.526495568213821407e-05 -8.647535977216263777e-05 2.486646315936503789e-06 -2.513055918995981420e-05 2.473960366990284472e-05 1.197051589022076795e-05 -1.827040935261651087e-05 2.533728377613676965e-05 -3.819951936765670166e-05 -3.009361838847046027e-05 1.325817266919849309e-05 2.474011551095587269e-06 -4.441628923635466940e-06 -5.458991964790919369e-06 -8.386957807217726709e-06 9.822195311960713442e-06 8.783631053146363852e-06 -1.102662179653430615e-05 -4.163913269899950292e-06 2.415817722809076909e-06 -3.764190443330229722e-07 -6.215244389022759133e-06 -7.155838613521513471e-06 -9.668894732626187561e-06 -3.380765647214483032e-06 -1.078488852741710683e-06 -5.455577815573183179e-07 -1.304723913211521285e-06 3.445052234615014429e-07 -1.727466791681901653e-06 2.829709605938323824e-06 9.679010072603353959e-07 -2.078422167655400132e-06 2.066964082206017694e-06 1.364908270497994581e-06 2.167598576720688378e-06 -1.078217198981128033e-06 -5.739802866674219406e-08 1.081595007448658467e-06 -1.773597557768825527e-06 -4.765796079956568955e-07 -1.793829506858857287e-06 3.265944023673884129e-07 -4.795630528262782521e-07 -8.572387720501610372e-07 2.716388685215162655e-07 5.466909777618913965e-07 8.075929895951456615e-07 7.054773758388763858e-07 2.548934493415157755e-08 -2.465542904164077415e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+2.439087679672048183e-03 4.752863203526146857e-03 4.196060681700150910e-03 5.092052409944678587e-03 -2.218909087883145451e-03 1.550999229382867906e-03 -6.229138107702427221e-04 2.427763078060738637e-04 -5.964808838322232642e-05 -1.570362131730773539e-04 7.935364213870098804e-04 -5.856131947498137338e-04 -2.457396359884059548e-05 1.914005644956672143e-04 9.891698183537053594e-04 5.104426009867599808e-04 1.106784242145971250e-04 -2.014981133876685994e-04 -7.004319966350164067e-05 -7.614329869236398344e-06 -4.352107442386669259e-05 1.453327404881754126e-04 4.267909102033757760e-05 8.877956339960775404e-05 7.751377562063401839e-05 -1.261968638958663787e-04 -2.711255147364069983e-05 6.102562724020901082e-05 -1.187888524722009043e-04 1.224522666169458867e-04 5.935424103050175181e-05 1.823539457239775581e-05 5.844024674533445054e-06 -1.912138841388181700e-06 -9.680512493903198224e-06 -3.137598864033839118e-07 -3.016273890743084401e-05 -1.102662179653430615e-05 4.028950662722588234e-05 -4.105051894655659482e-06 -3.594989801916280060e-06 1.929086349922197184e-06 9.479159275171946409e-06 5.475985363772317986e-06 1.895128937439195002e-05 1.469437460128408083e-06 -3.518651543090167144e-07 1.288342471941475028e-06 1.711445970985179715e-06 -1.297525302473626258e-06 -2.707761353645294296e-06 -5.504502137195369002e-06 -2.426278806542722122e-07 7.412740665651100469e-07 -3.308489402446947155e-06 4.814906546349460043e-07 -2.468506915476454285e-06 1.878451403649680725e-06 5.021619230492306986e-07 -1.331132845070079568e-06 1.645726579510317166e-06 -5.845286673326124633e-07 5.285486341164765641e-06 -2.568587223661355899e-06 5.911841397468341472e-07 6.847770918536194793e-07 2.059826954265711963e-07 -1.877485817558192944e-06 2.787930686689427851e-09 -6.728395584106437630e-07 -5.512449774399507191e-07 1.501595337672258894e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+4.431950574244813632e-03 -5.405994964182561820e-03 -3.109451772478799732e-03 -4.999359320387679155e-03 -3.208002769059118634e-03 -3.578119726556800665e-03 1.064672622853200996e-04 -3.925613343147412067e-03 1.146886932017502411e-03 1.106434334517699021e-03 -1.038256781440506475e-03 1.304291284961128653e-03 -1.073323093580156587e-03 -1.012554869924134826e-03 -1.039702984742725718e-03 -1.440655346240990572e-04 -2.957664017947809728e-04 -4.801839307881957254e-04 -2.041786331484989709e-04 -5.207835632181696031e-05 3.312129948334918976e-04 1.653079834617696576e-04 6.502432108773307899e-04 7.622196346345109738e-05 3.189773304073738433e-04 -7.151887473172537076e-06 1.835971036390868390e-04 -2.909156527445239891e-06 2.328068611839607656e-04 -3.637012028608596815e-05 2.320810052954278360e-04 -1.341040487561334508e-04 2.868320340117940670e-05 1.426309096792489457e-05 -7.241027204875522016e-05 2.386081729973116696e-05 -6.255770385218326058e-05 -4.163913269899950292e-06 -4.105051894655659482e-06 7.530256611958342707e-05 6.337357163232075404e-06 2.947044527301752103e-05 2.786569138425588183e-05 6.948513442766098165e-05 4.906004394034500256e-05 2.292671069648070736e-05 2.036736571186164918e-05 -8.434779764105679198e-06 1.359228074691992829e-05 -1.557323017706856893e-05 2.090405963190130668e-05 -7.381204451135006781e-06 1.415663352786250091e-05 2.369222652896436524e-05 -1.364615610053332868e-05 -1.732841042848313161e-05 -9.755164647601209142e-06 1.066065251489464527e-05 1.363901822043775267e-07 -1.046525984277859629e-05 1.160045306978643457e-05 1.010677114815098564e-05 6.825416653715177304e-07 4.493145665000338146e-06 7.337635060687008703e-07 7.794045247850907022e-06 -3.319241788662588442e-06 2.949532782914500818e-06 -8.532069981360760933e-06 -2.559617824564432568e-06 -1.551784977278313577e-06 2.225555119456500632e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+5.046338254006689852e-03 -8.217651829980714660e-04 1.415284352986231333e-03 -3.325428224714398195e-05 -4.632979544627331800e-04 -2.335462239830079795e-03 2.389085342522498042e-04 -2.159590273600335824e-03 1.270617692901698510e-04 -6.714113394380810630e-04 3.189870671397571604e-05 8.801562947264802755e-05 -1.103409000641131292e-04 3.510788574865323025e-04 -5.366925073934080683e-04 9.297025633720107146e-05 -2.725903336230746593e-04 -2.633832752406230541e-04 -5.143860647685531961e-05 1.465784292608470743e-04 1.486535961937916430e-05 -1.859113722987878231e-05 2.201906802068381657e-04 -5.787701319750750096e-05 -7.114987680563050765e-05 7.574018796592448541e-05 -1.977657512414242061e-06 -4.691672478553635280e-05 5.044112881382639327e-05 -6.689758556449086598e-06 -1.196679841246278841e-05 1.362598934502869733e-06 -2.300261900997253171e-07 5.712277726107419677e-07 1.394798053822659278e-05 8.544928170347654970e-06 -1.213088466033839053e-05 2.415817722809076909e-06 -3.594989801916280060e-06 6.337357163232075404e-06 2.878611611714699018e-05 5.029042122248169385e-06 4.419161853887530842e-06 6.379255883904667832e-06 -1.250358378065874605e-05 3.586293789586353999e-06 -2.020826639016264258e-06 -3.169650377257643919e-06 -3.962377197077293171e-06 3.722893403273824010e-06 -2.934810054821553446e-07 3.175935824066929143e-07 -1.062346626811649969e-06 -1.911137160487016754e-06 2.139252040758540964e-06 5.530440119588939438e-07 1.106357928396410959e-06 -1.358082820715775353e-06 1.395681512852135573e-06 -1.989058803864267365e-07 -5.853055752114133955e-08 7.644322134427383022e-07 -2.494994158508781376e-06 2.905825468077906112e-06 -3.630507234047271527e-07 2.733974071616375142e-07 -9.665305065538302648e-07 1.607001181889887493e-06 3.658469103455231337e-07 2.549259421130587456e-07 1.140847154687163659e-07 -2.639406416367486161e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+2.213997132808996540e-03 -2.819862669663264314e-03 -1.889606872391593915e-03 -9.544811823055334332e-05 -1.617040938609345772e-03 -1.556059883643280987e-03 5.701075533514868461e-04 -1.742340975166559907e-03 3.995590159973253864e-04 7.806432925114898834e-04 -1.062239593747478598e-04 3.750214757501575983e-04 -3.434447021190476759e-04 -3.174650742862805881e-04 -3.711606177586931486e-04 -1.548957133294823966e-04 -1.912387058494055504e-04 -2.671848709918102057e-04 -7.463439813510775822e-05 -5.946312603184241579e-05 1.632459050684923331e-04 8.767632565389094404e-05 2.449535564438783987e-04 5.402237051788644472e-05 1.406778550751632581e-04 -1.872220081018962807e-05 7.951262341270437376e-05 -2.798826795291091106e-05 9.425338115027374409e-05 -2.826085288630150085e-06 8.747675973652503461e-05 -4.195485712262068002e-05 1.521254618324511859e-05 -3.589518608259397332e-06 -4.207441328361416992e-05 8.317825367927256727e-06 -3.053081655978904466e-05 -3.764190443330229722e-07 1.929086349922197184e-06 2.947044527301752103e-05 5.029042122248169385e-06 1.529144157831724993e-05 9.096643916824904136e-06 2.744806566254162929e-05 1.747371475040332704e-05 8.640680083660196096e-06 8.379287411027476628e-06 -3.955730401618703084e-06 4.760746285390024031e-06 -7.881019556272645310e-06 7.577563623740091602e-06 -2.409581597178095695e-06 7.073240452666525235e-06 7.919505315857677669e-06 -5.399479194969076261e-06 -6.261674525858754543e-06 -3.022974674937096198e-06 4.647385575945352769e-06 1.556935327229822862e-07 -4.280487011281745532e-06 3.962421325826498033e-06 4.011513952685078814e-06 2.868649521387481056e-07 1.873495983140783944e-06 3.206360733619750699e-08 2.968359228022742138e-06 -1.245790702972656555e-06 1.490334204550915240e-06 -3.141582332661747018e-06 -6.934444253678881195e-07 -7.651431845921730587e-07 7.968122185065661467e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+5.193463389783090946e-03 1.845953114194293565e-03 1.219009408413760124e-03 -1.535916539039981071e-03 -1.546519078914351504e-03 -9.883812527745642826e-04 -1.204375879828245132e-03 -1.190629586118678671e-03 5.505907578660442251e-04 -3.132000358591949772e-04 -3.595895568218276895e-04 5.394567568466616296e-04 -5.991738730812662915e-04 -3.360360859534597421e-04 -9.169001515543528144e-05 4.084964146682874462e-04 -4.037304409865920089e-05 -2.155108571851086484e-04 -1.901135084538398774e-04 2.936785822407788271e-05 4.918599061060245571e-05 8.749737287264133865e-05 3.045463289711690933e-04 4.480290040601607254e-05 1.332419548602751657e-04 -3.178730654083734982e-05 6.625894623680233392e-05 7.569840298381625820e-05 4.051813201752389374e-05 1.208227343569248989e-05 1.219937052635580667e-04 -5.314380876304060407e-05 1.196192945374381583e-05 2.285463648667794326e-05 -1.624936670815752245e-05 6.647249561691967649e-06 -3.176523878212858282e-05 -6.215244389022759133e-06 9.479159275171946409e-06 2.786569138425588183e-05 4.419161853887530842e-06 9.096643916824904136e-06 2.009507192331900531e-05 2.952395277562631061e-05 2.882525121873564286e-05 9.542743771651826355e-06 5.959750083259247533e-06 -2.935516185752795710e-06 6.037880202843412944e-06 -3.078564114781466775e-06 6.354626764191847245e-06 -5.979371741981122249e-06 4.032983912418701358e-06 1.153015920914821118e-05 -6.261544245082840230e-06 -6.938164115755575699e-06 -5.489216061217567638e-06 3.974061668211252625e-06 5.627451737778124551e-07 -4.477002761651396961e-06 5.847792654192616114e-06 3.661322885777267821e-06 2.022587020424334970e-06 8.462438902755486208e-07 6.989193149268232001e-07 3.453476701392859521e-06 -1.388953618726526472e-06 9.477599942483534111e-08 -3.334366138122170089e-06 -1.499228545909969905e-06 -8.462155261383484718e-07 9.531474666883881774e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+5.660849797375821536e-03 -3.546134342665884214e-03 -1.837120395731124331e-03 -2.922443301953129863e-03 -3.049026376504262482e-03 -3.081589754149671560e-03 -1.304031393580420788e-04 -3.588489929163733196e-03 1.050607567543126704e-03 8.115175016731108115e-04 -4.783972182669784731e-04 9.541493320946346172e-04 -8.629054732455797338e-04 -7.388312323848840284e-04 -7.224265795527657751e-04 3.954494801942396923e-05 -2.888266145771641188e-04 -5.438386911816651178e-04 -2.812374416427710646e-04 -5.439459020052170974e-05 2.744670848652552039e-04 1.680975742098546593e-04 5.961804649219777348e-04 1.042813581497493866e-04 3.179751575854486683e-04 -3.525049883731141992e-05 1.485683823090772906e-04 1.992168040127275958e-05 1.737517742752643570e-04 8.560396033962525605e-06 2.301777524720061748e-04 -1.169202702145882660e-04 3.282073924231119198e-05 2.319104224306618080e-05 -7.833593538346316442e-05 1.368674940885488690e-05 -6.821081315478550290e-05 -7.155838613521513471e-06 5.475985363772317986e-06 6.948513442766098165e-05 6.379255883904667832e-06 2.744806566254162929e-05 2.952395277562631061e-05 7.144932702953881137e-05 5.208673522317376045e-05 2.267227656605930213e-05 1.949179715938880236e-05 -9.580831643899639981e-06 1.275340795916226898e-05 -1.415220618149975630e-05 1.777904756536834838e-05 -9.779899184125209849e-06 1.444910009559585241e-05 2.286104204180863323e-05 -1.498363731989965778e-05 -1.596442897150091088e-05 -9.620516779594095812e-06 1.109327673663470174e-05 9.190228564598966351e-08 -1.141090768556271121e-05 1.130423449019091326e-05 8.850913096854469371e-06 2.113159837487578682e-06 3.446030961292402225e-06 6.900464664618175632e-07 7.804546864240200063e-06 -2.929940477000970144e-06 2.150981448188072515e-06 -8.243786852794770921e-06 -2.443885427018496430e-06 -1.921276949844956293e-06 2.140766307316300918e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+2.464094766111543135e-03 1.198254889496519352e-03 -6.057768759081008604e-04 -1.565931142795850021e-03 -2.268542944204944393e-03 -3.771722094229367286e-04 -1.767009884975181185e-03 -9.064525775506965862e-04 8.732580326638342228e-04 6.459289791180008013e-04 -3.436107147687976959e-04 7.835072610342217088e-04 -8.215441441424524297e-04 -8.453221026733371939e-04 1.268031949826734926e-04 4.083560853243847289e-04 5.079914324192425695e-06 -2.862664772099848256e-04 -2.971949086587208312e-04 -1.356407150257626611e-04 1.382401756396188747e-04 1.919891459958841589e-04 3.709380092418532323e-04 1.562459086809760373e-04 3.306482775362179212e-04 -1.278092534021466737e-04 1.344479138687552694e-04 1.317244639019954052e-04 5.930101093216064032e-05 2.973134430871156067e-05 2.314217095811989667e-04 -8.987423339666121048e-05 2.843469129066405118e-05 3.079039352591044789e-05 -6.428167515886386491e-05 1.157280784111554328e-06 -5.548510108120160408e-05 -9.668894732626187561e-06 1.895128937439195002e-05 4.906004394034500256e-05 -1.250358378065874605e-05 1.747371475040332704e-05 2.882525121873564286e-05 5.208673522317376045e-05 6.089186450913507256e-05 1.470517295256233879e-05 1.470639473452320980e-05 -4.569903688218609784e-06 1.416258107306534181e-05 -1.182280555503361752e-05 1.219598067300511722e-05 -1.016089970414775407e-05 1.147272234133177340e-05 2.189169629585192348e-05 -1.374385246317045761e-05 -1.332237439723613659e-05 -9.877169350830304099e-06 9.437741545247710243e-06 -1.744242092803788552e-07 -8.802113822596226839e-06 1.019701689666718185e-05 6.496454758418873856e-06 5.248028022361048417e-06 -4.166059706029630658e-07 1.161915142443018902e-06 6.255331325275291722e-06 -1.731187758662104372e-06 -4.425739901996717051e-07 -6.583893727190819142e-06 -2.515662274499372568e-06 -1.902153501175332396e-06 1.960779243665265307e-06 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+2.981491338960395952e-03 -1.700863377455956189e-03 -2.839874995105646149e-04 -1.922747140009082494e-03 -1.399779265698711380e-03 -1.269988977558030724e-03 1.728603701557353224e-04 -1.614354989351932980e-03 2.639236607369127205e-04 1.430034794857577385e-04 -4.973829027087110473e-04 4.315810209107502366e-04 -3.644333711806019326e-04 -2.969801118790762811e-04 -3.062120751659442577e-04 3.803768830954258548e-06 -7.966098745478557535e-05 -1.812230952300036206e-04 -3.903524450908422538e-05 3.928966127838070280e-05 1.016887591901071081e-04 5.125034090298781058e-05 2.532840156431448702e-04 -8.883868886529052944e-07 7.939591175394494793e-05 1.253210738391748749e-05 4.152071391489673839e-05 -9.884881462045714255e-06 6.740852065398805163e-05 2.591846283129491195e-06 7.253743679030753659e-05 -4.654430341167410169e-05 4.064226591516367788e-06 2.643696290875509855e-06 -1.181551196627063655e-05 1.385755934463472669e-05 -2.149398868392671597e-05 -3.380765647214483032e-06 1.469437460128408083e-06 2.292671069648070736e-05 3.586293789586353999e-06 8.640680083660196096e-06 9.542743771651826355e-06 2.267227656605930213e-05 1.470517295256233879e-05 8.655032277766257576e-06 6.303944582870550021e-06 -2.189400104671499462e-06 3.853613309540507540e-06 -4.000935085326142621e-06 7.238736723284593916e-06 -2.782083138790549868e-06 2.882136142687736470e-06 7.010691177348684640e-06 -4.140718472595640450e-06 -5.446566939781176248e-06 -3.489958304884681854e-06 3.123687817996285668e-06 1.427779708549952225e-07 -3.203179728794817852e-06 3.961397829228578143e-06 3.164632210695487641e-06 4.295143794725945575e-07 1.616690627601893040e-06 4.082027279735085798e-07 2.502096210697561868e-06 -1.185267581962648641e-06 8.774470799514677198e-07 -2.714595514723615007e-06 -1.029811962493573144e-06 -2.711318268477722743e-07 6.952706117080231386e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-5.039407332605000921e-04 -2.232496914427477795e-03 -1.557658019967413966e-03 -1.587669058724534669e-03 -7.249757762839474868e-04 -5.734387254282873264e-04 9.973754860486884640e-05 -9.737596646597454671e-04 2.572116940188130959e-04 5.527219489132634452e-04 -3.086822674090106350e-04 2.918155046263512954e-04 -2.048706643014099505e-04 -3.093626214115018287e-04 -2.281221370976299428e-04 -1.229589706334976735e-04 -7.081656011143034821e-05 -1.349250488998367486e-04 -3.784454396056382680e-05 -4.329783396901574187e-05 1.012100033971225914e-04 5.636830222826259840e-05 1.569126502620466737e-04 2.825818207777050354e-05 1.025846833687811782e-04 -1.046129649571480581e-05 4.761928036796589663e-05 -7.601524173214015897e-06 6.179734244485218353e-05 -3.580467502520106946e-06 6.716102478518191216e-05 -3.734457756489318291e-05 7.977232498512331669e-06 5.046959061241534157e-07 -2.587937865140625107e-05 5.212955677735263505e-06 -1.727673735808113337e-05 -1.078488852741710683e-06 -3.518651543090167144e-07 2.036736571186164918e-05 -2.020826639016264258e-06 8.379287411027476628e-06 5.959750083259247533e-06 1.949179715938880236e-05 1.470639473452320980e-05 6.303944582870550021e-06 6.925162254327323042e-06 -2.256585059241239042e-06 4.466259148539595091e-06 -5.646407104457424253e-06 6.251254231432899341e-06 -1.939175146595596139e-06 4.598877234682472580e-06 6.690483276952634707e-06 -4.340387616433900202e-06 -5.027878221884626228e-06 -2.769214026303703186e-06 3.474295984875394753e-06 -2.418595629698586762e-07 -3.071391167060676544e-06 3.210818637135277918e-06 2.801136943885153694e-06 4.937447913884027109e-07 9.169862365809033600e-07 2.071990301677956430e-07 2.195690276996135189e-06 -7.793384914738346457e-07 7.168292527881375623e-07 -2.551645834044204410e-06 -7.083146078245313091e-07 -4.572128154508208419e-07 6.777934399011767929e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.104232973668697177e-04 3.259613064902853997e-04 5.622254273715385133e-04 -4.004820107785562349e-04 -1.886377365029176978e-04 3.694338794210490153e-04 4.116716833139327900e-05 3.315852197161462279e-04 -1.619982327604379342e-04 -1.286426693722703596e-04 -2.510151805379643472e-04 2.491262395017059157e-05 -1.675822531678407452e-05 -3.043075405758708762e-05 1.275194464743666838e-04 2.148386022113897762e-05 9.178877782970832646e-05 9.661214670752052425e-05 8.433634513163666257e-05 4.277845936663848055e-05 -2.616523859577740360e-05 -5.371415009717788067e-06 -3.056605883515924320e-05 -3.145428601218939677e-05 -4.412278691333452454e-05 9.841713067873026068e-06 -1.708000584329017916e-05 1.239373746393535050e-07 -1.779741568398513350e-05 -7.253828185990158829e-07 -1.966035039756706929e-05 4.579091392974890907e-06 -9.517881704531060521e-06 -7.017391103219607219e-06 2.089749305718938295e-05 6.482834458326142685e-06 8.936707056237777922e-06 -5.455577815573183179e-07 1.288342471941475028e-06 -8.434779764105679198e-06 -3.169650377257643919e-06 -3.955730401618703084e-06 -2.935516185752795710e-06 -9.580831643899639981e-06 -4.569903688218609784e-06 -2.189400104671499462e-06 -2.256585059241239042e-06 2.716727131708136508e-06 -8.687760734230810691e-07 1.686632009547813521e-06 -6.536343225668043440e-07 1.280545125358809257e-06 -3.375070242283026249e-06 -2.337575233506946801e-06 2.121848135503199373e-06 1.330613998634167611e-06 3.430217692163812584e-07 -1.576095558695757116e-06 2.540164245568345618e-08 1.872160918418307378e-06 -7.900129071038328737e-07 -7.304907271886556930e-07 9.389201192268997458e-08 -4.411985690207469642e-07 2.061474817143951820e-07 -9.091417055326498837e-07 2.036425027179706819e-07 -4.029148883813514869e-07 8.497834199086664439e-07 -6.115123991309164014e-08 5.262549842241051362e-07 -1.876227190431781646e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-6.429725104459368306e-04 -5.442695775069143630e-04 -8.241478693521974439e-04 -1.574875083945025079e-03 -4.308194383398527335e-04 -3.640846215850657462e-05 -4.365667882573547823e-04 -3.279326353363606330e-04 2.312364450064562788e-04 3.320717201368599469e-04 -2.977317871531469438e-04 2.762509129655807590e-04 -2.125490400953882733e-04 -2.809577030003918138e-04 -6.342131327856133773e-05 1.074226820892733201e-05 2.385567057563616475e-06 -6.280677139127239974e-05 -4.484308341650900171e-05 -3.856222133391050373e-05 4.835642165239822769e-05 5.107349213937666560e-05 1.089835155161093165e-04 2.889247171469957936e-05 8.370737410337935453e-05 -2.097864240534080431e-05 4.052025271379717966e-05 2.434015192715054498e-05 3.287561375902811091e-05 -4.965901556143198011e-06 6.027469854297206171e-05 -2.870753391864871841e-05 5.468361072748906989e-06 3.867305401593752115e-06 -1.507407638101638670e-05 2.438304961354058196e-06 -1.180051189404065174e-05 -1.304723913211521285e-06 1.711445970985179715e-06 1.359228074691992829e-05 -3.962377197077293171e-06 4.760746285390024031e-06 6.037880202843412944e-06 1.275340795916226898e-05 1.416258107306534181e-05 3.853613309540507540e-06 4.466259148539595091e-06 -8.687760734230810691e-07 4.132330913240351556e-06 -3.705748773750076205e-06 4.221030287748274174e-06 -1.751461073787741292e-06 2.914306328892382108e-06 5.883474801021724747e-06 -3.209125777306771323e-06 -3.877510155523458133e-06 -2.565519014113158503e-06 2.361127261225001846e-06 -1.469967737568143296e-07 -1.956292483009539031e-06 2.692792218159663394e-06 2.014323166199261004e-06 8.786604776940826638e-07 1.402335452626340010e-07 3.153753308885258713e-07 1.592868814401295604e-06 -5.144973849797472816e-07 1.050991465490803802e-07 -1.836955692941085523e-06 -6.809883126650592243e-07 -3.528808483551804227e-07 5.440266063005058542e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.360132763429056308e-03 1.975837796380677802e-03 1.502217389674169153e-03 1.449883573711475688e-04 6.515212322346771931e-04 2.717591615092382188e-04 -3.607238173240592577e-04 5.918975895713998129e-04 -1.910278405466634126e-04 -7.345176618270439755e-04 4.615267774400981374e-05 -1.267070661924935080e-04 1.262041168006230606e-04 2.645706306226516965e-04 1.245803107783006395e-04 1.668625787538440192e-04 5.894932964829071426e-05 1.034749074276622324e-04 1.613424785838553404e-05 8.402171174543566069e-05 -1.020011365048926094e-04 -5.548855601551589084e-05 -8.535787821331027687e-05 -4.568187801893168134e-05 -1.008655802261642384e-04 2.919133673579400899e-05 -4.575300914646652335e-05 1.459656998404559716e-05 -4.704632411754642413e-05 1.088706128533796189e-06 -5.147021234027474452e-05 2.315450287848702662e-05 -8.860707590965827979e-06 5.103238724525191747e-06 3.017862134695004859e-05 -2.488935798930191309e-06 1.455375776676027369e-05 3.445052234615014429e-07 -1.297525302473626258e-06 -1.557323017706856893e-05 3.722893403273824010e-06 -7.881019556272645310e-06 -3.078564114781466775e-06 -1.415220618149975630e-05 -1.182280555503361752e-05 -4.000935085326142621e-06 -5.646407104457424253e-06 1.686632009547813521e-06 -3.705748773750076205e-06 6.073378759932624651e-06 -4.633428434044027068e-06 1.054397994483061984e-06 -4.623317697071467969e-06 -4.750652754041915879e-06 3.561914850065346495e-06 3.763060497415580388e-06 1.820460817792525778e-06 -3.088204781582648776e-06 3.114812183521898988e-07 2.380886240347798927e-06 -2.158441305987497869e-06 -2.199449246741858755e-06 -5.629709558485042845e-07 -4.718595582111062624e-07 -4.708774646964350196e-08 -1.621182518149477748e-06 4.753974273269954999e-07 -6.034572823031384464e-07 1.928637139865854643e-06 3.988932017683496332e-07 4.481606569169479549e-07 -5.201149921661059864e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.775117901592001245e-03 -2.876295397663526307e-03 -1.202958423119133329e-03 -2.811842460202660862e-03 -1.402266046738543043e-03 -1.116892764687757314e-03 3.813165935074124663e-04 -1.426996630759077818e-03 2.209769696867038241e-04 3.675661138048835373e-04 -7.656863575997993956e-04 5.384795007120238373e-04 -3.920820873962633808e-04 -4.229057390909805869e-04 -3.682204463024329934e-04 -1.207919522923245212e-04 -3.980702707595470551e-05 -9.893362359739492318e-05 3.593033501757885383e-05 3.696345502416460618e-05 1.162559982472377779e-04 5.220392213886009810e-05 2.383773424764495821e-04 -1.899572236031351981e-05 6.883436391737588758e-05 2.471566555353368183e-05 4.852215881495333568e-05 -1.906435424985897559e-05 8.430042972964706528e-05 -1.809853748366374533e-05 6.533743387235991596e-05 -5.125594161664319464e-05 -1.939324212113316872e-06 -5.262919842512270090e-06 -3.934433804098296680e-06 1.934408015085754568e-05 -1.396606707117246489e-05 -1.727466791681901653e-06 -2.707761353645294296e-06 2.090405963190130668e-05 -2.934810054821553446e-07 7.577563623740091602e-06 6.354626764191847245e-06 1.777904756536834838e-05 1.219598067300511722e-05 7.238736723284593916e-06 6.251254231432899341e-06 -6.536343225668043440e-07 4.221030287748274174e-06 -4.633428434044027068e-06 8.524429117485150535e-06 -1.029776904529695593e-06 1.656953692847807899e-06 6.483710467658691176e-06 -2.918671689243741997e-06 -5.655981509599153290e-06 -3.389528729965360737e-06 2.501800703228239412e-06 -7.683341459634192972e-09 -2.124808128807146396e-06 3.710297957041738367e-06 3.367623979336610056e-06 -8.133314301264890880e-08 1.675973928333784768e-06 4.921648471345376190e-07 2.124741192214210916e-06 -1.177852082179017883e-06 9.794430374977572633e-07 -2.618825049467742968e-06 -1.100819409228126838e-06 1.101729431419549878e-07 6.850225531593700684e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-1.312927549033827924e-03 -9.324694243844581371e-04 -6.476197422369884453e-04 -2.902965304598182153e-04 4.319301770724475451e-04 8.581806776768562540e-05 3.407847379157533585e-04 1.756683960359683148e-04 -1.431669959726685599e-04 9.318588992216788160e-05 -9.786264130944935806e-05 -3.271117092449631671e-05 1.044093377326587571e-04 4.047941292383729067e-05 -8.712116724412810066e-05 -1.487183137346915682e-04 3.659911184103275777e-07 7.103930789682153218e-05 7.823192938571150079e-05 6.785670635266622666e-06 -5.391923506097276152e-06 -2.799592446795981098e-05 -5.748065438169394495e-05 -2.798861798288737056e-05 -4.760171871408830509e-05 2.425414315825142952e-05 -1.082525169705217294e-05 -2.644753046596944903e-05 5.622448629487225905e-06 -1.798222797820642365e-05 -3.535956176246084036e-05 1.069229654326895493e-05 -6.395155202141345346e-06 -9.244604838003954317e-06 1.113500339441221572e-05 2.257333160928001229e-06 1.093698129557466111e-05 2.829709605938323824e-06 -5.504502137195369002e-06 -7.381204451135006781e-06 3.175935824066929143e-07 -2.409581597178095695e-06 -5.979371741981122249e-06 -9.779899184125209849e-06 -1.016089970414775407e-05 -2.782083138790549868e-06 -1.939175146595596139e-06 1.280545125358809257e-06 -1.751461073787741292e-06 1.054397994483061984e-06 -1.029776904529695593e-06 2.603720635750851243e-06 -1.843470849677332688e-06 -3.285868975962145958e-06 2.620749911705252821e-06 1.705418342532179209e-06 1.565859072752698983e-06 -1.609300089288061926e-06 -3.547334399437753414e-08 1.810685726428372914e-06 -1.613043449561351195e-06 -7.021910115984665012e-07 -1.135239144986429012e-06 1.558726609517725083e-07 -1.631223295628799150e-07 -1.075647794979512148e-06 2.351241025409695108e-07 2.147865401885966521e-07 9.825262836479527427e-07 3.718288975547510311e-07 4.332903121024942731e-07 -2.798048580126361409e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-1.444024689562493609e-03 -7.700508081800569362e-04 -1.368272532333382691e-03 5.021273242985428795e-04 1.020509572499054277e-04 -2.404760466676691521e-04 -1.415973275824117239e-04 -1.933313376884351128e-04 2.596861749881037261e-04 5.430124259517040818e-04 3.274027618828997260e-04 1.321407559272677948e-05 -1.715927866854435196e-05 -1.015083499359514634e-04 -1.027671751514216465e-04 -8.896790069370060275e-05 -1.030581862258247744e-04 -1.150976633024325917e-04 -1.132490445764535711e-04 -1.169612546560399284e-04 5.908384257914070531e-05 3.117511928153476902e-05 2.464557817347243888e-05 7.107510720990766536e-05 1.023698271592309023e-04 -3.952972067252586764e-05 4.323715198092023933e-05 7.320998379218508268e-06 2.935383087498631062e-05 -1.087555307562278207e-06 4.370083191495560947e-05 -1.012426230431816701e-05 1.697192331549514672e-05 7.136541731530771237e-06 -4.061302338855376083e-05 -1.102687116430676679e-05 -1.450352502026317552e-05 9.679010072603353959e-07 -2.426278806542722122e-07 1.415663352786250091e-05 -1.062346626811649969e-06 7.073240452666525235e-06 4.032983912418701358e-06 1.444910009559585241e-05 1.147272234133177340e-05 2.882136142687736470e-06 4.598877234682472580e-06 -3.375070242283026249e-06 2.914306328892382108e-06 -4.623317697071467969e-06 1.656953692847807899e-06 -1.843470849677332688e-06 6.381815427648454320e-06 4.718559253256473442e-06 -3.986027295930950249e-06 -2.688710005630674433e-06 -9.426179518482667415e-07 3.093196486223183913e-06 -2.690514092164469542e-07 -2.927576588300960042e-06 1.497587544528397354e-06 1.429123480071990441e-06 4.813632140993974395e-07 1.178864428719920067e-07 -2.422749887842529828e-07 1.512565731365066565e-06 -1.886191442630988585e-07 4.258336874976450148e-07 -1.541153199274385434e-06 2.707059365850561205e-08 -9.235060554910595943e-07 4.118274312070683750e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+8.961119087039794671e-04 -4.251240495769562327e-04 -1.019984835938101114e-03 -2.414618813575055301e-03 -6.796400945814878424e-04 -5.551853017720563771e-04 -7.524497350096670404e-04 -7.321484812386399319e-04 4.178139323991092078e-04 2.751264187707396511e-04 -4.197429973490441387e-04 4.970276305833691956e-04 -3.789489734872249584e-04 -3.875181483215191821e-04 -2.014033601247824348e-04 7.438771471328983059e-05 -3.754814963187983075e-05 -1.261930131866746555e-04 -1.110141165884808990e-04 -3.445169180452193865e-05 6.948687280706797157e-05 6.355389627764696859e-05 2.008320983299538435e-04 3.972918025309603757e-05 1.234103292312075761e-04 -1.985209863670846320e-05 6.470867439153597377e-05 4.146182199234314423e-05 5.842537760117746194e-05 -1.190211285508807330e-05 9.436133224870364711e-05 -4.705395074426423242e-05 1.020880550852967391e-05 1.214625130173310566e-05 -2.239001785294139908e-05 3.333898583051274303e-06 -2.026768107366224783e-05 -2.078422167655400132e-06 7.412740665651100469e-07 2.369222652896436524e-05 -1.911137160487016754e-06 7.919505315857677669e-06 1.153015920914821118e-05 2.286104204180863323e-05 2.189169629585192348e-05 7.010691177348684640e-06 6.690483276952634707e-06 -2.337575233506946801e-06 5.883474801021724747e-06 -4.750652754041915879e-06 6.483710467658691176e-06 -3.285868975962145958e-06 4.718559253256473442e-06 9.672688775726297073e-06 -5.179451445156963750e-06 -6.221581491844940516e-06 -4.040491937231683393e-06 3.658212284481525167e-06 -4.014768952824643968e-08 -3.523051392901234644e-06 4.430460900238035969e-06 3.271253553295749486e-06 1.003592023520345874e-06 6.914051304331447348e-07 4.372430151221756035e-07 2.724324675352447177e-06 -9.808526583773796530e-07 3.648457548501457316e-07 -2.987430060670993418e-06 -1.061057584118697662e-06 -6.451307026894031458e-07 8.482046796619436662e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-2.864247778127231589e-04 2.447128421590097488e-04 3.933241409104297720e-04 1.264376639758373337e-04 4.611333450761655769e-04 1.994072185424956747e-04 2.315153918241995594e-04 3.328794623415409293e-04 -2.066262633278977189e-04 -2.479188002071436238e-04 -5.012709582171678201e-05 -1.106648806819851522e-04 1.284930642570246732e-04 1.566389073083454932e-04 1.540805575801159984e-05 -2.903357740954307925e-05 3.522918891502022152e-05 9.829875362053884394e-05 8.278639553576999308e-05 5.045860131227750123e-05 -4.775879807954449348e-05 -3.982260448260355889e-05 -7.815817313063548523e-05 -4.472319138962333222e-05 -8.587398173328237823e-05 2.984916631664480448e-05 -3.107127595486874574e-05 -1.710117236045339509e-05 -1.964316983865507357e-05 -9.709462844534395056e-06 -5.189646719407638674e-05 2.000601588891236926e-05 -1.022802388207869547e-05 -7.308100544518531364e-06 2.431184592572230771e-05 2.476749362949500278e-06 1.462629232776474269e-05 2.066964082206017694e-06 -3.308489402446947155e-06 -1.364615610053332868e-05 2.139252040758540964e-06 -5.399479194969076261e-06 -6.261544245082840230e-06 -1.498363731989965778e-05 -1.374385246317045761e-05 -4.140718472595640450e-06 -4.340387616433900202e-06 2.121848135503199373e-06 -3.209125777306771323e-06 3.561914850065346495e-06 -2.918671689243741997e-06 2.620749911705252821e-06 -3.986027295930950249e-06 -5.179451445156963750e-06 3.950499829060012952e-06 3.211579348283109678e-06 2.066663350080569649e-06 -2.804604493293587866e-06 1.872948777070972689e-07 2.706116065454659712e-06 -2.305944234858295912e-06 -1.574489598583802937e-06 -1.033875713584539823e-06 -1.089844370203106477e-07 -1.197760611193875035e-07 -1.667263527375392983e-06 3.874135517243814812e-07 -1.220588616646706117e-07 1.757916487526182713e-06 4.522218419323059412e-07 6.126073841398011643e-07 -4.819444633103938764e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-8.464465279258864307e-04 1.229124310444356695e-03 8.560688600932326144e-04 1.797333339895381939e-03 7.396731215819471374e-04 6.421932874547304788e-04 1.385093346708970183e-04 8.212394439028473979e-04 -2.548774159884048345e-04 -2.713262949381180472e-04 4.080012368100587711e-04 -3.797680244812201111e-04 2.851203361386337267e-04 3.055058350044929818e-04 2.143513887402765088e-04 1.522961526926271581e-05 3.760780923906276461e-05 9.433849002991393493e-05 3.548016452425954448e-05 6.071890491009686422e-06 -7.364840423994678390e-05 -4.575513403626815990e-05 -1.663406516150708848e-04 -1.238032433542634784e-05 -7.741443824593154316e-05 1.346281417309728781e-06 -4.452914318490555553e-05 -8.669988289556429683e-06 -5.417032141895257490e-05 1.039659930702087835e-05 -6.224839936889071826e-05 3.651256218174141129e-05 -3.924677753713201706e-06 -2.741621726529121970e-06 1.204083302239302892e-05 -7.987003688511803322e-06 1.392484503939799502e-05 1.364908270497994581e-06 4.814906546349460043e-07 -1.732841042848313161e-05 5.530440119588939438e-07 -6.261674525858754543e-06 -6.938164115755575699e-06 -1.596442897150091088e-05 -1.332237439723613659e-05 -5.446566939781176248e-06 -5.027878221884626228e-06 1.330613998634167611e-06 -3.877510155523458133e-06 3.763060497415580388e-06 -5.655981509599153290e-06 1.705418342532179209e-06 -2.688710005630674433e-06 -6.221581491844940516e-06 3.211579348283109678e-06 4.576458959700248942e-06 2.820709573828509445e-06 -2.457718493172810656e-06 9.765121131940949685e-09 2.262800743238458841e-06 -3.117711997642337299e-06 -2.564900743559410674e-06 -3.738348244264067441e-07 -9.023456634402493654e-07 -3.288630928719700085e-07 -1.892625624642474791e-06 8.270184774607554144e-07 -5.290899147998660109e-07 2.171430907103045016e-06 8.070943083083056657e-07 2.449333467384177252e-07 -5.911238294596699391e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-9.756760977634742647e-04 1.206556055791778166e-04 -5.614850361310474052e-06 1.109398921968313047e-03 6.127183030049979887e-04 2.268459615907873707e-04 2.544403137931774061e-04 4.343489815629223414e-04 -1.454051844275693344e-04 -4.173524619451913662e-05 2.906118615715132161e-04 -2.339368899992294972e-04 2.076412641980796188e-04 1.929761524776394592e-04 4.442715404391582714e-05 -6.819667277899345422e-05 -9.590977309758241776e-06 5.329500197236881215e-05 2.395673682887788938e-05 -1.213815566567590402e-05 -3.183819260697743454e-05 -3.513220848263571829e-05 -1.111370021819231704e-04 -5.728314305330774386e-06 -4.609227082655975816e-05 5.959694830935224293e-06 -2.265310399131617344e-05 -1.671470170324210506e-05 -2.161438175340583750e-05 -1.408013707447572600e-06 -4.309900716834277131e-05 2.347264212339972837e-05 -1.078125966336999745e-06 -3.102991560853553051e-06 2.867027949945631701e-06 -6.082712743557790222e-06 9.275560884924556533e-06 2.167598576720688378e-06 -2.468506915476454285e-06 -9.755164647601209142e-06 1.106357928396410959e-06 -3.022974674937096198e-06 -5.489216061217567638e-06 -9.620516779594095812e-06 -9.877169350830304099e-06 -3.489958304884681854e-06 -2.769214026303703186e-06 3.430217692163812584e-07 -2.565519014113158503e-06 1.820460817792525778e-06 -3.389528729965360737e-06 1.565859072752698983e-06 -9.426179518482667415e-07 -4.040491937231683393e-06 2.066663350080569649e-06 2.820709573828509445e-06 2.140865216661582855e-06 -1.427366368217538664e-06 -4.124616866734174811e-08 1.318115460623469290e-06 -2.170884134927336841e-06 -1.490768445814296790e-06 -6.724822694526814815e-07 -3.208527741106775289e-07 -3.344206919601681913e-07 -1.187510584279517615e-06 5.121505231084029935e-07 -7.118200681333569061e-08 1.325072347680664225e-06 6.451959739914996943e-07 1.124056139436556829e-07 -3.802152702742011288e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-3.647752939562452941e-05 -5.982927810114174825e-04 -5.169084691654691208e-04 -5.955108024460374020e-05 -3.875612569340887762e-04 -2.223383411733124862e-04 -1.830521657478542726e-05 -3.500987158077584118e-04 1.532391806602044623e-04 2.734508069355598816e-04 2.564591034887766358e-05 8.159629996139466761e-05 -9.178673244065273023e-05 -1.284935877549385551e-04 -4.514887168860532579e-05 -1.976404985515583046e-05 -3.799800755075941999e-05 -7.905207434952274208e-05 -4.825688537135270278e-05 -4.127316819706257665e-05 4.736178007677883028e-05 3.216209297280147640e-05 6.222884794804520896e-05 3.218844963970522033e-05 6.518161996065522561e-05 -2.029395041366274528e-05 2.569316755656437336e-05 3.995331409053667589e-06 2.146330299452465351e-05 5.163930837194974878e-06 3.748944313615323285e-05 -1.530876930630510477e-05 7.345126866473323740e-06 2.837240748603816887e-06 -1.927756400207750220e-05 -7.144381671121075155e-07 -1.105009309962866440e-05 -1.078217198981128033e-06 1.878451403649680725e-06 1.066065251489464527e-05 -1.358082820715775353e-06 4.647385575945352769e-06 3.974061668211252625e-06 1.109327673663470174e-05 9.437741545247710243e-06 3.123687817996285668e-06 3.474295984875394753e-06 -1.576095558695757116e-06 2.361127261225001846e-06 -3.088204781582648776e-06 2.501800703228239412e-06 -1.609300089288061926e-06 3.093196486223183913e-06 3.658212284481525167e-06 -2.804604493293587866e-06 -2.457718493172810656e-06 -1.427366368217538664e-06 2.121994613578282115e-06 -1.391394604477387179e-07 -1.960586520167081204e-06 1.651750144553522052e-06 1.299009689277231839e-06 6.109991152534232822e-07 2.233513800170228754e-07 5.810114992156379628e-08 1.226471668432847983e-06 -3.209796755609037273e-07 2.332753194032712167e-07 -1.324945247705549996e-06 -3.082668280780681431e-07 -4.207015347722937853e-07 3.562898732327273170e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+4.278344264430664436e-04 1.164977902888665536e-04 1.654993333653115880e-04 -9.319197974060024274e-06 -1.046716921732588962e-04 -1.154801572722961508e-04 -1.177644575281913038e-05 -1.130116428412040075e-04 1.424301882282700711e-05 -5.008977114734306516e-05 -1.700797368805104972e-05 1.726961695952565002e-05 -2.730721226785900552e-05 1.151653329240882028e-05 -1.559956736529611996e-05 2.398217858936560807e-05 -7.286690988683969754e-06 -1.170912993767467181e-05 -2.014119064777501315e-06 1.297918473183293150e-05 -1.739022101393012079e-06 2.351611783131146252e-06 1.737811944582189113e-05 -3.801764942571860164e-06 -3.883459328998760283e-06 2.541802357936133121e-06 3.096820590687096804e-07 9.978083517501553669e-07 1.097472540727606803e-06 3.452036096618240679e-07 1.252191167092071518e-06 -4.467412858776474094e-07 -5.968847734853612985e-07 6.291754235926314919e-08 2.293113640873436705e-06 1.180266878730879184e-06 -9.442254103996122026e-07 -5.739802866674219406e-08 5.021619230492306986e-07 1.363901822043775267e-07 1.395681512852135573e-06 1.556935327229822862e-07 5.627451737778124551e-07 9.190228564598966351e-08 -1.744242092803788552e-07 1.427779708549952225e-07 -2.418595629698586762e-07 2.540164245568345618e-08 -1.469967737568143296e-07 3.114812183521898988e-07 -7.683341459634192972e-09 -3.547334399437753414e-08 -2.690514092164469542e-07 -4.014768952824643968e-08 1.872948777070972689e-07 9.765121131940949685e-09 -4.124616866734174811e-08 -1.391394604477387179e-07 1.280819975055685659e-07 6.152421608225694934e-08 6.533632844831702479e-08 5.479491221781849323e-08 -2.354082642550371842e-08 1.052935526495300558e-07 1.885200358073118182e-08 6.717050013040078925e-09 -6.611498790736172998e-08 3.126060939594609605e-08 4.378566757183995197e-08 -2.658221837917943339e-08 2.483055840296865591e-08 -1.217219875083811203e-08 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-6.169017101996530960e-04 3.934508564326714671e-04 3.046880790726673124e-04 -2.081231757659247837e-05 3.382293471064566994e-04 3.883024271859300590e-04 2.446907488225337826e-05 4.087422435621750646e-04 -1.652235397087890671e-04 -1.550726952441152035e-04 -6.726275318846428248e-05 -7.952983547860629154e-05 9.161435896423319634e-05 8.610784700552452273e-05 6.997906267666731146e-05 -1.209882720752283555e-05 4.740544522842580718e-05 8.321222128147629904e-05 6.273269034200364971e-05 2.837607269731748960e-05 -3.874307642333332422e-05 -2.410416773984527317e-05 -6.551548653357978046e-05 -2.860069369014079827e-05 -5.708364169165551608e-05 1.486842645720343737e-05 -2.216039283795443821e-05 -6.236158305946444497e-06 -1.981844847869940077e-05 -4.923936818311940982e-06 -3.440682084594948235e-05 1.434357932981531756e-05 -7.535596496274431370e-06 -5.514026326871582559e-06 1.768219890474673252e-05 1.445941706861449538e-06 1.094873355643614071e-05 1.081595007448658467e-06 -1.331132845070079568e-06 -1.046525984277859629e-05 -1.989058803864267365e-07 -4.280487011281745532e-06 -4.477002761651396961e-06 -1.141090768556271121e-05 -8.802113822596226839e-06 -3.203179728794817852e-06 -3.071391167060676544e-06 1.872160918418307378e-06 -1.956292483009539031e-06 2.380886240347798927e-06 -2.124808128807146396e-06 1.810685726428372914e-06 -2.927576588300960042e-06 -3.523051392901234644e-06 2.706116065454659712e-06 2.262800743238458841e-06 1.318115460623469290e-06 -1.960586520167081204e-06 6.152421608225694934e-08 2.039729183683565556e-06 -1.589987643305573474e-06 -1.170201967326126821e-06 -5.008787775159576589e-07 -3.199702980284367967e-07 -3.308318185634876183e-08 -1.217600306085150577e-06 3.372820488770256206e-07 -2.443094538007762890e-07 1.250912130983744191e-06 2.766226050129901218e-07 4.412495922938877818e-07 -3.241867897344961420e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.079130082100790552e-03 -3.678931779718224424e-04 -2.135306201034897398e-04 -1.166760509179642870e-03 -6.319629589999666962e-04 -3.930092242162990554e-04 -2.110517031258192554e-04 -5.704366919558491846e-04 1.773999476661630855e-04 8.576108740456644822e-05 -2.809209171415647205e-04 2.573933471591039477e-04 -2.163422852775889966e-04 -1.942009767699294563e-04 -9.858476835379464140e-05 4.729272065444615949e-05 -1.350012440037236598e-05 -7.262269616274195211e-05 -3.272787576133775707e-05 8.331262365369322380e-06 4.044977077169832294e-05 3.634853596056908945e-05 1.249595678743513371e-04 7.964801404673995096e-06 5.195896048986034174e-05 -3.664472364811013231e-06 2.742509308260049841e-05 1.329767618899083223e-05 2.980423179699647133e-05 -1.183189705351824860e-06 4.626570801835959678e-05 -2.534332939671674192e-05 2.394867082775297257e-06 3.404384745888746358e-06 -5.878703003389204148e-06 5.866293946541508920e-06 -1.081791504272099396e-05 -1.773597557768825527e-06 1.645726579510317166e-06 1.160045306978643457e-05 -5.853055752114133955e-08 3.962421325826498033e-06 5.847792654192616114e-06 1.130423449019091326e-05 1.019701689666718185e-05 3.961397829228578143e-06 3.210818637135277918e-06 -7.900129071038328737e-07 2.692792218159663394e-06 -2.158441305987497869e-06 3.710297957041738367e-06 -1.613043449561351195e-06 1.497587544528397354e-06 4.430460900238035969e-06 -2.305944234858295912e-06 -3.117711997642337299e-06 -2.170884134927336841e-06 1.651750144553522052e-06 6.533632844831702479e-08 -1.589987643305573474e-06 2.316936104213538590e-06 1.706059707087408838e-06 5.280531158844842350e-07 5.143642416617753936e-07 2.946699177780015563e-07 1.346016725219559526e-06 -5.867895176446860112e-07 2.248310799444130483e-07 -1.479500812528560802e-06 -6.342364952525569928e-07 -1.751452776925217729e-07 4.119664053190312385e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+6.190047939003729977e-04 -8.986558550858034277e-04 -5.162774997913627999e-04 -1.006034125247487997e-03 -5.205848245358031751e-04 -4.734285813143009167e-04 1.737038247731891851e-05 -5.868538177124427071e-04 1.423947592396369595e-04 1.769664599625240956e-04 -2.464440007424115175e-04 2.230769955928124323e-04 -1.732638640541950530e-04 -1.701672803557519997e-04 -1.547083441551266078e-04 -2.952024135095532626e-05 -3.381407687621824644e-05 -6.338714832564684416e-05 -1.115510852775361141e-05 1.730804189942077524e-06 4.839918593244457295e-05 2.700482553860230595e-05 1.047970812976505299e-04 3.425780415251275803e-06 4.068185142951697043e-05 3.117057916943555314e-06 2.689783546719265786e-05 -1.532151181107127183e-06 3.609775553776042401e-05 -7.628323280116127415e-06 3.380181836686178192e-05 -2.086505980371535947e-05 1.788177715733127806e-06 -3.701625436715135731e-07 -6.112934713502807634e-06 5.928175575070123599e-06 -8.197258708627354093e-06 -4.765796079956568955e-07 -5.845286673326124633e-07 1.010677114815098564e-05 7.644322134427383022e-07 4.011513952685078814e-06 3.661322885777267821e-06 8.850913096854469371e-06 6.496454758418873856e-06 3.164632210695487641e-06 2.801136943885153694e-06 -7.304907271886556930e-07 2.014323166199261004e-06 -2.199449246741858755e-06 3.367623979336610056e-06 -7.021910115984665012e-07 1.429123480071990441e-06 3.271253553295749486e-06 -1.574489598583802937e-06 -2.564900743559410674e-06 -1.490768445814296790e-06 1.299009689277231839e-06 5.479491221781849323e-08 -1.170201967326126821e-06 1.706059707087408838e-06 1.545774844130071967e-06 3.237902120145317280e-08 6.912422458394094178e-07 1.652557430263761312e-07 1.040203878685845432e-06 -5.220438373871647083e-07 4.322258460435611765e-07 -1.190525910562934544e-06 -4.365924896381269316e-07 -9.463326753003680938e-08 3.168713330548058070e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+2.818281733047701311e-05 6.643350647300518864e-04 4.125334398601202533e-04 5.069567595259139806e-04 -2.277651550409889435e-04 2.751146519578616346e-04 -1.930093891928036170e-04 1.373423637410903220e-04 1.785027403686350716e-05 6.358159169554963250e-06 8.286101392258520351e-05 -4.145263907774513271e-05 -2.232957947620680493e-05 -2.339445347895309526e-05 1.505219805254211696e-04 8.400983156904637120e-05 2.918444484219841353e-05 -1.357777875004645048e-05 -2.158586750201944908e-05 -1.358537293691424296e-05 -4.694375939188052714e-06 2.097276772660508921e-05 1.359872113459600767e-06 1.940351985619487247e-05 2.377769038147314323e-05 -2.359239972260627580e-05 4.066557742105795237e-07 1.701090009507083236e-05 -1.673640655910415894e-05 1.578285756380876529e-05 1.508575400122563936e-05 -5.988971778615703879e-07 1.949078437102915484e-06 2.294900239893299961e-06 -4.217305006095327628e-06 -1.344823803488316267e-06 -4.520218891688836376e-06 -1.793829506858857287e-06 5.285486341164765641e-06 6.825416653715177304e-07 -2.494994158508781376e-06 2.868649521387481056e-07 2.022587020424334970e-06 2.113159837487578682e-06 5.248028022361048417e-06 4.295143794725945575e-07 4.937447913884027109e-07 9.389201192268997458e-08 8.786604776940826638e-07 -5.629709558485042845e-07 -8.133314301264890880e-08 -1.135239144986429012e-06 4.813632140993974395e-07 1.003592023520345874e-06 -1.033875713584539823e-06 -3.738348244264067441e-07 -6.724822694526814815e-07 6.109991152534232822e-07 -2.354082642550371842e-08 -5.008787775159576589e-07 5.280531158844842350e-07 3.237902120145317280e-08 9.844355242158493616e-07 -5.369272456745101662e-07 1.264610290927072623e-07 2.773614787472243069e-07 4.693179826079660192e-08 -3.757772904475181683e-07 -2.238910614736542455e-07 -1.652226890956848718e-07 -1.671097967152165881e-07 9.644210845622574173e-08 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+9.609878685960862848e-04 -8.422135278248198435e-04 -2.122226495076474830e-04 -5.895327056902730694e-04 -3.012706984038758767e-04 -5.347046413702953018e-04 2.420012816257650695e-04 -5.062969973488864669e-04 4.638409297047827546e-05 1.949052086431426307e-05 -1.554134880401983939e-04 1.177414519606735405e-04 -8.050780145039284730e-05 -4.502432519145212989e-05 -1.638680526033623234e-04 -4.807114097661961390e-05 -3.818976764690451472e-05 -3.424479931692787205e-05 1.101095964996979488e-05 2.409725031316143024e-05 2.817619471277102867e-05 4.997823791640521698e-07 6.111366215037772331e-05 -1.457526547360924839e-05 -3.279271768332279236e-07 1.848159946157711304e-05 8.829015911065418360e-06 -1.523210546967050076e-05 2.611831632897302510e-05 -8.598321084319601780e-06 5.614313206646561022e-06 -9.637710714554720788e-06 -8.655682791416388824e-07 -1.913759436627111031e-06 1.156119728519702807e-06 5.077762950940495802e-06 -2.188892631384336935e-06 3.265944023673884129e-07 -2.568587223661355899e-06 4.493145665000338146e-06 2.905825468077906112e-06 1.873495983140783944e-06 8.462438902755486208e-07 3.446030961292402225e-06 -4.166059706029630658e-07 1.616690627601893040e-06 9.169862365809033600e-07 -4.411985690207469642e-07 1.402335452626340010e-07 -4.718595582111062624e-07 1.675973928333784768e-06 1.558726609517725083e-07 1.178864428719920067e-07 6.914051304331447348e-07 -1.089844370203106477e-07 -9.023456634402493654e-07 -3.208527741106775289e-07 2.233513800170228754e-07 1.052935526495300558e-07 -3.199702980284367967e-07 5.143642416617753936e-07 6.912422458394094178e-07 -5.369272456745101662e-07 7.605789178327284012e-07 1.418295811485823363e-08 3.501865469762592105e-07 -3.196570533600927594e-07 4.630537272896599901e-07 -4.122747577448550149e-07 -1.290419720095190119e-07 8.035156245125613078e-08 7.679963110009508542e-08 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+2.295267939094641546e-04 2.382580677210656544e-05 8.274816585958972204e-05 -2.587150484819884084e-04 -1.229826903864710236e-04 -7.542974327072151297e-06 -4.332019741084994040e-05 -5.608414806902770175e-05 7.346248575918590066e-06 -2.164654935748751868e-05 -7.932729298327602168e-05 4.246329822297799291e-05 -3.664437981076439262e-05 -3.144613366287266401e-05 3.011594955519207173e-06 1.746409068140498570e-05 1.071219150162841299e-05 -6.271314874292827288e-07 4.113848447189669721e-06 8.839937295179304308e-06 1.706415187433887983e-06 4.813994611635240856e-06 1.749185054416228763e-05 -3.235758777958224096e-06 2.182149862595999134e-06 8.524233667140363205e-07 1.209951174471548300e-06 3.092951924829124510e-06 1.400950562742646336e-06 5.708194263860368086e-07 5.205934084507476042e-06 -3.580186081461951709e-06 -9.546483302030205814e-07 3.028951594111010033e-08 2.152052358296868830e-06 1.788502990387381573e-06 -7.121247743544521665e-07 -4.795630528262782521e-07 5.911841397468341472e-07 7.337635060687008703e-07 -3.630507234047271527e-07 3.206360733619750699e-08 6.989193149268232001e-07 6.900464664618175632e-07 1.161915142443018902e-06 4.082027279735085798e-07 2.071990301677956430e-07 2.061474817143951820e-07 3.153753308885258713e-07 -4.708774646964350196e-08 4.921648471345376190e-07 -1.631223295628799150e-07 -2.422749887842529828e-07 4.372430151221756035e-07 -1.197760611193875035e-07 -3.288630928719700085e-07 -3.344206919601681913e-07 5.810114992156379628e-08 1.885200358073118182e-08 -3.308318185634876183e-08 2.946699177780015563e-07 1.652557430263761312e-07 1.264610290927072623e-07 1.418295811485823363e-08 8.303216573390880032e-08 1.083432172912870196e-07 -6.944579551612665824e-08 -3.527084626982077290e-08 -1.316343535257127021e-07 -1.204839562803103417e-07 3.862573728065906331e-08 4.327575135289293103e-08 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+5.580392661284996371e-04 -3.531540171926622791e-04 -2.181493946457583211e-04 -4.634401566693654908e-04 -3.414273330987119119e-04 -2.967869383235858236e-04 -6.412510133169684490e-05 -3.675312950472417266e-04 1.186915534952127222e-04 9.583075395224556134e-05 -8.974607311011348083e-05 1.239391176102544554e-04 -1.101407510512106974e-04 -1.013235960799303072e-04 -7.304101701076040230e-05 1.103481550083112451e-05 -2.412684349136371963e-05 -5.488623407373613178e-05 -2.961329832319939138e-05 -6.206150197116174429e-06 2.968321568296398508e-05 2.064771220062671449e-05 6.893237602102621588e-05 1.112598383396138746e-05 3.691953320679626505e-05 -4.735025456121083780e-06 1.804907980220542488e-05 4.858289063490108892e-06 1.959476315135822108e-05 -1.404691564408958912e-07 2.756501480916861564e-05 -1.411808669071131051e-05 3.247663731768738397e-06 2.503808698320056548e-06 -7.905891003495266577e-06 1.957469775509698313e-06 -7.400343796500760958e-06 -8.572387720501610372e-07 6.847770918536194793e-07 7.794045247850907022e-06 2.733974071616375142e-07 2.968359228022742138e-06 3.453476701392859521e-06 7.804546864240200063e-06 6.255331325275291722e-06 2.502096210697561868e-06 2.195690276996135189e-06 -9.091417055326498837e-07 1.592868814401295604e-06 -1.621182518149477748e-06 2.124741192214210916e-06 -1.075647794979512148e-06 1.512565731365066565e-06 2.724324675352447177e-06 -1.667263527375392983e-06 -1.892625624642474791e-06 -1.187510584279517615e-06 1.226471668432847983e-06 6.717050013040078925e-09 -1.217600306085150577e-06 1.346016725219559526e-06 1.040203878685845432e-06 2.773614787472243069e-07 3.501865469762592105e-07 1.083432172912870196e-07 8.834511553743324937e-07 -3.376650019860001406e-07 2.073943066255195998e-07 -9.505831856041918227e-07 -3.132164614613367280e-07 -1.958439367833677765e-07 2.539848026137207434e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-5.315420357003356735e-04 2.682198201414081164e-04 5.357926619562840943e-05 3.674312314872735631e-04 2.273362831969488219e-04 2.297955253836822293e-04 -2.786727367576333878e-05 2.587669698167485645e-04 -4.531752882718998512e-05 -1.400937237103345347e-05 1.002567567476807784e-04 -8.385444860698813255e-05 6.845626704766919846e-05 4.898765424359943798e-05 6.258740786674220011e-05 -8.262883536709441980e-08 1.253491280423375008e-05 2.392357020591055552e-05 1.677888984873861993e-06 -1.099085940953485781e-05 -1.498008435521469495e-05 -7.835524427522651912e-06 -4.346491983334417299e-05 3.418760597799435933e-06 -8.627071711881909884e-06 -4.700024548426196912e-06 -7.524772052778355714e-06 1.514019671119549726e-06 -1.268797995598120692e-05 2.422721831154066190e-06 -1.040127265533171532e-05 7.469338312344688899e-06 1.239047338506378742e-07 1.382027345221392177e-07 -5.583867029207058747e-08 -2.963416202114559072e-06 2.754419246429198776e-06 2.716388685215162655e-07 2.059826954265711963e-07 -3.319241788662588442e-06 -9.665305065538302648e-07 -1.245790702972656555e-06 -1.388953618726526472e-06 -2.929940477000970144e-06 -1.731187758662104372e-06 -1.185267581962648641e-06 -7.793384914738346457e-07 2.036425027179706819e-07 -5.144973849797472816e-07 4.753974273269954999e-07 -1.177852082179017883e-06 2.351241025409695108e-07 -1.886191442630988585e-07 -9.808526583773796530e-07 3.874135517243814812e-07 8.270184774607554144e-07 5.121505231084029935e-07 -3.209796755609037273e-07 -6.611498790736172998e-08 3.372820488770256206e-07 -5.867895176446860112e-07 -5.220438373871647083e-07 4.693179826079660192e-08 -3.196570533600927594e-07 -6.944579551612665824e-08 -3.376650019860001406e-07 2.124393231444417436e-07 -1.701482461536939049e-07 3.747274027873016332e-07 1.669279261624370374e-07 -7.795072051892597802e-09 -9.473445100276057431e-08 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+3.105891209168311308e-04 -6.816781157892096155e-04 -2.982760727724785696e-04 -2.448136261292540509e-04 -1.444089206042048783e-04 -3.230854200382767580e-04 1.953436422973198801e-04 -3.000441450680023858e-04 2.966063375459098217e-05 7.498769918492608052e-05 -5.748580610106335069e-05 5.527428574179398175e-05 -3.261142130610199464e-05 -2.586770301034950381e-05 -1.082117074699512452e-04 -5.452579764039189135e-05 -3.167150562663330430e-05 -2.369261888117398920e-05 6.872670257777988186e-06 4.205784704953239896e-06 2.182520979866108807e-05 7.880189997979381831e-07 3.011062711617032802e-05 -4.682519895680275368e-06 3.667819974991981131e-06 9.354806367504218072e-06 7.204325110679516539e-06 -1.236184731011569861e-05 1.782002123857340893e-05 -6.089931177946056856e-06 2.674628025521483561e-06 -4.893387598911064119e-06 3.375959516802242957e-07 -1.866643085103470539e-06 -1.948489579976874265e-06 2.285544417533018403e-06 -1.478233467050837911e-06 5.466909777618913965e-07 -1.877485817558192944e-06 2.949532782914500818e-06 1.607001181889887493e-06 1.490334204550915240e-06 9.477599942483534111e-08 2.150981448188072515e-06 -4.425739901996717051e-07 8.774470799514677198e-07 7.168292527881375623e-07 -4.029148883813514869e-07 1.050991465490803802e-07 -6.034572823031384464e-07 9.794430374977572633e-07 2.147865401885966521e-07 4.258336874976450148e-07 3.648457548501457316e-07 -1.220588616646706117e-07 -5.290899147998660109e-07 -7.118200681333569061e-08 2.332753194032712167e-07 3.126060939594609605e-08 -2.443094538007762890e-07 2.248310799444130483e-07 4.322258460435611765e-07 -3.757772904475181683e-07 4.630537272896599901e-07 -3.527084626982077290e-08 2.073943066255195998e-07 -1.701482461536939049e-07 3.325710548617902510e-07 -2.523410326951177269e-07 -2.409377188374546319e-08 1.399540700057668182e-08 4.512172827113042028e-08 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-3.727824087911550568e-04 5.838583736189928734e-04 3.905792364323644656e-04 6.904644320631980530e-04 3.502878522383107000e-04 2.919044781840080178e-04 4.301171552185920234e-05 3.837449460455872913e-04 -1.199678218197835426e-04 -1.365032371695977130e-04 1.441556942788149449e-04 -1.516473257118092558e-04 1.191873877090309593e-04 1.313762786545367523e-04 9.118261053711964898e-05 1.031574862174494287e-05 2.138650756828288323e-05 5.029700999785894169e-05 2.245775810930749321e-05 7.280412361667163857e-06 -3.565504276687142781e-05 -2.155250999479986628e-05 -7.396075841911611392e-05 -9.269779821215523431e-06 -3.957574539235436629e-05 2.780785246132303761e-06 -2.012872919268360905e-05 -3.006006897108690873e-06 -2.426534125996547553e-05 2.609179499638542356e-06 -2.949137234165936239e-05 1.674535861864697502e-05 -2.758980642931760011e-06 -1.787551810868550063e-06 7.879491266596105349e-06 -2.908806574047764107e-06 7.102630811025131088e-06 8.075929895951456615e-07 2.787930686689427851e-09 -8.532069981360760933e-06 3.658469103455231337e-07 -3.141582332661747018e-06 -3.334366138122170089e-06 -8.243786852794770921e-06 -6.583893727190819142e-06 -2.714595514723615007e-06 -2.551645834044204410e-06 8.497834199086664439e-07 -1.836955692941085523e-06 1.928637139865854643e-06 -2.618825049467742968e-06 9.825262836479527427e-07 -1.541153199274385434e-06 -2.987430060670993418e-06 1.757916487526182713e-06 2.171430907103045016e-06 1.325072347680664225e-06 -1.324945247705549996e-06 4.378566757183995197e-08 1.250912130983744191e-06 -1.479500812528560802e-06 -1.190525910562934544e-06 -2.238910614736542455e-07 -4.122747577448550149e-07 -1.316343535257127021e-07 -9.505831856041918227e-07 3.747274027873016332e-07 -2.523410326951177269e-07 1.080574424633334636e-06 3.637063025925489903e-07 1.642863979219542062e-07 -2.876957992033536281e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+-4.065618679394270886e-04 8.236515472408407828e-05 -3.053091997332592019e-05 4.044698707275740470e-04 2.218242144966101937e-04 8.333152384976322876e-05 5.206229048447722196e-05 1.509100524129397417e-04 -3.192562675352125106e-05 4.579691092921398039e-06 1.175654119172519119e-04 -7.963193715660975822e-05 6.758111595659033351e-05 5.956652853712097594e-05 1.663750071311323125e-05 -1.967508694630980544e-05 -6.464195128284297281e-06 1.202569628670462827e-05 -2.641726103323011439e-07 -1.059320910487804246e-05 -9.019950713535521624e-06 -9.878143351485325893e-06 -3.665168900447145645e-05 2.537986766628896710e-06 -9.487116763320696108e-06 -1.057715179344046737e-06 -5.363924043127883849e-06 -3.706203189320364598e-06 -6.775574730600470551e-06 -9.517390056273872338e-08 -1.164242206705045290e-05 7.429608291530769151e-06 6.641776865761746599e-07 -3.013488480233703126e-07 -1.232446194142769427e-06 -2.860501407617433755e-06 2.286482093831679833e-06 7.054773758388763858e-07 -6.728395584106437630e-07 -2.559617824564432568e-06 2.549259421130587456e-07 -6.934444253678881195e-07 -1.499228545909969905e-06 -2.443885427018496430e-06 -2.515662274499372568e-06 -1.029811962493573144e-06 -7.083146078245313091e-07 -6.115123991309164014e-08 -6.809883126650592243e-07 3.988932017683496332e-07 -1.100819409228126838e-06 3.718288975547510311e-07 2.707059365850561205e-08 -1.061057584118697662e-06 4.522218419323059412e-07 8.070943083083056657e-07 6.451959739914996943e-07 -3.082668280780681431e-07 -2.658221837917943339e-08 2.766226050129901218e-07 -6.342364952525569928e-07 -4.365924896381269316e-07 -1.652226890956848718e-07 -1.290419720095190119e-07 -1.204839562803103417e-07 -3.132164614613367280e-07 1.669279261624370374e-07 -2.409377188374546319e-08 3.637063025925489903e-07 2.138044558169515873e-07 -2.073349235477066993e-08 -1.039739484659833556e-07 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+1.284437493004002675e-04 -1.659019626392819633e-04 5.285111519227582232e-05 -2.093652980207591518e-04 -4.713838284201491395e-05 -1.263008079194895969e-05 8.274412408941154951e-05 -3.152033391106525063e-05 -4.080204655220601227e-05 -3.923515742795041572e-05 -8.561841165571346991e-05 1.695685455514628240e-05 -3.869676706766434773e-06 -2.209112295969237752e-06 -1.094373204478781997e-05 -1.102828314044841943e-05 1.125386429729200394e-05 1.631703500261672758e-05 2.506983046276114026e-05 1.762029255048584790e-05 -1.927180635361201216e-06 -4.014266191500382714e-06 2.482641239272415903e-06 -1.283994696546267278e-05 -1.452514558753619001e-05 8.583475687027740820e-06 -4.734013888028378455e-06 -5.183400175509628066e-06 6.160655990713519977e-07 -1.935110762279943310e-06 -6.115248721708328273e-06 -5.540345183124908851e-08 -2.937681147608335843e-06 -2.256296329196521833e-06 6.034361051763264398e-06 2.653816167222156197e-06 2.207581058065775442e-06 2.548934493415157755e-08 -5.512449774399507191e-07 -1.551784977278313577e-06 1.140847154687163659e-07 -7.651431845921730587e-07 -8.462155261383484718e-07 -1.921276949844956293e-06 -1.902153501175332396e-06 -2.711318268477722743e-07 -4.572128154508208419e-07 5.262549842241051362e-07 -3.528808483551804227e-07 4.481606569169479549e-07 1.101729431419549878e-07 4.332903121024942731e-07 -9.235060554910595943e-07 -6.451307026894031458e-07 6.126073841398011643e-07 2.449333467384177252e-07 1.124056139436556829e-07 -4.207015347722937853e-07 2.483055840296865591e-08 4.412495922938877818e-07 -1.751452776925217729e-07 -9.463326753003680938e-08 -1.671097967152165881e-07 8.035156245125613078e-08 3.862573728065906331e-08 -1.958439367833677765e-07 -7.795072051892597802e-09 1.399540700057668182e-08 1.642863979219542062e-07 -2.073349235477066993e-08 1.686091769640819167e-07 -4.733607263756243156e-08 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+5.971855691692778257e-05 -1.190544512946899207e-04 -1.028831685821939240e-04 -2.030131803318480489e-04 -8.863960017866568026e-05 -5.500167373470180108e-05 -3.448450156663372961e-05 -8.601380473427661653e-05 3.304973508223608302e-05 3.715300410740473907e-05 -4.330943989420773821e-05 4.318576738557640984e-05 -3.395330272608326056e-05 -3.887832644184033862e-05 -1.840132810766053652e-05 1.009513262184747910e-06 -3.279097190529021467e-06 -1.225753753039942129e-05 -6.491311546372692975e-06 -2.856760980796503435e-06 8.738700086174951658e-06 6.539603459619405530e-06 1.938933017776950794e-05 3.087536976312557659e-06 1.141352144914724086e-05 -1.659893884481251963e-06 5.646142497610599296e-06 2.083226006221070852e-06 5.822975947004847651e-06 -5.571924755037483409e-07 8.537378672080554018e-06 -4.516055978757111959e-06 7.485383834480511158e-07 5.835247193119556621e-07 -2.066701051634804764e-06 6.942865891642761306e-07 -1.928715511090751001e-06 -2.465542904164077415e-07 1.501595337672258894e-07 2.225555119456500632e-06 -2.639406416367486161e-07 7.968122185065661467e-07 9.531474666883881774e-07 2.140766307316300918e-06 1.960779243665265307e-06 6.952706117080231386e-07 6.777934399011767929e-07 -1.876227190431781646e-07 5.440266063005058542e-07 -5.201149921661059864e-07 6.850225531593700684e-07 -2.798048580126361409e-07 4.118274312070683750e-07 8.482046796619436662e-07 -4.819444633103938764e-07 -5.911238294596699391e-07 -3.802152702742011288e-07 3.562898732327273170e-07 -1.217219875083811203e-08 -3.241867897344961420e-07 4.119664053190312385e-07 3.168713330548058070e-07 9.644210845622574173e-08 7.679963110009508542e-08 4.327575135289293103e-08 2.539848026137207434e-07 -9.473445100276057431e-08 4.512172827113042028e-08 -2.876957992033536281e-07 -1.039739484659833556e-07 -4.733607263756243156e-08 8.012387488057421074e-08 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 6.171610205890385004e+01 6.138725528407092957e-01 6.051985242262117204e+00 -7.099352891006426303e-01 -3.254790542318478086e-02 -1.995593860061603553e-01 4.922963494191650025e-01 -5.358410097825231366e-01 -4.542178610391035820e-02 6.446493187890307563e-01 1.808106467768729186e-01 -1.879563971277720125e-01 -2.407681308346238114e-01 -4.218296569368950449e-02 -1.184450600737501670e-02 -6.928899595091043973e-02 -2.314328394561051064e-02 -1.010290524722116424e-02 -1.867650347404954322e-02 -8.086492966917406197e-03 -3.179099939464212488e-03 2.692206271712309362e-02 -3.716463874820270623e-03 2.435999345302212321e-02 -1.855317653362035038e-02 -3.916002264634483622e-03 1.961426584621759912e-03 1.379483211838189970e-02 -7.656023950829765265e-03 3.873034561114936658e-03 6.859048760585152553e-03 2.459744844100453850e-03 8.623712329662630485e-04 8.200720604920498993e-03 -4.962248305341734151e-04 2.155067622225258391e-03 -1.085927798701154245e-03 -9.174919813057303471e-04 2.688954467356825784e-04 8.173338144558544866e-04 9.479600814782030609e-04 1.518845556756011740e-03 5.483009342941916461e-04 -1.197985824812628722e-03 3.508727782263668235e-04 6.764947375475536618e-04 6.306790547611602167e-04 4.876088149690555626e-04 -2.608135762638004972e-04 2.860754560132674584e-04 2.207174968183846578e-04 1.893092101203785885e-04 -2.447489546475116852e-04 4.300228571694425051e-05 7.921976770215414132e-05 -7.911798454858443225e-05 -1.366093265264323501e-04 5.193283897093149476e-05 4.030193279876571194e-05 -6.123426162489244888e-05 -3.540274696008339036e-05 -8.647178073554945026e-07 2.829028106930004177e-05 -4.570226560718281065e-05 -3.280438509074233420e-05 7.186743291255005798e-06 -3.516281247233987301e-06 1.085044567018165149e-05 
-2.007248700368360647e-05 -5.295553162504519813e-06 4.122111153218879565e-07 -3.161746684132797919e-06
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 6.138725528407092957e-01 2.349060023774982042e+01 6.787033242426361301e-01 -2.963483316520625910e-01 4.852413495239171204e-01 2.492589112755692615e-01 -1.090332244798968425e-01 -9.819225986247018256e-02 1.860973548664426602e-01 -2.476753075192492364e-03 7.828318504444251036e-02 5.922150297877085828e-02 -4.490960216424789986e-02 -7.855375238176477704e-02 -2.203889846035033198e-02 -1.114645866418713036e-02 7.139725802889677897e-03 7.953197102929781409e-03 -1.099099467253558844e-02 2.170026299784197713e-03 1.313678162907609692e-03 3.129994640285305336e-03 -6.800850072633726068e-03 -6.428282939916199999e-03 6.266700245703227777e-03 -6.067260715852028363e-03 1.115941082599483745e-03 -1.396379023468275827e-03 -2.423758407962214607e-03 1.276625619007184847e-04 -2.289587561184892438e-03 -2.066377076444324184e-05 6.608127734306666135e-04 -7.884496315363308120e-04 1.884541811965423060e-03 -1.380882336863154284e-03 9.773706906995947158e-04 5.288017268068624915e-04 1.806362606807766032e-04 -7.617746781500025176e-05 -7.535002575803852361e-04 -9.496218659871184071e-05 -2.588207834887589973e-04 2.344236943892276923e-05 7.998107364222783295e-05 -1.306611249122533532e-04 -7.032096143211686646e-05 -2.791938146040060572e-04 -1.030727866645449065e-06 -3.156305908982825462e-05 -9.416422033959063800e-07 8.729346800827646679e-05 -2.325496455731076726e-05 1.476121983403341457e-05 -1.198720387177489829e-06 -8.472052786346177494e-06 -2.086726260716456291e-06 1.305057917378352134e-05 1.705204147931304874e-05 -2.382961695293209979e-05 -1.049045148166791454e-05 -1.984277781703176489e-05 1.944921768762868179e-05 7.938510256294796510e-06 -4.343349351053582001e-06 1.495891802290004830e-05 1.992834674433305802e-05 9.781241534505454783e-06 
-1.291133773362803928e-06 5.442262500731319240e-06 4.509456850882449668e-06 -1.030483147388550617e-06
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 6.051985242262117204e+00 6.787033242426361301e-01 2.265958570856846777e+01 -1.987739970442822024e+00 4.246944903979951169e-01 -1.299377772427932343e-01 6.333722852519021451e-01 -8.317730172411923195e-02 8.650405589413229235e-02 5.167214169734140050e-01 2.348566603962279620e-01 -2.264645284984671270e-01 -2.998861469254355816e-01 -3.674637508355390464e-02 -4.786744050147226544e-03 -3.433939784802754469e-02 -3.875496288806829642e-02 -4.154055695242609564e-02 -1.502694183874799840e-02 1.287632953748129878e-03 -1.628824760070252467e-02 2.046943851606475642e-02 -3.980856425898263336e-03 1.816575145232657285e-02 -2.260077600759738056e-02 1.782993145803323269e-03 -3.031765251208297937e-04 1.327211202166610378e-02 -9.696093218920108850e-03 4.891419642007533222e-04 5.859269966536665222e-03 3.916619596057983876e-03 -1.264826368334277069e-04 6.642541907409913772e-03 -1.190319643811191871e-03 1.601381672276517686e-03 -1.655224502893727591e-03 -1.306269580295902786e-03 1.835583799984067982e-04 1.165215422154015131e-03 1.224958061405036170e-03 1.568707600725005461e-03 7.965590525515912944e-04 -1.022309298684174073e-03 1.706247875115830695e-04 6.022402369423375838e-04 5.216720730557096366e-04 5.800935098855347855e-04 -2.947047319011987347e-04 2.868423323168219985e-04 3.018829008353570548e-04 1.293252209069996285e-04 -2.466032303862507910e-04 5.118090102464685552e-05 8.869939690229217285e-05 -7.044373523278919097e-05 -1.569195688554354871e-04 4.730353989668989331e-05 4.758552861056551621e-05 -3.192421682876966211e-05 -3.853971097288183675e-05 1.882503195926090157e-06 3.108887283344801246e-05 -4.759406903307133156e-05 -2.869546955544409657e-05 2.930061778180938451e-08 -1.101203020547942824e-05 7.718093365400439297e-06 
-2.626731872376759428e-05 -7.116804599121627626e-06 -9.901080463009747570e-07 -4.192148453277030133e-06
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -7.099352891006426303e-01 -2.963483316520625910e-01 -1.987739970442822024e+00 4.952870685866857414e+00 9.513988735388420603e-02 9.397460929068364444e-02 -1.253810337754111270e-01 6.243209406712477844e-02 -3.986469576862872177e-02 -1.222101997864808026e-01 -8.267036457223746193e-02 2.646213934245877919e-02 1.280539369826423635e-01 2.420968775319610328e-02 2.045107667186231784e-02 2.127367547077535156e-02 2.230341027418606542e-02 1.224807504030877951e-02 7.096089158303382424e-03 -1.776439619886364995e-03 5.657054541087331960e-03 -1.090035309174220346e-03 3.148674767432082470e-03 -6.230487835484074984e-03 9.266754937166011771e-03 4.271256944268427848e-04 -1.476806385973291193e-04 -2.570305171456441651e-03 2.907758513954590996e-03 7.050605490800735927e-04 -1.762903629297214291e-03 -1.260627616950483814e-03 6.708911820589068069e-05 -1.875080421848507621e-03 7.338179669298882605e-04 -4.134408128410163369e-04 6.234686898944534830e-04 3.109436448481413501e-04 -1.498344237680553439e-04 -3.984244383030050886e-04 -3.179923538187708456e-04 -4.575475998708749669e-04 -2.399203394430934725e-04 1.957288266084472659e-04 -2.039683793529065191e-05 -1.113649231132346148e-04 -1.661255559548034742e-04 -1.950803455168761871e-04 1.153937357392687826e-04 -7.432780230046229404e-05 -9.182450807080003836e-05 -6.701449858117690622e-05 9.163507154272652205e-05 -1.448637271028778137e-05 -3.383942663067263584e-05 3.197138926804353562e-05 6.182566649155243031e-05 -1.105883034174681005e-05 -2.988948902017643554e-05 9.469614553325014691e-06 7.631446310161534228e-06 8.032200875169178569e-06 -1.751672063897858822e-05 1.485528605159619028e-05 1.550313538489669733e-05 -4.281262605362500213e-06 -3.861948110716982535e-06 -1.757989961223755028e-06 
5.600039129933297027e-06 1.226962407725760877e-06 -7.923939218379101901e-07 -4.526732459805152895e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -3.254790542318478086e-02 4.852413495239171204e-01 4.246944903979951169e-01 9.513988735388420603e-02 4.549657064628574332e+00 2.386111803489320560e-01 2.022007409580648535e-02 -1.064042384869845204e-01 5.181102428180909408e-02 -4.384272916013899624e-02 2.871945099969961182e-02 -3.057159479712759018e-02 -1.350451288416053622e-02 -2.789887764143550095e-02 4.990762965367843106e-03 5.971266459747406191e-03 1.203537092994293525e-02 2.940571028908996621e-03 -8.845344290973528034e-03 1.251727113446670899e-03 -4.204353811565990709e-03 -8.845449937712742544e-03 -1.427256574343709755e-03 -3.315342074928036634e-03 1.742051632048351043e-03 -3.440408587652007077e-04 1.626404982145517551e-04 -9.259165204774062264e-04 -9.223362785273737994e-05 -4.071991486931637546e-04 -2.090702825279103574e-03 -2.633703315170182854e-04 3.326612374644468068e-04 -1.812632608739227214e-03 9.830962311733130576e-04 -8.132202820873179376e-04 1.224434480423853850e-03 4.597299578832127198e-04 1.815483159931088398e-04 -4.989111164926325918e-04 -3.664481562893740331e-04 -2.144726938231217609e-04 -2.572500533951217576e-04 2.897270067419454751e-04 3.847942857561093563e-05 -2.092620685585261419e-04 -1.283616488059200812e-04 -2.200454153016562762e-04 -1.951886707610114675e-05 -1.287634508994955841e-04 -3.001391282702328488e-05 3.478822809372388730e-05 4.485919123229179930e-07 -2.166437761715150461e-05 -4.489077262878641498e-05 -3.351763805884130585e-06 8.932572823863872939e-08 6.158866306933454851e-06 1.456050878924346965e-05 -3.433938999049238844e-06 -7.884007815521751863e-07 -2.411226103936548980e-05 2.656817071957624942e-05 1.328632869806629799e-05 -2.956145215406492982e-06 1.409693082616226133e-05 1.993678211976121189e-05 7.417341896346888420e-06 
6.267002453151422167e-06 6.181941802577398503e-06 4.611147689297883146e-06 2.499200290363847799e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -1.995593860061603553e-01 2.492589112755692615e-01 -1.299377772427932343e-01 9.397460929068364444e-02 2.386111803489320560e-01 7.815299994710920295e-01 -8.906237718720297694e-03 -5.191106002632053101e-02 -1.211858442161915621e-02 -1.128261602647903997e-02 1.357778155417070946e-04 1.834402410723585258e-02 1.819115801128566232e-02 -7.156586299672412256e-04 8.014608579143876407e-03 1.503589045077791814e-03 6.377880613768815987e-03 -5.149093723735230152e-03 4.372275155176838187e-04 -7.621165378955127101e-04 9.707828895168025586e-04 -2.590707319120030133e-03 -5.700356000010933918e-04 -2.266320428973580981e-05 1.188109077144210201e-03 2.282423709557635579e-04 2.959414735838545099e-04 -9.685527569268893508e-04 3.882006593789460872e-04 -2.889895002230908889e-04 -8.115976316868670415e-04 -6.260789282172198207e-05 1.897887040932097133e-04 -7.367020915229897026e-04 2.718218190133865142e-04 -1.948414915555949885e-04 3.814933487859542755e-04 1.232344235519889524e-04 8.659112766189907585e-05 -1.092912447359346102e-04 -1.134043135378545611e-04 -1.197668074231670004e-04 -2.897056353938380001e-05 6.077410511736578482e-05 -2.507367715266709126e-05 -4.115587432262423018e-05 -7.827536125282888511e-05 -5.208487749324975231e-05 3.456386565905998561e-05 -4.884980097861421091e-05 -1.374164913006776855e-05 -2.331468774836307196e-05 2.623967104073993291e-05 -3.159038315610843235e-06 -1.401897468028334396e-05 9.342002799247602312e-06 1.689455884534502838e-05 -2.527292274620906343e-07 -4.921059296074571016e-06 6.175279472530517966e-06 9.352652663449905533e-07 3.976346635752156228e-08 2.041667214813223345e-08 3.767785622215511326e-06 4.937137719078863692e-06 -1.094703937690689581e-06 -1.676981979439286908e-06 1.710859712445590334e-06 
1.532861605895005321e-07 7.188840269763651195e-07 1.315091717808854989e-07 -1.057256888599603949e-06
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 4.922963494191650025e-01 -1.090332244798968425e-01 6.333722852519021451e-01 -1.253810337754111270e-01 2.022007409580648535e-02 -8.906237718720297694e-03 6.736646007505369127e-01 2.447624002619254793e-02 1.249294354718288131e-03 7.301826193287695721e-02 4.536083016967299569e-02 -4.771062288580391053e-02 -3.970873186282850714e-02 -5.357164349378174158e-04 -8.386808518437965237e-03 -9.240316698801164658e-03 -2.331527786491728420e-03 -8.188836725314545956e-03 3.365603624996505068e-03 1.685761680162608340e-03 -3.514112749737811989e-03 1.910048530170708895e-03 6.396416396915338308e-04 5.119563475380033667e-03 -1.023192533045549349e-03 -8.532184721832028994e-04 -8.218052979748259081e-04 1.898185105172450326e-03 -1.065851493091645895e-03 6.421724751429869031e-04 2.071903486986630431e-04 6.506125407401229640e-05 -2.136427887362311656e-04 5.583919436789780465e-04 -1.081001096278010032e-04 -1.528124947070308605e-05 -9.239860251646188555e-05 -7.544187377709950118e-05 2.179376756322894178e-05 -5.166825657201732188e-05 1.242651621836379425e-04 2.233038249486575891e-04 -2.083800174040636169e-05 -5.199101002130805378e-05 3.839577502370147350e-05 1.074239288935346662e-05 1.082598213452782296e-04 3.677478308399171844e-05 -8.277894084684063415e-05 3.674818401997486430e-05 8.095439642159416729e-06 6.673979250841398185e-05 -6.246090667361027646e-05 -6.270041031876362709e-06 -2.322381621328806462e-06 -1.405854773861216713e-05 -3.658116265943532652e-05 9.874275028953842211e-06 2.060428261404691552e-05 -1.460365363383939131e-05 -6.080081318369939757e-06 -1.032983173646601689e-05 1.786385625911848468e-05 -3.504021046037497873e-06 -9.318399980112320740e-06 8.649171052292548817e-06 1.000871601215943410e-05 3.628568418847811236e-06 
1.698996092568142021e-06 1.513032931868125849e-06 2.095895205823464670e-06 5.027824590938702883e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -5.358410097825231366e-01 -9.819225986247018256e-02 -8.317730172411923195e-02 6.243209406712477844e-02 -1.064042384869845204e-01 -5.191106002632053101e-02 2.447624002619254793e-02 6.164716448374212288e-01 3.929016124855774610e-03 -2.631356521914074431e-02 -2.662446148613487745e-02 3.035023349080870347e-02 1.857688170490350010e-02 1.697971574324704707e-03 6.622137524371058022e-03 1.310399574314126403e-02 2.888096498243742857e-03 5.761630324748338904e-04 4.791860386687295248e-03 -5.639705381500486215e-04 -4.769812684452682584e-04 1.300946422275066783e-03 4.429995338919762746e-04 2.651985220767857920e-04 -5.219545639991976429e-04 1.019919213013772627e-03 -6.209575863051346482e-04 -1.913951214517718637e-04 1.844775720904867752e-04 -3.721908418144862684e-05 8.577034660354077716e-04 1.241107454449386896e-04 -1.934416306290414798e-04 4.194341884179539240e-04 -2.375091734082166112e-04 1.685384801134868138e-04 -3.482094383594609125e-04 -2.178263632688422918e-04 -1.146781522127005216e-04 1.512742811430125862e-04 1.379892229610619493e-04 -9.824793223463326199e-06 1.151213068249090648e-04 -8.014948496242969226e-05 -5.035650309211807846e-05 9.522404593106969291e-05 -9.563561893121391626e-06 6.660100724028785888e-05 3.148035494170563529e-05 3.361328170202302788e-05 2.565746411228413533e-05 -3.990064124238967998e-05 1.406272239379101979e-05 1.413509911422772095e-05 2.131151768185685438e-05 1.300270781147599408e-05 1.290990402637764693e-05 -1.518064024033400747e-07 -1.454233196208581819e-05 8.528530866488123603e-06 -1.267722123747141608e-07 1.528607026872851461e-05 -1.515938125586694791e-05 -4.840014652142644194e-06 7.502761712063313596e-06 -1.039438806924597051e-05 -1.493156975057570465e-05 -2.674915835064053855e-06 
-5.152958681383269683e-06 -3.027310308501267242e-06 -2.680007784735964941e-06 -1.739423428883073235e-06
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -4.542178610391035820e-02 1.860973548664426602e-01 8.650405589413229235e-02 -3.986469576862872177e-02 5.181102428180909408e-02 -1.211858442161915621e-02 1.249294354718288131e-03 3.929016124855774610e-03 1.378226648490247230e-01 3.003736549010300994e-04 -1.286802411315538571e-03 -2.881977500295646250e-04 -9.438429771735645790e-03 -1.031961717066643379e-02 1.488533894097579394e-03 2.921278368835744307e-03 2.014168246023944914e-03 -1.572297529803952506e-03 -2.701631919881379313e-03 -4.559322325244253714e-04 1.513123684842696990e-03 -8.641529465784039601e-05 -2.891520187456748654e-04 -2.117517713488148033e-04 -3.917143241348182534e-04 1.820065400901503831e-04 -4.317992615409150278e-04 -6.622490769088017638e-05 -2.611784072064916558e-04 1.568286528632138451e-05 2.322345424930624886e-04 7.720357750624254713e-05 9.321557582174938041e-06 1.223052945866118474e-04 -1.252927970015214162e-05 5.945758465737223909e-06 -7.143942230123425654e-05 -4.626372365928647343e-05 -3.284870675851983302e-05 5.163991588943059975e-05 1.635837233325937616e-05 -3.577896867832735290e-06 1.872788832206620713e-05 -1.511944187037296394e-05 -5.355860726879331885e-06 1.408938021508663544e-05 -5.335971353757113604e-06 6.841423103567814359e-06 -1.922103537959432319e-06 1.541714632343483258e-05 7.827915823939723294e-06 2.076167297879437017e-06 -6.690447756701302991e-07 4.365661606724274408e-06 7.597883185995727905e-06 2.173718635118782802e-06 -2.380443856231859398e-07 3.174397085963111570e-06 -3.662689477174183371e-06 1.611671919285619567e-06 -2.955290454887205809e-06 3.765199894143256358e-06 -3.086792200769470042e-06 -1.751284841526143866e-06 1.481553022075293797e-06 -2.684966038604770898e-06 -4.298212120426754376e-06 6.256314854520926767e-07 
-3.717786414014746694e-06 -6.928184822989570810e-07 -5.338370232815017866e-07 -1.058159092309566331e-06
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 6.446493187890307563e-01 -2.476753075192492364e-03 5.167214169734140050e-01 -1.222101997864808026e-01 -4.384272916013899624e-02 -1.128261602647903997e-02 7.301826193287695721e-02 -2.631356521914074431e-02 3.003736549010300994e-04 1.493941542815085399e-01 2.589837251767865972e-02 -2.838200941000140753e-02 -3.254132600646036239e-02 -2.794517580320655514e-03 -8.040809158104750970e-03 -8.647506447880402153e-03 -4.080546950286072921e-03 -4.611175096582403009e-03 2.010445487816622627e-03 6.465624349423248259e-04 -2.638999513379862872e-03 1.384210934921840984e-03 4.153989173274799641e-04 3.929943804890193536e-03 -1.474737246469256515e-03 -8.952327384546862176e-04 -4.318245500662746248e-04 1.621879772926319418e-03 -8.664658670297145909e-04 5.523266222920545938e-04 1.319258944292221150e-04 1.498206408742286573e-04 -4.300420763623788499e-05 5.969079909692888691e-04 -7.500888771415703603e-05 8.037519744602711379e-05 -1.261702632390506406e-04 -7.157316449197834792e-05 2.658696764700058170e-05 -6.636851409038127663e-06 8.788404914792070585e-05 1.850251946994860915e-04 -3.921700243155538337e-06 -6.340670607870966960e-05 3.162609645467792235e-05 1.027481762106180603e-05 8.635562568171455090e-05 3.688489484844579153e-05 -6.144853042666176615e-05 3.189756072788974174e-05 7.482435597411070159e-06 5.190915115178377788e-05 -4.769669935150141820e-05 -5.038508685294498293e-06 1.681868023706654693e-06 -1.356519243751721837e-05 -2.829047263357563656e-05 6.569053642875453973e-06 1.592046946430446732e-05 -1.193357918772824713e-05 -5.162055851454997439e-06 -8.267627418773137351e-06 1.365005790821120530e-05 -2.620978606795394094e-06 -7.868677817429591437e-06 6.914677017780991490e-06 8.050492804267978341e-06 2.393845435813816572e-06 
1.334268978704148648e-06 1.040729183977109180e-06 1.592673355379447283e-06 6.263309317770234638e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.808106467768729186e-01 7.828318504444251036e-02 2.348566603962279620e-01 -8.267036457223746193e-02 2.871945099969961182e-02 1.357778155417070946e-04 4.536083016967299569e-02 -2.662446148613487745e-02 -1.286802411315538571e-03 2.589837251767865972e-02 9.764618739332360520e-02 -1.482638344800362232e-02 -1.724443139153373009e-02 3.803234202872004524e-04 -5.623718701138279152e-03 -3.826512737802884988e-03 -9.775227094927225850e-04 -5.899508768327291468e-04 8.651394548020621255e-04 8.793760860145664188e-04 -2.102539007745656604e-03 8.371820020202150586e-04 -8.422057463650490334e-04 1.552126333632783102e-03 -1.823483594772343122e-04 -6.317269401444231295e-04 -1.540086329773595393e-04 7.538009697509935906e-04 -2.990433471298725655e-04 1.304392974530912811e-04 -1.527094718131040256e-04 6.205928923840062868e-05 -1.771457353379915124e-05 1.560986943266852260e-04 4.555590013488509287e-05 -2.720213059526273110e-05 4.466549351231284369e-05 1.821857359539658377e-05 3.805776701988292934e-05 -2.549466682091819849e-05 1.511074267337383769e-06 7.653819982564934545e-05 -1.958926611795705535e-05 -6.767340925218062386e-06 2.135864082915135857e-05 -1.268684764466603794e-05 3.566195657343805071e-05 -7.031695996204021260e-07 -2.611622615625184613e-05 4.829055327057635835e-06 -7.107727357430579850e-07 2.510774275624423757e-05 -1.972073898776988349e-05 -2.657060087150566561e-06 -3.432175703763764522e-06 -6.489055943141860449e-06 -1.228234037790994161e-05 2.034757309990990878e-06 8.626285529836870479e-06 -6.660283232602078376e-06 -1.195063361199627214e-06 -5.847615990734883575e-06 7.540510309074001110e-06 -5.402179963150813133e-08 -4.370368807009190035e-06 4.544034092349464228e-06 5.905198939696531584e-06 1.116429268396872302e-06 
2.291183767977657140e-06 9.489634262735647839e-07 1.066002144511729646e-06 6.853496867453323004e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -1.879563971277720125e-01 5.922150297877085828e-02 -2.264645284984671270e-01 2.646213934245877919e-02 -3.057159479712759018e-02 1.834402410723585258e-02 -4.771062288580391053e-02 3.035023349080870347e-02 -2.881977500295646250e-04 -2.838200941000140753e-02 -1.482638344800362232e-02 9.010457061909563081e-02 2.161123960217148018e-02 1.260208087759761772e-03 7.581033223303884107e-03 5.631916138201302664e-03 1.902438188237369054e-03 -1.049340468015621001e-03 -1.097675554005412939e-03 -1.180698743346181908e-03 1.928498632502418340e-03 -1.398795150299020340e-04 -8.132969182586236533e-05 -1.139575101445562330e-03 1.475769819879471150e-05 5.316926219505077557e-04 2.501603267483476479e-04 -7.390892025240558593e-04 3.196742614371007440e-04 -2.351317767343346157e-04 3.325865956727879543e-04 1.368399555932141049e-04 8.976859120453504657e-05 -2.423731873066102703e-05 -4.927734689555310441e-05 1.085926629963905691e-04 -4.781254553463656416e-05 -7.568404615422542284e-05 -1.640081712721153180e-05 8.965088354318442713e-05 6.937157613623407486e-06 -5.540091942239721656e-05 7.592620801441106633e-05 -2.564710937049201640e-05 -4.188549991038170923e-05 4.234499075215722181e-05 -5.178197633985196749e-05 1.641287046299667044e-05 4.579202980697374780e-05 1.152707906296191173e-07 1.095455392325919880e-05 -4.618846412808259790e-05 2.966406717171467887e-05 9.322242005733639048e-06 1.048654694881498200e-05 1.090389373806454206e-05 2.027432401588401143e-05 -2.616271890777917323e-06 -1.335044618253140997e-05 9.580811289778705769e-06 1.277334091824381308e-06 1.114550408464794913e-05 -1.319045281535339718e-05 -1.155278832292491333e-06 7.374138184194983456e-06 -8.184525967841569600e-06 -1.144085679522630020e-05 -1.593019161874593051e-06 
-3.656059094743960134e-06 -1.997452207645467794e-06 -2.002451489677425600e-06 -1.522927148308537333e-06
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -2.407681308346238114e-01 -4.490960216424789986e-02 -2.998861469254355816e-01 1.280539369826423635e-01 -1.350451288416053622e-02 1.819115801128566232e-02 -3.970873186282850714e-02 1.857688170490350010e-02 -9.438429771735645790e-03 -3.254132600646036239e-02 -1.724443139153373009e-02 2.161123960217148018e-02 3.836577770399322157e-02 2.655842001442469246e-03 4.981307762661317702e-03 5.392063475037497076e-03 3.235466475779910037e-03 1.315917776717218319e-03 -1.181257423413893936e-04 -4.016331728921166203e-04 1.336513853119313802e-03 -5.828860744392343066e-04 2.770168873476726540e-04 -1.718514846778839951e-03 9.648381522618979488e-04 4.877208911109993613e-04 2.870447328793081557e-04 -9.635738500791380615e-04 5.834546366025882046e-04 -2.230824282871540765e-04 -8.834108906807161662e-05 -1.047785179044574303e-04 4.408551797492701760e-05 -3.325316271699815351e-04 7.184448516392883278e-05 -1.242192901196323053e-05 6.302408409915136081e-05 2.818176826297703256e-05 -1.488219089555614500e-05 -5.451978491054562122e-06 -4.332988792362825058e-05 -9.768047664317769927e-05 6.423477259243303630e-06 2.267883033109129368e-05 -1.987818965056291188e-05 1.711686737882336770e-06 -4.901026187410532708e-05 -1.730822146712370386e-05 3.795870938052484637e-05 -1.721228523917412866e-05 -4.256380650441868637e-06 -3.307189074513850586e-05 2.863442294571222292e-05 2.118184174149720994e-06 -1.798061078961197359e-06 8.365292388722692004e-06 1.821986625390242394e-05 -3.944866672102277042e-06 -1.008376334965083370e-05 7.085648790238564006e-06 3.015605347617571720e-06 5.646054904762381247e-06 -8.216049733794172664e-06 1.244245699531189135e-06 5.104576992946524359e-06 -4.389484473512973286e-06 -5.419640140062355265e-06 -9.263970116627612412e-07 
-6.173413782026520671e-07 -7.052907008841067151e-07 -1.037992718809561380e-06 -5.413516795648731065e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -4.218296569368950449e-02 -7.855375238176477704e-02 -3.674637508355390464e-02 2.420968775319610328e-02 -2.789887764143550095e-02 -7.156586299672412256e-04 -5.357164349378174158e-04 1.697971574324704707e-03 -1.031961717066643379e-02 -2.794517580320655514e-03 3.803234202872004524e-04 1.260208087759761772e-03 2.655842001442469246e-03 1.671364584526372279e-02 -6.984209635675925213e-04 -1.612786544524850291e-03 -1.150377183219982976e-03 8.909906886483122358e-04 1.423417856257976804e-03 2.050400087963895085e-04 -1.291866490741424948e-03 -7.850157273589783772e-05 3.085029114721717677e-04 2.497826587293777820e-04 4.722708274003915309e-04 -2.000083894173748113e-04 1.044987434562893139e-05 1.318789244529286638e-04 8.288457155387046326e-05 -3.157230427125036178e-05 -1.350921746972201640e-04 -4.006748714237507109e-05 -1.242419819597935633e-05 -3.555702382019164696e-05 5.480338726730083050e-06 -8.008739068485121101e-06 3.395432212217582360e-05 2.456636053431822874e-05 2.222611053478334284e-05 -2.823673957256211920e-05 -2.378083578042228489e-06 3.852144294755435534e-06 -9.416007522626111619e-06 4.764253380890537553e-06 2.654528515378756236e-06 -6.401279406414283033e-06 4.880955365653679169e-06 -3.023001551370472642e-06 -1.296651148190591137e-06 -8.258443736897127149e-06 -3.042455569613446421e-06 1.079840990757596066e-06 -8.676380647538681878e-07 -2.607562874279888942e-06 -3.670293928117995323e-06 -1.543772288019227860e-06 -1.141356908366740230e-06 -1.499600456045305431e-06 2.322605410427941311e-06 -1.216767464131830376e-06 1.424941754188119410e-06 -2.440796434761868840e-06 2.368856382328713002e-06 1.047180671087091911e-06 -1.006425173237491263e-06 1.777598704392067933e-06 2.684939129778658618e-06 -2.870167575707448028e-07 
2.120510888560250851e-06 4.596497846412796187e-07 3.782072301957504328e-07 6.153488546045230070e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -1.184450600737501670e-02 -2.203889846035033198e-02 -4.786744050147226544e-03 2.045107667186231784e-02 4.990762965367843106e-03 8.014608579143876407e-03 -8.386808518437965237e-03 6.622137524371058022e-03 1.488533894097579394e-03 -8.040809158104750970e-03 -5.623718701138279152e-03 7.581033223303884107e-03 4.981307762661317702e-03 -6.984209635675925213e-04 9.978740177867519157e-03 2.401071242547132400e-03 5.904194625218347559e-04 -2.111218223723604359e-04 -7.238019582094053130e-04 -6.137023943648180777e-04 5.398188982721214679e-04 8.814322416188973344e-05 8.161558591019856988e-05 -3.547922075430944054e-04 -1.810753686722332281e-04 3.125118327135938427e-04 1.069360200462149261e-04 -1.554611362615432757e-04 -1.348389675452477806e-05 -2.100213322416194368e-05 1.313147752210520620e-04 3.824002737201768391e-05 2.659940800613697057e-05 2.924456388972403116e-05 -2.080568721423884138e-05 3.526266791391301545e-05 -1.480870205671202186e-05 -3.336547759573937449e-05 -1.063445457587769095e-05 2.871409738102048020e-05 6.726295011380237417e-06 -1.175350019207360468e-05 2.170510741386288563e-05 -1.448064795891040923e-05 -1.042498032786376992e-05 1.597165789409206555e-05 -1.253888389383585237e-05 6.736831285819334451e-06 1.083893563335877063e-05 1.450878037906366340e-06 4.054202211064563128e-06 -1.132394321991034218e-05 6.883386714315126653e-06 2.711689676113091227e-06 3.445760783063244821e-06 3.043784731780698586e-06 5.042027123378156580e-06 1.515205457629298163e-07 -3.794054591613607972e-06 2.637348567422762434e-06 -2.598954029735527643e-07 3.262752903500524751e-06 -3.509676316894518986e-06 -5.975323253942488857e-07 2.221326450778006366e-06 -2.382215526655323162e-06 -3.446991892553718155e-06 -3.614838062359436948e-07 
-1.661307389047478205e-06 -5.816343929781949384e-07 -5.600825022422226172e-07 -5.703477275038895062e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -6.928899595091043973e-02 -1.114645866418713036e-02 -3.433939784802754469e-02 2.127367547077535156e-02 5.971266459747406191e-03 1.503589045077791814e-03 -9.240316698801164658e-03 1.310399574314126403e-02 2.921278368835744307e-03 -8.647506447880402153e-03 -3.826512737802884988e-03 5.631916138201302664e-03 5.392063475037497076e-03 -1.612786544524850291e-03 2.401071242547132400e-03 8.784434034722947426e-03 1.267955691158102836e-03 -3.906829188066448058e-05 -8.598055645507212200e-04 -2.579917623397481354e-04 7.475021609492588440e-04 2.157809509401560980e-04 -1.098559092735032364e-04 -6.407025779186355198e-04 -1.222610969273650334e-04 3.836350626446540673e-04 3.551235901938351911e-05 -1.617970521098006421e-04 1.135921452009736873e-04 -7.496953629531983231e-05 1.266843640778925445e-04 3.043965333873382959e-05 -1.918210672150226241e-06 1.170651409857963795e-05 -2.190644154072858760e-05 1.887828842935070680e-05 -2.833140658352186565e-05 -2.435334949557669721e-05 -1.842124031509980223e-05 3.138718151430508815e-05 9.301846013530460708e-06 -2.635105165839978238e-05 2.168991103260672700e-05 -8.461270410837457042e-06 -9.934412191455545315e-06 1.381533656598010190e-05 -1.527909938575709473e-05 5.417270580583692312e-06 1.337195308266203661e-05 1.382077939810439417e-06 4.350261058384740647e-06 -1.325867461541711008e-05 9.004784301809802747e-06 3.745903749580492371e-06 3.277886338508914530e-06 4.094318463592796512e-06 5.877528040272694759e-06 -5.604920655777498291e-07 -5.062419837518941381e-06 3.142843063817634383e-06 1.389952917046311231e-07 4.129675438955480779e-06 -4.665165174764926564e-06 -6.383155976231338997e-07 2.465113882689141429e-06 -2.963031835159859062e-06 -4.086058690323189241e-06 -6.060719727261434323e-07 
-1.529265104782114816e-06 -7.215892745304936164e-07 -7.167152683383465318e-07 -5.600020394411885686e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -2.314328394561051064e-02 7.139725802889677897e-03 -3.875496288806829642e-02 2.230341027418606542e-02 1.203537092994293525e-02 6.377880613768815987e-03 -2.331527786491728420e-03 2.888096498243742857e-03 2.014168246023944914e-03 -4.080546950286072921e-03 -9.775227094927225850e-04 1.902438188237369054e-03 3.235466475779910037e-03 -1.150377183219982976e-03 5.904194625218347559e-04 1.267955691158102836e-03 2.758652573634379039e-03 -6.721332077634122452e-06 -2.410937514453779585e-04 -1.127259240034755015e-05 4.061987402526326049e-04 -1.314183722669266569e-04 -7.518932739827273264e-05 -2.950813172104308716e-04 2.344793725878289460e-04 6.909808432054568477e-05 -2.702112633043842759e-05 -2.019192166986651723e-04 1.314975886739681919e-04 -3.205075068231233860e-05 -4.757942185240482137e-05 -3.683131086371386006e-05 2.323404252428282124e-06 -8.518051003777814658e-05 3.263977312268235973e-05 -1.237809026142133732e-05 2.010665843829168291e-05 1.059585566033998563e-05 -5.407791862073911715e-06 -6.839770342201077493e-06 -1.121779260755475359e-05 -2.155466002869424675e-05 -4.539239811610367902e-06 7.444838442643168811e-06 -2.319966315274088547e-06 -2.129983569628202774e-06 -9.439960295196752100e-06 -5.785650638778238011e-06 5.981169738424131981e-06 -3.004212273746950412e-06 -1.895187360954479683e-06 -4.494433710206043521e-06 5.140046142235332652e-06 -1.130987251304275965e-08 -9.728453920491350508e-07 1.624998609077082158e-06 3.311804951802428678e-06 -4.053063378453189973e-07 -2.081515874057325160e-06 1.184554945980987814e-06 3.964074421047910244e-07 9.243351735950407418e-07 -1.390809463374531720e-06 2.607890887808156917e-07 9.134946395413285417e-07 -7.664359594913168868e-07 -9.484758151830637712e-07 1.213406610054423080e-07 
-2.162647439429792068e-07 -7.316624217480915010e-08 -1.578331198901239645e-07 -1.569864910630974774e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -1.010290524722116424e-02 7.953197102929781409e-03 -4.154055695242609564e-02 1.224807504030877951e-02 2.940571028908996621e-03 -5.149093723735230152e-03 -8.188836725314545956e-03 5.761630324748338904e-04 -1.572297529803952506e-03 -4.611175096582403009e-03 -5.899508768327291468e-04 -1.049340468015621001e-03 1.315917776717218319e-03 8.909906886483122358e-04 -2.111218223723604359e-04 -3.906829188066448058e-05 -6.721332077634122452e-06 2.654883176957817566e-03 1.781385821357838264e-04 3.832584097043809313e-05 -1.710656118742384267e-04 -1.478959617349011823e-04 -2.795742191558133328e-05 -2.533682421845999747e-04 2.636961287341718727e-04 -7.712112274684554750e-05 4.919423165728258019e-05 -6.018814393143723755e-05 8.899828273028689970e-05 -1.793917172178902255e-05 -7.845376862377115941e-05 -4.427663070699804955e-05 -3.996101136246304946e-06 -6.259167037907554534e-05 1.354218409939705828e-05 -1.894785430261292143e-05 2.351704961606973526e-05 2.077036801265803599e-05 1.245084588743401586e-06 -1.228944324201914164e-05 -1.438197776180284643e-05 -1.327053592797013376e-05 -1.074136191982459908e-05 1.001382695417531708e-05 2.150726598090146471e-06 -7.119869868421568059e-06 -2.213786450559341237e-06 -6.434177212053375019e-06 1.693601703710781671e-06 -4.676667766669187167e-06 -2.900196139012895507e-06 -3.640658582318879352e-07 1.778657434708435252e-06 -8.726387820627688336e-07 -1.633268037881527397e-06 -2.478368698217313839e-07 8.041116949300991615e-07 -8.631858710836139504e-07 2.280979474181072700e-07 -2.114012247821179239e-07 7.199771695890263606e-07 -8.859070622746353517e-07 3.074074593753878681e-07 6.910274422069910821e-07 -2.147353494858667464e-07 5.519858931202305425e-07 1.022984437208358002e-06 -2.253908139097856409e-07 
7.837075828301020140e-07 2.014451613640029494e-07 1.261141637019637014e-07 2.631878518066719889e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -1.867650347404954322e-02 -1.099099467253558844e-02 -1.502694183874799840e-02 7.096089158303382424e-03 -8.845344290973528034e-03 4.372275155176838187e-04 3.365603624996505068e-03 4.791860386687295248e-03 -2.701631919881379313e-03 2.010445487816622627e-03 8.651394548020621255e-04 -1.097675554005412939e-03 -1.181257423413893936e-04 1.423417856257976804e-03 -7.238019582094053130e-04 -8.598055645507212200e-04 -2.410937514453779585e-04 1.781385821357838264e-04 1.972601968791804199e-03 1.581999642219370975e-05 -5.573780829297082145e-04 -1.120792657909674370e-04 1.212727644467128540e-04 2.791991013712721566e-04 1.968144077367101074e-04 -1.402432568413815696e-04 -1.097437038246173443e-05 7.179012630970069728e-05 8.930451225534898635e-06 3.583008156604408769e-05 -6.602652202207881060e-05 -2.047190979075051923e-05 -9.231290729490578364e-06 -1.150582770602273388e-05 1.670742185242203457e-05 -7.250651623484685219e-06 1.097452315393216245e-05 9.366992394747756872e-06 5.363696091605718661e-06 -1.625428292558089621e-05 -1.506965902226843640e-07 8.344759025141013241e-06 -7.152475622632952571e-06 1.446563839978299185e-06 3.142198422638828178e-06 -3.665741129643249016e-06 5.156257426738980782e-06 -2.125306917406018259e-06 -3.491962166533340183e-06 -1.445813868687924058e-06 -1.374818018631555688e-06 3.266141439568210905e-06 -2.379288734981468331e-06 -1.374841754268015205e-06 -1.752465757194270748e-06 -9.714309424680902456e-07 -1.395717084989465948e-06 -1.100584666138763304e-07 1.490945565476103243e-06 -9.869014142786351643e-07 1.629617307616035339e-07 -1.320205930552323286e-06 1.593442474264427362e-06 3.019426348771525274e-07 -6.986712579869655454e-07 1.017430094326341144e-06 1.402298595073682115e-06 1.704891596913584890e-07 
8.437967807583934288e-07 2.635560043088589926e-07 2.387087147285776049e-07 2.310082046331661528e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -8.086492966917406197e-03 2.170026299784197713e-03 1.287632953748129878e-03 -1.776439619886364995e-03 1.251727113446670899e-03 -7.621165378955127101e-04 1.685761680162608340e-03 -5.639705381500486215e-04 -4.559322325244253714e-04 6.465624349423248259e-04 8.793760860145664188e-04 -1.180698743346181908e-03 -4.016331728921166203e-04 2.050400087963895085e-04 -6.137023943648180777e-04 -2.579917623397481354e-04 -1.127259240034755015e-05 3.832584097043809313e-05 1.581999642219370975e-05 7.694835997424208244e-04 -6.109637013537570381e-05 -1.940283436442630967e-05 -2.462091617782779657e-06 4.641455305581585188e-05 3.856865732780316472e-05 -2.156696877299690542e-05 -1.999750393333679993e-05 3.841737265816056073e-06 5.082827275921383602e-06 -1.300258944905265885e-07 -2.436193456894685799e-05 -9.800770842029199069e-06 -1.783401125914881450e-06 -1.193064611366403195e-05 4.092970260458978076e-06 -1.521954550187050161e-06 2.986387614670308069e-06 5.504263114258058536e-06 1.532912011431012526e-06 -2.861592923993595169e-06 -2.254521428416226894e-06 -4.645009491739806849e-07 -2.723736758898496130e-06 2.596856710036457097e-06 1.197361570135105038e-06 -2.338779104371744934e-06 9.819325193866895831e-07 -9.470142459945499828e-07 -6.561953175666949185e-07 -5.446076603722090273e-07 -7.186494168135633854e-07 1.068148721044382209e-06 -4.578304226890583509e-07 -2.475616309151499188e-07 -5.681244876201166376e-07 -2.854924526465894277e-07 -4.508289033213431892e-07 -1.319957856052361168e-07 3.575204695786335595e-07 -2.734315681762009625e-07 1.019937499235025219e-07 -3.337929408674683556e-07 3.138778611670293107e-07 8.086401926044007054e-08 -2.559306707585397471e-07 2.414661991954002641e-07 3.697135743607308323e-07 2.744415738202501588e-08 
2.265222527209049861e-07 6.029957038327152215e-08 5.432256177951978763e-08 6.820330898875917256e-08
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -3.179099939464212488e-03 1.313678162907609692e-03 -1.628824760070252467e-02 5.657054541087331960e-03 -4.204353811565990709e-03 9.707828895168025586e-04 -3.514112749737811989e-03 -4.769812684452682584e-04 1.513123684842696990e-03 -2.638999513379862872e-03 -2.102539007745656604e-03 1.928498632502418340e-03 1.336513853119313802e-03 -1.291866490741424948e-03 5.398188982721214679e-04 7.475021609492588440e-04 4.061987402526326049e-04 -1.710656118742384267e-04 -5.573780829297082145e-04 -6.109637013537570381e-05 9.512918421791452658e-04 5.942298954637408287e-05 -8.102125469767662764e-05 -2.128874175420991861e-04 -7.310639150808250560e-05 1.190017483162093668e-04 2.708870426419875602e-06 -1.066236087843876055e-04 4.676347578093375412e-05 -2.019294086883735120e-05 4.889171253566331496e-05 5.047013451780475539e-06 4.012966987645971198e-06 -8.421336577508751036e-06 -1.423799459480948691e-05 1.305508286702875969e-05 -1.265408923371457892e-05 -7.255141362063686831e-06 -7.715651156148918495e-06 1.265320570843961708e-05 1.099530111560931772e-06 -9.980952003680414243e-06 6.781122835189948094e-06 -7.901045360359031898e-07 -3.337601418942264740e-06 3.633626562737958221e-06 -5.420458065911870621e-06 1.685420671364507618e-06 4.974502521845857810e-06 1.276132510500819379e-06 4.773875732849080013e-07 -4.563858070661183952e-06 3.597772699908132245e-06 1.198699228118513262e-06 1.136884661498420252e-06 1.350720801120938314e-06 2.178105945193848382e-06 -2.040400604256013644e-07 -1.883232178564239638e-06 1.168569542372381486e-06 1.086201913125062155e-09 1.500948965738256719e-06 -1.884590386869213127e-06 -2.487763008760115392e-07 8.063456815319987564e-07 -1.122388599179621682e-06 -1.523953094583137964e-06 -1.677735389502810711e-07 
-6.547994884620087304e-07 -2.754722462252092911e-07 -2.688458475558004660e-07 -2.047241814358036085e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 2.692206271712309362e-02 3.129994640285305336e-03 2.046943851606475642e-02 -1.090035309174220346e-03 -8.845449937712742544e-03 -2.590707319120030133e-03 1.910048530170708895e-03 1.300946422275066783e-03 -8.641529465784039601e-05 1.384210934921840984e-03 8.371820020202150586e-04 -1.398795150299020340e-04 -5.828860744392343066e-04 -7.850157273589783772e-05 8.814322416188973344e-05 2.157809509401560980e-04 -1.314183722669266569e-04 -1.478959617349011823e-04 -1.120792657909674370e-04 -1.940283436442630967e-05 5.942298954637408287e-05 3.791368468593192390e-04 -3.966492867750703145e-05 5.025286892321620901e-05 -1.179168619656764888e-04 3.965260843890713920e-05 -7.323087832490306015e-06 5.557739584215943868e-05 -2.496790025116852357e-05 7.342877811707802448e-06 5.277448054007078852e-05 1.785765708892192793e-05 -2.314409195727792971e-06 5.105989180248134276e-05 -1.736615429395850244e-05 1.421557091532470368e-05 -1.914532795170611606e-05 -1.172253059430506888e-05 -2.753513503034467404e-06 1.011681170731442506e-05 8.345312134885264342e-06 6.229501122792144230e-06 6.510829223873973898e-06 -6.362982826924688146e-06 -7.434749984469295211e-07 4.545992970062947133e-06 2.459794336524338313e-06 4.384666331021207666e-06 1.569064048095963131e-07 2.705949146805865008e-06 1.373703476853237022e-06 -4.858405210958268478e-07 -5.439557268092902337e-07 8.203714546575005508e-07 1.173937404748231261e-06 1.458400908808626721e-07 -2.790189499960211380e-07 1.480863047751040585e-07 -2.264991835704122336e-07 -2.082119025055192160e-08 -1.808243747754117441e-07 5.375602623974374954e-07 -4.659385876185425389e-07 -3.308327033165305113e-07 9.076097753945582601e-08 -3.169084459341442714e-07 -5.012606478486283811e-07 -1.336850201458063875e-07 
-2.584102985771163875e-07 -1.389331838198693611e-07 -9.471408319558297167e-08 -5.800788164262524000e-08
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -3.716463874820270623e-03 -6.800850072633726068e-03 -3.980856425898263336e-03 3.148674767432082470e-03 -1.427256574343709755e-03 -5.700356000010933918e-04 6.396416396915338308e-04 4.429995338919762746e-04 -2.891520187456748654e-04 4.153989173274799641e-04 -8.422057463650490334e-04 -8.132969182586236533e-05 2.770168873476726540e-04 3.085029114721717677e-04 8.161558591019856988e-05 -1.098559092735032364e-04 -7.518932739827273264e-05 -2.795742191558133328e-05 1.212727644467128540e-04 -2.462091617782779657e-06 -8.102125469767662764e-05 -3.966492867750703145e-05 1.943331112973469737e-04 4.620456727709265729e-05 1.985508420856076393e-05 -1.037303043801206781e-05 3.476068251120629964e-06 6.485937340358795058e-06 -1.320624820963963158e-05 1.016348171221444833e-05 -7.872774659517026701e-07 -3.451643251733242962e-06 8.562139676069603107e-07 2.269434145541159094e-06 -1.347567054247607791e-06 4.658885676016988948e-07 -4.260337323611845854e-07 -3.779206497785300149e-07 4.870165057145087965e-08 -1.493458322754097109e-06 3.256397432931554201e-07 1.325588628417998583e-06 -4.283466380571797808e-07 -4.902293212545840522e-07 1.915809545512185938e-07 -8.303880306775466180e-08 6.778385376820873895e-07 8.001765279809289614e-08 -5.915189901801597069e-07 -6.210773323252043075e-08 1.283840886846695906e-07 4.554107735126645981e-07 -4.716321995240883795e-07 -1.082096389824352911e-07 -4.185379481573339169e-08 -1.569499840195947381e-07 -2.485735330464010601e-07 2.505458125531476584e-08 2.448701463667898783e-07 -1.213793734953603467e-07 -4.174203063759120042e-08 -1.282342721334833822e-07 2.318186796303224725e-07 1.180801700814298371e-08 -6.649916177769624017e-08 1.203146000329891082e-07 1.422710045280861107e-07 1.124544048302991715e-08 
6.537217150705251352e-08 2.693940749634684591e-08 2.862587028136226858e-08 1.361458105578812630e-08
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 2.435999345302212321e-02 -6.428282939916199999e-03 1.816575145232657285e-02 -6.230487835484074984e-03 -3.315342074928036634e-03 -2.266320428973580981e-05 5.119563475380033667e-03 2.651985220767857920e-04 -2.117517713488148033e-04 3.929943804890193536e-03 1.552126333632783102e-03 -1.139575101445562330e-03 -1.718514846778839951e-03 2.497826587293777820e-04 -3.547922075430944054e-04 -6.407025779186355198e-04 -2.950813172104308716e-04 -2.533682421845999747e-04 2.791991013712721566e-04 4.641455305581585188e-05 -2.128874175420991861e-04 5.025286892321620901e-05 4.620456727709265729e-05 3.934507508018032654e-04 -7.497705330358391538e-05 -5.884690678380889397e-05 -3.390795263073728027e-05 1.013909762649168521e-04 -6.554022500793949880e-05 3.554355699682964245e-05 1.027199591109988907e-05 1.159309148260462930e-05 -2.084648983372586327e-06 3.941837050649379286e-05 -4.979247280099254574e-06 7.113250889964536939e-06 -6.139542826779920374e-06 -5.629758959217230669e-06 2.775672600909365765e-06 -5.339703117668354174e-07 6.010583505940177225e-06 1.283943202753549981e-05 6.139787067412278521e-07 -4.673534220111206383e-06 1.668082042273912496e-06 1.270779049476179787e-06 5.614346977311933670e-06 2.558776263775449283e-06 -3.751933528977560824e-06 2.042987101463171457e-06 5.734361285883117776e-07 3.173841890454395189e-06 -3.019572854956388139e-06 -2.573193011753605258e-07 1.204355174926739101e-07 -7.738238062717366057e-07 -1.763794103828677374e-06 4.629099487134646169e-07 1.066211802757781815e-06 -7.435410395407145560e-07 -3.457758134077817468e-07 -4.887846996210276815e-07 9.039007425962339883e-07 -2.025907334512037853e-07 -4.817459891533697588e-07 4.310349364325829828e-07 4.660556393120762020e-07 1.576000151600658905e-07 
8.588230330409662199e-08 6.212216831921702464e-08 1.001815186360548032e-07 2.920732134283257970e-08
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -1.855317653362035038e-02 6.266700245703227777e-03 -2.260077600759738056e-02 9.266754937166011771e-03 1.742051632048351043e-03 1.188109077144210201e-03 -1.023192533045549349e-03 -5.219545639991976429e-04 -3.917143241348182534e-04 -1.474737246469256515e-03 -1.823483594772343122e-04 1.475769819879471150e-05 9.648381522618979488e-04 4.722708274003915309e-04 -1.810753686722332281e-04 -1.222610969273650334e-04 2.344793725878289460e-04 2.636961287341718727e-04 1.968144077367101074e-04 3.856865732780316472e-05 -7.310639150808250560e-05 -1.179168619656764888e-04 1.985508420856076393e-05 -7.497705330358391538e-05 2.727450964001314941e-04 -5.246786360908931539e-05 -3.753926873231404462e-06 -5.445302834894155691e-05 6.589698455494952473e-05 -6.819580321071816513e-06 -6.390986927413272456e-05 -3.225598515609925780e-05 -2.630105985469369363e-06 -5.136490491466927795e-05 1.793505408184338415e-05 -1.346624417354368161e-05 1.783917303711390703e-05 1.324354469328319927e-05 1.513660489385331954e-06 -1.220037129952917650e-05 -7.859988199068437925e-06 -7.336238846252679082e-06 -7.182634381918656094e-06 6.446402482539179799e-06 7.175054275464823696e-07 -4.589462929215870953e-06 -1.923636255019687593e-06 -4.454313748893245547e-06 8.389716000141876248e-07 -2.854761963097016561e-06 -2.079300727543130159e-06 -6.910193989440181831e-08 1.049957303304256693e-06 -8.266511660398396141e-07 -1.397456101202905326e-06 4.008844992338183767e-11 5.892587474358313580e-07 -3.725984889029965819e-07 4.963575964239832487e-08 -7.344732201048610832e-08 3.599422655760447568e-07 -5.071080795109601970e-07 2.965991371089280550e-07 4.067826576065174623e-07 -5.400346519558699801e-08 3.215948792948165141e-07 5.491839657154014149e-07 6.149312214732258695e-08 
4.339290759661658034e-07 1.400884324127875396e-07 8.402460154690179919e-08 1.007822564215318863e-07
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -3.916002264634483622e-03 -6.067260715852028363e-03 1.782993145803323269e-03 4.271256944268427848e-04 -3.440408587652007077e-04 2.282423709557635579e-04 -8.532184721832028994e-04 1.019919213013772627e-03 1.820065400901503831e-04 -8.952327384546862176e-04 -6.317269401444231295e-04 5.316926219505077557e-04 4.877208911109993613e-04 -2.000083894173748113e-04 3.125118327135938427e-04 3.836350626446540673e-04 6.909808432054568477e-05 -7.712112274684554750e-05 -1.402432568413815696e-04 -2.156696877299690542e-05 1.190017483162093668e-04 3.965260843890713920e-05 -1.037303043801206781e-05 -5.884690678380889397e-05 -5.246786360908931539e-05 9.264343516634351210e-05 7.265565995917290035e-06 -1.773559801556426558e-05 2.225938557862382606e-06 -9.978580694065766296e-06 2.808308707703107692e-05 9.135989232550937518e-06 1.179737144004476322e-06 8.530318885315444480e-06 -7.300151359965833823e-06 6.236192529520273055e-06 -7.022393758778555738e-06 -4.996827641323857075e-06 -2.466525569032061804e-06 5.815880694681688025e-06 2.786000210051901315e-06 -1.839203601637591919e-06 4.439440254221549007e-06 -1.835714410676625404e-06 -1.343245793829233503e-06 2.451051610256039569e-06 -1.556577048268841076e-06 1.576675673177870600e-06 1.667796887524464566e-06 5.696283390954355290e-07 8.164855387896602175e-07 -1.731280606646242096e-06 1.009876605812521656e-06 6.073106869740168480e-07 5.811455916257587519e-07 4.863310634396478249e-07 6.108294573604946415e-07 -3.871871234503116670e-08 -6.323569176957675461e-07 4.260839427667649490e-07 -2.005223454683642556e-08 5.908466432966315986e-07 -6.467003569471858135e-07 -1.504669750064687198e-07 2.945631564644911354e-07 -4.256234974237915495e-07 -5.979401318640006234e-07 -9.363371555576276321e-08 
-2.566688521773576872e-07 -1.155701271466934864e-07 -1.048538991210759860e-07 -8.146701411549708026e-08
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.961426584621759912e-03 1.115941082599483745e-03 -3.031765251208297937e-04 -1.476806385973291193e-04 1.626404982145517551e-04 2.959414735838545099e-04 -8.218052979748259081e-04 -6.209575863051346482e-04 -4.317992615409150278e-04 -4.318245500662746248e-04 -1.540086329773595393e-04 2.501603267483476479e-04 2.870447328793081557e-04 1.044987434562893139e-05 1.069360200462149261e-04 3.551235901938351911e-05 -2.702112633043842759e-05 4.919423165728258019e-05 -1.097437038246173443e-05 -1.999750393333679993e-05 2.708870426419875602e-06 -7.323087832490306015e-06 3.476068251120629964e-06 -3.390795263073728027e-05 -3.753926873231404462e-06 7.265565995917290035e-06 4.793273308971415152e-05 -5.414414087876128692e-06 -1.251533203252561358e-06 -4.242062762560063932e-06 3.391372825504724319e-06 2.574006820409962263e-06 2.565578944352956838e-06 -1.005193948897797317e-06 -1.244979559943177920e-07 -6.226814545739537816e-08 1.414674660672169313e-06 3.950992831580657751e-08 3.317398722130379880e-07 1.099146223501573368e-06 -5.756965908644955555e-07 -5.829717391663871752e-07 6.306821331357348478e-07 -1.520254991362623457e-07 -9.417130942758539414e-08 2.514748673411726297e-07 -6.012885334784451625e-07 -7.992534576934015063e-08 4.676311196183304469e-07 -2.583326879149878767e-07 1.828804286820405885e-07 -5.048538218272272140e-07 3.078978873991664029e-07 1.192625133969768478e-07 4.707297878516994921e-08 6.879855545900448722e-08 1.932533733761452229e-07 -4.378302586068363137e-08 -7.653911439912850044e-08 8.692285138395415111e-08 1.950389815099762972e-08 6.830135405316129420e-08 -9.917920486950763284e-08 2.162329698149664617e-09 6.090334154863489225e-08 -5.487730365889311538e-08 -7.035811849550010454e-08 -3.273973804117308361e-08 
-2.271480938680894485e-08 -1.303555713111178435e-08 -1.365744804901927566e-08 -9.047030654725759493e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.379483211838189970e-02 -1.396379023468275827e-03 1.327211202166610378e-02 -2.570305171456441651e-03 -9.259165204774062264e-04 -9.685527569268893508e-04 1.898185105172450326e-03 -1.913951214517718637e-04 -6.622490769088017638e-05 1.621879772926319418e-03 7.538009697509935906e-04 -7.390892025240558593e-04 -9.635738500791380615e-04 1.318789244529286638e-04 -1.554611362615432757e-04 -1.617970521098006421e-04 -2.019192166986651723e-04 -6.018814393143723755e-05 7.179012630970069728e-05 3.841737265816056073e-06 -1.066236087843876055e-04 5.557739584215943868e-05 6.485937340358795058e-06 1.013909762649168521e-04 -5.445302834894155691e-05 -1.773559801556426558e-05 -5.414414087876128692e-06 7.694219516468463454e-05 -3.436663795487520864e-05 1.451145188287327195e-05 1.338077342150127400e-05 9.661643482613603302e-06 -2.113803306867673781e-06 2.591208037930531528e-05 -6.037968131522985410e-06 2.701412632965550816e-06 -5.014498020969170897e-06 -3.617218976855786694e-06 7.162359637654176502e-07 1.592308716766916597e-06 3.894654265204348992e-06 6.085319478055461737e-06 1.096314002475894834e-06 -2.696958479657308597e-06 7.926376302042794269e-07 9.635294055749949871e-07 2.728848825071403190e-06 1.612151791661021473e-06 -1.797233129179782877e-06 1.130991252582778236e-06 5.617484031883347994e-07 1.369204238283362246e-06 -1.509721019477010243e-06 4.690145603672084100e-08 2.687311991134587993e-07 -3.546484314721698762e-07 -9.171341637169666122e-07 2.044004570926797186e-07 4.845598811173780516e-07 -3.511009994328629906e-07 -1.707891858925659091e-07 -1.801138326236477209e-07 3.567664216553801741e-07 -1.052145131941720459e-07 -2.143197326346838355e-07 1.724982056319728334e-07 1.849303543062758270e-07 6.000442361321819088e-09 
1.568472762937707165e-08 1.223125639288827777e-08 3.630573812768552190e-08 1.812301243834629015e-08
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -7.656023950829765265e-03 -2.423758407962214607e-03 -9.696093218920108850e-03 2.907758513954590996e-03 -9.223362785273737994e-05 3.882006593789460872e-04 -1.065851493091645895e-03 1.844775720904867752e-04 -2.611784072064916558e-04 -8.664658670297145909e-04 -2.990433471298725655e-04 3.196742614371007440e-04 5.834546366025882046e-04 8.288457155387046326e-05 -1.348389675452477806e-05 1.135921452009736873e-04 1.314975886739681919e-04 8.899828273028689970e-05 8.930451225534898635e-06 5.082827275921383602e-06 4.676347578093375412e-05 -2.496790025116852357e-05 -1.320624820963963158e-05 -6.554022500793949880e-05 6.589698455494952473e-05 2.225938557862382606e-06 -1.251533203252561358e-06 -3.436663795487520864e-05 4.794984987036161726e-05 -8.770231842247347599e-06 -1.666697513661379338e-05 -1.209373095051701681e-05 -1.912009663813338058e-06 -1.984116428122448004e-05 2.633101886404662472e-06 -2.122897431263114409e-06 3.576944054345103181e-06 3.535575392641577237e-06 -6.059244111092689727e-07 -2.959109723046072643e-06 -1.974862835390086522e-06 -4.299092860322872856e-06 -1.328292575590776167e-06 2.383296157680885366e-06 -4.666650871754792877e-07 -1.106373216701226330e-06 -1.496730229112809614e-06 -1.093082466495289695e-06 1.187221740824392311e-06 -9.839902403692241853e-07 -7.372004231954547149e-07 -8.907716389272790895e-07 9.659242496204091937e-07 -1.452398045184809895e-07 -3.683833605898418016e-07 2.100397093695916530e-07 5.685904568973600958e-07 -2.258161059698749847e-07 -2.758966382161725582e-07 1.600828566499863743e-07 1.825025623440771205e-07 3.636241671077920607e-08 -1.890654270788131530e-07 1.231474677731686145e-07 1.026274034132372151e-07 -5.092190758405757729e-08 -1.622461052881251519e-08 -2.458950764445714201e-08 
1.012517879773706124e-07 8.692203376885269593e-09 -1.229017299970242187e-08 1.781693049583065503e-08
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 3.873034561114936658e-03 1.276625619007184847e-04 4.891419642007533222e-04 7.050605490800735927e-04 -4.071991486931637546e-04 -2.889895002230908889e-04 6.421724751429869031e-04 -3.721908418144862684e-05 1.568286528632138451e-05 5.523266222920545938e-04 1.304392974530912811e-04 -2.351317767343346157e-04 -2.230824282871540765e-04 -3.157230427125036178e-05 -2.100213322416194368e-05 -7.496953629531983231e-05 -3.205075068231233860e-05 -1.793917172178902255e-05 3.583008156604408769e-05 -1.300258944905265885e-07 -2.019294086883735120e-05 7.342877811707802448e-06 1.016348171221444833e-05 3.554355699682964245e-05 -6.819580321071816513e-06 -9.978580694065766296e-06 -4.242062762560063932e-06 1.451145188287327195e-05 -8.770231842247347599e-06 1.371865440708553078e-05 2.895298682310882402e-07 -2.069474367166198109e-07 -4.691172546247505373e-07 4.750585127263203454e-06 -6.570173117177189976e-07 3.860029337344554268e-07 -7.227215156757109969e-07 -8.065206296308028036e-07 -3.031196772471181798e-07 -5.251180072127742030e-07 4.378022434600343931e-07 1.606666307533042569e-06 -3.522385961173027593e-07 -4.991153890221549287e-07 2.120634758604629858e-07 3.974372508781101403e-09 7.707053057775901815e-07 1.843868373721095011e-07 -6.155778124656044435e-07 3.773506784570121783e-07 -2.476366495848755220e-08 5.740343438404113636e-07 -5.116550018917696690e-07 -5.196856989237107258e-08 4.363709901320863580e-08 -1.118102898165226843e-07 -2.654342010464903637e-07 8.442612256831411917e-08 1.621976737758503226e-07 -1.334196417505107447e-07 -7.414198823563760363e-08 -7.592857664575425666e-08 1.376223373748636337e-07 -1.798823405792464636e-08 -6.641418472176448442e-08 7.364037315134527227e-08 8.031425840858179706e-08 2.469494132468412819e-08 
6.142209655267589044e-09 1.293295075490816128e-08 1.775624445705540029e-08 1.602648541450339559e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 6.859048760585152553e-03 -2.289587561184892438e-03 5.859269966536665222e-03 -1.762903629297214291e-03 -2.090702825279103574e-03 -8.115976316868670415e-04 2.071903486986630431e-04 8.577034660354077716e-04 2.322345424930624886e-04 1.319258944292221150e-04 -1.527094718131040256e-04 3.325865956727879543e-04 -8.834108906807161662e-05 -1.350921746972201640e-04 1.313147752210520620e-04 1.266843640778925445e-04 -4.757942185240482137e-05 -7.845376862377115941e-05 -6.602652202207881060e-05 -2.436193456894685799e-05 4.889171253566331496e-05 5.277448054007078852e-05 -7.872774659517026701e-07 1.027199591109988907e-05 -6.390986927413272456e-05 2.808308707703107692e-05 3.391372825504724319e-06 1.338077342150127400e-05 -1.666697513661379338e-05 2.895298682310882402e-07 3.217689713519335525e-05 1.120804243886776502e-05 5.501126990841685430e-07 1.991243266622429840e-05 -8.714190746792384532e-06 6.412026186741289029e-06 -8.620389192056654911e-06 -5.778779292760037053e-06 -1.492786642433309826e-06 5.704149129586227232e-06 3.675946155085613497e-06 1.868517929256865822e-06 3.788725389988128223e-06 -2.734069217605594083e-06 -6.585466975864201927e-07 2.312229143555063246e-06 2.813742900305172557e-07 1.987452252045661470e-06 3.228046702859528223e-07 1.107094325896995624e-06 8.940453005181056347e-07 -6.073636096656496521e-07 -2.399768297383310656e-09 4.789200905756296268e-07 6.486159439402901574e-07 1.569744587801683851e-07 1.866814808014980740e-08 7.583491125861837440e-08 -2.072790422639651187e-07 1.323540839911136273e-07 -1.023586910555842618e-07 3.472810366429472105e-07 -3.104237995683853256e-07 -1.676773269846751308e-07 1.103616887841112891e-07 -2.285323416898064706e-07 -3.501280954021450887e-07 -6.889339443104288423e-08 
-1.882024691449037666e-07 -8.089869793912407793e-08 -5.991782651450717320e-08 -4.974846563085596704e-08
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 2.459744844100453850e-03 -2.066377076444324184e-05 3.916619596057983876e-03 -1.260627616950483814e-03 -2.633703315170182854e-04 -6.260789282172198207e-05 6.506125407401229640e-05 1.241107454449386896e-04 7.720357750624254713e-05 1.498206408742286573e-04 6.205928923840062868e-05 1.368399555932141049e-04 -1.047785179044574303e-04 -4.006748714237507109e-05 3.824002737201768391e-05 3.043965333873382959e-05 -3.683131086371386006e-05 -4.427663070699804955e-05 -2.047190979075051923e-05 -9.800770842029199069e-06 5.047013451780475539e-06 1.785765708892192793e-05 -3.451643251733242962e-06 1.159309148260462930e-05 -3.225598515609925780e-05 9.135989232550937518e-06 2.574006820409962263e-06 9.661643482613603302e-06 -1.209373095051701681e-05 -2.069474367166198109e-07 1.120804243886776502e-05 8.265056840297344139e-06 9.547050109313803902e-07 8.606349986315128375e-06 -2.300609775034491218e-06 2.141534853091293490e-06 -2.490198232384762006e-06 -2.120326462275363965e-06 2.403385504647582972e-08 2.309475437278386506e-06 1.296794857341384874e-06 1.144859125530628976e-06 1.485316068690676216e-06 -1.226972362164709977e-06 -1.168212915017373535e-07 9.107367647764969952e-07 1.735352048162372354e-07 7.159828281877780701e-07 5.340845750474473392e-09 3.985068102310577851e-07 4.393239779671301155e-07 -1.429566892808168969e-07 -5.486106225735870282e-08 1.731111232460107308e-07 2.289151390267980413e-07 2.854113586588568113e-08 -3.926052450641816611e-08 4.295865559688766724e-08 -4.489997775075587473e-08 4.268845851586619143e-08 -4.625235196028542682e-08 1.076624063842555885e-07 -8.009969405191406467e-08 -6.695571317808968167e-08 2.498440593268076046e-08 -7.251002137583272957e-08 -1.165242881487228299e-07 -1.583152580108024508e-08 
-7.214487089902525238e-08 -2.759300245397318268e-08 -1.864429329848813941e-08 -1.861481786572203059e-08
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 8.623712329662630485e-04 6.608127734306666135e-04 -1.264826368334277069e-04 6.708911820589068069e-05 3.326612374644468068e-04 1.897887040932097133e-04 -2.136427887362311656e-04 -1.934416306290414798e-04 9.321557582174938041e-06 -4.300420763623788499e-05 -1.771457353379915124e-05 8.976859120453504657e-05 4.408551797492701760e-05 -1.242419819597935633e-05 2.659940800613697057e-05 -1.918210672150226241e-06 2.323404252428282124e-06 -3.996101136246304946e-06 -9.231290729490578364e-06 -1.783401125914881450e-06 4.012966987645971198e-06 -2.314409195727792971e-06 8.562139676069603107e-07 -2.084648983372586327e-06 -2.630105985469369363e-06 1.179737144004476322e-06 2.565578944352956838e-06 -2.113803306867673781e-06 -1.912009663813338058e-06 -4.691172546247505373e-07 5.501126990841685430e-07 9.547050109313803902e-07 2.347910622502999284e-06 1.457795356888787998e-07 3.808058448216350799e-07 2.335401288939174384e-07 4.191377934108167707e-07 -1.030583521310533497e-07 1.811675455038533287e-07 3.231818971352723382e-07 -2.336458158766709118e-07 -1.075705073863391810e-07 1.725576891899498772e-07 -9.236383599895728235e-08 -6.312478603258127690e-08 5.775101621308153777e-08 -1.598719331144562127e-07 -2.931854449453823038e-08 1.198865761813585856e-07 -2.001549790561424600e-08 5.406774334712463572e-08 -9.476341145230530528e-08 7.976119215145571500e-08 2.635750138958444949e-08 2.982336462537798939e-08 2.143619275536725925e-08 5.366866713130228829e-08 5.811514337023930737e-10 -2.756310199101718529e-08 2.282054155479355626e-08 -8.324190427751539077e-09 2.485087683293876783e-08 -2.558315900600582350e-08 -3.854166442724544756e-09 1.794916099403844893e-08 -1.888691946102067018e-08 -2.949184616579635060e-08 8.344354977149938748e-10 
-1.601417896564284264e-08 -4.190262009618466689e-09 -4.003117106038301459e-09 -6.666840004082733244e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 8.200720604920498993e-03 -7.884496315363308120e-04 6.642541907409913772e-03 -1.875080421848507621e-03 -1.812632608739227214e-03 -7.367020915229897026e-04 5.583919436789780465e-04 4.194341884179539240e-04 1.223052945866118474e-04 5.969079909692888691e-04 1.560986943266852260e-04 -2.423731873066102703e-05 -3.325316271699815351e-04 -3.555702382019164696e-05 2.924456388972403116e-05 1.170651409857963795e-05 -8.518051003777814658e-05 -6.259167037907554534e-05 -1.150582770602273388e-05 -1.193064611366403195e-05 -8.421336577508751036e-06 5.105989180248134276e-05 2.269434145541159094e-06 3.941837050649379286e-05 -5.136490491466927795e-05 8.530318885315444480e-06 -1.005193948897797317e-06 2.591208037930531528e-05 -1.984116428122448004e-05 4.750585127263203454e-06 1.991243266622429840e-05 8.606349986315128375e-06 1.457795356888787998e-07 2.052191911496376305e-05 -6.234091818901569082e-06 4.764417548997256982e-06 -6.415819025056300087e-06 -4.432955009303401602e-06 -5.972265871012777862e-07 3.594437450382876209e-06 3.060714215371471572e-06 3.051950460483643143e-06 2.326275806262501204e-06 -2.414757914653333620e-06 -1.460438458493629240e-07 1.556201504301329193e-06 1.073973596796523045e-06 1.593094532217905693e-06 -3.891473107943884169e-07 1.019810143100346101e-06 6.461288905886662192e-07 1.591228483029162330e-07 -5.041312232072984408e-07 2.702600596236074023e-07 4.552321196152342198e-07 -3.419466910366087793e-08 -2.797048109809135203e-07 1.193868381491380210e-07 4.566749442817169379e-08 -5.307593619167651935e-08 -1.196694217755784684e-07 1.300881272527649852e-07 -4.685717935272465095e-08 -1.298732049324576889e-07 -9.175201702638384423e-09 -6.587747971965346453e-08 -1.305809395832603629e-07 -2.706351467904462243e-08 
-1.026127191999482088e-07 -3.931275331107261171e-08 -2.017344044826520406e-08 -2.312739248423948984e-08
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -4.962248305341734151e-04 1.884541811965423060e-03 -1.190319643811191871e-03 7.338179669298882605e-04 9.830962311733130576e-04 2.718218190133865142e-04 -1.081001096278010032e-04 -2.375091734082166112e-04 -1.252927970015214162e-05 -7.500888771415703603e-05 4.555590013488509287e-05 -4.927734689555310441e-05 7.184448516392883278e-05 5.480338726730083050e-06 -2.080568721423884138e-05 -2.190644154072858760e-05 3.263977312268235973e-05 1.354218409939705828e-05 1.670742185242203457e-05 4.092970260458978076e-06 -1.423799459480948691e-05 -1.736615429395850244e-05 -1.347567054247607791e-06 -4.979247280099254574e-06 1.793505408184338415e-05 -7.300151359965833823e-06 -1.244979559943177920e-07 -6.037968131522985410e-06 2.633101886404662472e-06 -6.570173117177189976e-07 -8.714190746792384532e-06 -2.300609775034491218e-06 3.808058448216350799e-07 -6.234091818901569082e-06 5.283446765621201487e-06 -2.191157655050583224e-06 3.281448614421909646e-06 1.820473021235201193e-06 6.333305749695458720e-07 -1.668571878840943470e-06 -1.301658108119081228e-06 -5.380334314563471657e-07 -1.165646002230252475e-06 6.286501263468425631e-07 3.022502330770561173e-07 -5.564764762356091947e-07 -1.900974812213959987e-07 -7.338897258090786425e-07 -8.008142805217629484e-08 -3.431754634014582926e-07 -2.032565089038636370e-07 1.640643286950083784e-07 5.291858121558251218e-08 -1.478618902634795795e-07 -2.159489659487046436e-07 -3.390142703323486044e-08 3.472303699320571830e-08 -4.444960682780473798e-09 3.104230492175816599e-08 -1.943824307566032445e-08 2.611336988525630043e-08 -9.013142777606432023e-08 8.662415629283762556e-08 4.178619537544149781e-08 -2.089849281286317917e-08 5.643385911018899974e-08 8.530737587620482276e-08 3.813699740703329354e-08 
3.717216373166129533e-08 2.333337366706423846e-08 1.622980588782055270e-08 7.936228871395799181e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 2.155067622225258391e-03 -1.380882336863154284e-03 1.601381672276517686e-03 -4.134408128410163369e-04 -8.132202820873179376e-04 -1.948414915555949885e-04 -1.528124947070308605e-05 1.685384801134868138e-04 5.945758465737223909e-06 8.037519744602711379e-05 -2.720213059526273110e-05 1.085926629963905691e-04 -1.242192901196323053e-05 -8.008739068485121101e-06 3.526266791391301545e-05 1.887828842935070680e-05 -1.237809026142133732e-05 -1.894785430261292143e-05 -7.250651623484685219e-06 -1.521954550187050161e-06 1.305508286702875969e-05 1.421557091532470368e-05 4.658885676016988948e-07 7.113250889964536939e-06 -1.346624417354368161e-05 6.236192529520273055e-06 -6.226814545739537816e-08 2.701412632965550816e-06 -2.122897431263114409e-06 3.860029337344554268e-07 6.412026186741289029e-06 2.141534853091293490e-06 2.335401288939174384e-07 4.764417548997256982e-06 -2.191157655050583224e-06 2.825881031251089607e-06 -2.194862735306189251e-06 -1.482116798933531229e-06 -3.618979610897593336e-07 1.354363924586111548e-06 9.455415714561124352e-07 5.300841610812254292e-07 9.863721891765498404e-07 -7.455834935824343087e-07 -1.915230785464359946e-07 6.152305826190861416e-07 9.186240175365802476e-08 5.563318677510607814e-07 1.464565098947935003e-07 2.732010244754212697e-07 1.879379199500241026e-07 -1.893474808786748181e-07 2.858458805034508924e-08 1.098721868576217649e-07 1.396282468372054099e-07 3.871021154815421361e-08 2.493373517446609098e-08 6.235793431811105549e-09 -6.213165610494248488e-08 3.183211384584028819e-08 -2.034055237077276044e-08 8.939915122302451335e-08 -8.389473296493107119e-08 -4.231882149492701807e-08 2.684941829943946643e-08 -5.835382874322901230e-08 -8.931171084839953515e-08 -1.416966417913568194e-08 
-3.840197104727526729e-08 -2.053405874333945747e-08 -1.574653376803142373e-08 -1.157098103618119473e-08
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -1.085927798701154245e-03 9.773706906995947158e-04 -1.655224502893727591e-03 6.234686898944534830e-04 1.224434480423853850e-03 3.814933487859542755e-04 -9.239860251646188555e-05 -3.482094383594609125e-04 -7.143942230123425654e-05 -1.261702632390506406e-04 4.466549351231284369e-05 -4.781254553463656416e-05 6.302408409915136081e-05 3.395432212217582360e-05 -1.480870205671202186e-05 -2.833140658352186565e-05 2.010665843829168291e-05 2.351704961606973526e-05 1.097452315393216245e-05 2.986387614670308069e-06 -1.265408923371457892e-05 -1.914532795170611606e-05 -4.260337323611845854e-07 -6.139542826779920374e-06 1.783917303711390703e-05 -7.022393758778555738e-06 1.414674660672169313e-06 -5.014498020969170897e-06 3.576944054345103181e-06 -7.227215156757109969e-07 -8.620389192056654911e-06 -2.490198232384762006e-06 4.191377934108167707e-07 -6.415819025056300087e-06 3.281448614421909646e-06 -2.194862735306189251e-06 3.538037532413936144e-06 1.791576929350304435e-06 6.073228474965090802e-07 -1.640681318672833013e-06 -1.276690978450806267e-06 -6.745423766479644751e-07 -1.067167337195252021e-06 7.578523978087593265e-07 2.073821149116240670e-07 -6.214738272746370176e-07 -2.346762295785241515e-07 -6.797657280235880872e-07 -2.961870562920159074e-08 -3.876930569807707352e-07 -2.177599340799471629e-07 1.094055839254111987e-07 6.650028091610249183e-08 -1.306310206855343037e-07 -1.988692199049038058e-07 -2.817554353961776784e-08 3.668943321913643730e-08 -1.937637670647791962e-08 3.851455067711715197e-08 -1.751759756843745717e-08 2.931070330262247680e-08 -8.910687065361821148e-08 7.795826130305549392e-08 4.846144263801207765e-08 -1.870475536154783344e-08 5.532319114227674269e-08 8.532369533149940241e-08 2.471051458681790368e-08 
4.511140416926369482e-08 2.243721786375495658e-08 1.560026714909821334e-08 9.730826444044803855e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -9.174919813057303471e-04 5.288017268068624915e-04 -1.306269580295902786e-03 3.109436448481413501e-04 4.597299578832127198e-04 1.232344235519889524e-04 -7.544187377709950118e-05 -2.178263632688422918e-04 -4.626372365928647343e-05 -7.157316449197834792e-05 1.821857359539658377e-05 -7.568404615422542284e-05 2.818176826297703256e-05 2.456636053431822874e-05 -3.336547759573937449e-05 -2.435334949557669721e-05 1.059585566033998563e-05 2.077036801265803599e-05 9.366992394747756872e-06 5.504263114258058536e-06 -7.255141362063686831e-06 -1.172253059430506888e-05 -3.779206497785300149e-07 -5.629758959217230669e-06 1.324354469328319927e-05 -4.996827641323857075e-06 3.950992831580657751e-08 -3.617218976855786694e-06 3.535575392641577237e-06 -8.065206296308028036e-07 -5.778779292760037053e-06 -2.120326462275363965e-06 -1.030583521310533497e-07 -4.432955009303401602e-06 1.820473021235201193e-06 -1.482116798933531229e-06 1.791576929350304435e-06 1.548276060655171348e-06 3.832145538754136866e-07 -1.063404832721082336e-06 -7.947719327804499466e-07 -5.231638717238008177e-07 -7.671930196888122971e-07 6.282000775337116899e-07 1.935829654168483912e-07 -5.066178104534476166e-07 -8.590448672232153578e-08 -4.412562643445461317e-07 -3.799834051559350419e-08 -2.797323633238087575e-07 -1.719776612572005164e-07 8.589631918525064128e-08 4.667379363928934397e-08 -9.370009903269416191e-08 -1.459122094247069589e-07 -3.106986343407556546e-08 8.738595811041176935e-09 -2.900803605847064694e-08 3.462798215543897479e-08 -1.728512039492004324e-08 3.395315485827203137e-08 -7.169556259157945202e-08 5.508347085921189989e-08 3.790249521597678496e-08 -2.344344286739602842e-08 4.442845107207346507e-08 7.292049480011651936e-08 8.397568829384527522e-09 
4.281411598732249032e-08 1.605017326586468682e-08 1.146345947428834040e-08 1.337192434049511286e-08
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 2.688954467356825784e-04 1.806362606807766032e-04 1.835583799984067982e-04 -1.498344237680553439e-04 1.815483159931088398e-04 8.659112766189907585e-05 2.179376756322894178e-05 -1.146781522127005216e-04 -3.284870675851983302e-05 2.658696764700058170e-05 3.805776701988292934e-05 -1.640081712721153180e-05 -1.488219089555614500e-05 2.222611053478334284e-05 -1.063445457587769095e-05 -1.842124031509980223e-05 -5.407791862073911715e-06 1.245084588743401586e-06 5.363696091605718661e-06 1.532912011431012526e-06 -7.715651156148918495e-06 -2.753513503034467404e-06 4.870165057145087965e-08 2.775672600909365765e-06 1.513660489385331954e-06 -2.466525569032061804e-06 3.317398722130379880e-07 7.162359637654176502e-07 -6.059244111092689727e-07 -3.031196772471181798e-07 -1.492786642433309826e-06 2.403385504647582972e-08 1.811675455038533287e-07 -5.972265871012777862e-07 6.333305749695458720e-07 -3.618979610897593336e-07 6.073228474965090802e-07 3.832145538754136866e-07 4.537049055447404390e-07 -2.511934985937228190e-07 -1.600190967569868889e-07 6.450596743062254548e-08 -1.957889442592928944e-07 8.842898109553240035e-08 9.758776251904260973e-08 -1.297286384787956109e-07 5.461151754859507723e-08 -9.685463049672513148e-08 -6.221874801103715220e-08 -8.652820161070290025e-08 -2.327596891293031970e-08 5.440225093284243391e-08 -1.536995305369625697e-08 -3.455378592253408981e-08 -4.784852143139007834e-08 -2.431179944337980490e-08 -1.967690741078270795e-08 -5.701026010571452679e-09 2.945657077975446913e-08 -1.222593534389939558e-08 1.013384664242022132e-08 -3.256524199915528166e-08 3.312388836461493741e-08 9.310777727390655034e-09 -1.496857353546132346e-08 2.086476587979568180e-08 3.122121292473368845e-08 3.893608853626855498e-09 
1.571139918596603625e-08 5.911897240247968828e-09 5.120615115238622890e-09 5.583568563326357299e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 8.173338144558544866e-04 -7.617746781500025176e-05 1.165215422154015131e-03 -3.984244383030050886e-04 -4.989111164926325918e-04 -1.092912447359346102e-04 -5.166825657201732188e-05 1.512742811430125862e-04 5.163991588943059975e-05 -6.636851409038127663e-06 -2.549466682091819849e-05 8.965088354318442713e-05 -5.451978491054562122e-06 -2.823673957256211920e-05 2.871409738102048020e-05 3.138718151430508815e-05 -6.839770342201077493e-06 -1.228944324201914164e-05 -1.625428292558089621e-05 -2.861592923993595169e-06 1.265320570843961708e-05 1.011681170731442506e-05 -1.493458322754097109e-06 -5.339703117668354174e-07 -1.220037129952917650e-05 5.815880694681688025e-06 1.099146223501573368e-06 1.592308716766916597e-06 -2.959109723046072643e-06 -5.251180072127742030e-07 5.704149129586227232e-06 2.309475437278386506e-06 3.231818971352723382e-07 3.594437450382876209e-06 -1.668571878840943470e-06 1.354363924586111548e-06 -1.640681318672833013e-06 -1.063404832721082336e-06 -2.511934985937228190e-07 1.367618452319511690e-06 6.099373495684655555e-07 2.228765430121271571e-07 7.721370407040248678e-07 -5.215008468938842482e-07 -1.405861590321121375e-07 4.499201865191143933e-07 -8.936807636442820730e-09 3.803370820654224750e-07 1.252660340007136210e-07 1.942203700855893642e-07 1.811362294121596925e-07 -1.688765820982276347e-07 5.114464642030334549e-08 1.026468143143501985e-07 1.288904492798862339e-07 3.956869597650499104e-08 2.985487584219843542e-08 5.410644605152185911e-09 -5.662383631989382321e-08 3.653901351168113672e-08 -1.524177733264301718e-08 7.624476234060067225e-08 -7.637833817621370947e-08 -3.161431724694371366e-08 2.697366181915551966e-08 -5.246303424394046841e-08 -7.730791368793387050e-08 -1.623393723024969207e-08 
-3.789514780331499610e-08 -1.753171251095821456e-08 -1.365831229097354797e-08 -9.917366222069330611e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 9.479600814782030609e-04 -7.535002575803852361e-04 1.224958061405036170e-03 -3.179923538187708456e-04 -3.664481562893740331e-04 -1.134043135378545611e-04 1.242651621836379425e-04 1.379892229610619493e-04 1.635837233325937616e-05 8.788404914792070585e-05 1.511074267337383769e-06 6.937157613623407486e-06 -4.332988792362825058e-05 -2.378083578042228489e-06 6.726295011380237417e-06 9.301846013530460708e-06 -1.121779260755475359e-05 -1.438197776180284643e-05 -1.506965902226843640e-07 -2.254521428416226894e-06 1.099530111560931772e-06 8.345312134885264342e-06 3.256397432931554201e-07 6.010583505940177225e-06 -7.859988199068437925e-06 2.786000210051901315e-06 -5.756965908644955555e-07 3.894654265204348992e-06 -1.974862835390086522e-06 4.378022434600343931e-07 3.675946155085613497e-06 1.296794857341384874e-06 -2.336458158766709118e-07 3.060714215371471572e-06 -1.301658108119081228e-06 9.455415714561124352e-07 -1.276690978450806267e-06 -7.947719327804499466e-07 -1.600190967569868889e-07 6.099373495684655555e-07 7.039091917785482538e-07 4.383163833959617823e-07 4.796720885777766419e-07 -3.998058130691682176e-07 -6.307255626581146150e-08 3.001641552916042146e-07 1.505234251254049058e-07 3.149003828346301648e-07 -2.359688422809137350e-08 1.623709347206900859e-07 1.065128636103765491e-07 -3.064259543750788803e-08 -5.216217004026487127e-08 4.858423991575996336e-08 7.251585543190840200e-08 6.895019553995409227e-09 -3.027239059145786145e-08 1.232977285062902266e-08 -7.900815347798672818e-09 3.165605946498883758e-09 -1.384905934108395950e-08 3.242599343213307153e-08 -2.184703268071556368e-08 -2.262199572199114182e-08 4.588298931132724542e-09 -1.940843395444498010e-08 -3.214872115104035350e-08 -7.311081596654048260e-09 
-1.705273331005863859e-08 -8.634142629840585811e-09 -5.587039652733640085e-09 -4.091012433374921910e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.518845556756011740e-03 -9.496218659871184071e-05 1.568707600725005461e-03 -4.575475998708749669e-04 -2.144726938231217609e-04 -1.197668074231670004e-04 2.233038249486575891e-04 -9.824793223463326199e-06 -3.577896867832735290e-06 1.850251946994860915e-04 7.653819982564934545e-05 -5.540091942239721656e-05 -9.768047664317769927e-05 3.852144294755435534e-06 -1.175350019207360468e-05 -2.635105165839978238e-05 -2.155466002869424675e-05 -1.327053592797013376e-05 8.344759025141013241e-06 -4.645009491739806849e-07 -9.980952003680414243e-06 6.229501122792144230e-06 1.325588628417998583e-06 1.283943202753549981e-05 -7.336238846252679082e-06 -1.839203601637591919e-06 -5.829717391663871752e-07 6.085319478055461737e-06 -4.299092860322872856e-06 1.606666307533042569e-06 1.868517929256865822e-06 1.144859125530628976e-06 -1.075705073863391810e-07 3.051950460483643143e-06 -5.380334314563471657e-07 5.300841610812254292e-07 -6.745423766479644751e-07 -5.231638717238008177e-07 6.450596743062254548e-08 2.228765430121271571e-07 4.383163833959617823e-07 8.260052309516506526e-07 1.563392930673608629e-07 -3.584681419637977487e-07 8.164332494083036750e-08 1.505622264785300139e-07 3.191382911326584675e-07 1.994993131450279778e-07 -2.143057505077566330e-07 1.611749014099301682e-07 7.362511931994254949e-08 1.627776394049187445e-07 -1.810488193776095120e-07 4.133919323147876767e-09 3.675220276913783404e-08 -4.279740605014474725e-08 -1.031456323843953217e-07 3.194144686147028420e-08 5.443376317328032603e-08 -3.856623421795745707e-08 -2.447232567103460520e-08 -1.703411450259194130e-08 4.133778195960887588e-08 -1.758623963936870476e-08 -2.353447626186515939e-08 1.738771216804579208e-08 1.600135926675760555e-08 6.205349102373116247e-09 
-4.590844784987318447e-09 7.889460077209109895e-10 3.758264687913682383e-09 1.558222980716653319e-10
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 5.483009342941916461e-04 -2.588207834887589973e-04 7.965590525515912944e-04 -2.399203394430934725e-04 -2.572500533951217576e-04 -2.897056353938380001e-05 -2.083800174040636169e-05 1.151213068249090648e-04 1.872788832206620713e-05 -3.921700243155538337e-06 -1.958926611795705535e-05 7.592620801441106633e-05 6.423477259243303630e-06 -9.416007522626111619e-06 2.170510741386288563e-05 2.168991103260672700e-05 -4.539239811610367902e-06 -1.074136191982459908e-05 -7.152475622632952571e-06 -2.723736758898496130e-06 6.781122835189948094e-06 6.510829223873973898e-06 -4.283466380571797808e-07 6.139787067412278521e-07 -7.182634381918656094e-06 4.439440254221549007e-06 6.306821331357348478e-07 1.096314002475894834e-06 -1.328292575590776167e-06 -3.522385961173027593e-07 3.788725389988128223e-06 1.485316068690676216e-06 1.725576891899498772e-07 2.326275806262501204e-06 -1.165646002230252475e-06 9.863721891765498404e-07 -1.067167337195252021e-06 -7.671930196888122971e-07 -1.957889442592928944e-07 7.721370407040248678e-07 4.796720885777766419e-07 1.563392930673608629e-07 6.065555887692877995e-07 -3.701589102441557553e-07 -1.322455866932810211e-07 3.370814559131855144e-07 -3.369759113394992300e-08 2.709956952623766401e-07 1.148369860032279897e-07 1.221462070020704633e-07 1.240629152261854313e-07 -1.413632334976916433e-07 4.246169246611797031e-08 7.334451207500938904e-08 8.479786665132760574e-08 3.460365738500246442e-08 2.875386280071280456e-08 1.898918275713294554e-09 -4.424149538024042334e-08 2.831647126285066349e-08 -1.014870606770969391e-08 5.711078804911533739e-08 -5.558907607398652146e-08 -2.185982635652072294e-08 2.227934796570399887e-08 -3.850464778597815364e-08 -5.746406945296525131e-08 -1.062720147662047900e-08 
-2.476628629234901459e-08 -1.237297326227601204e-08 -9.951951863687371573e-09 -7.943621195758910878e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -1.197985824812628722e-03 2.344236943892276923e-05 -1.022309298684174073e-03 1.957288266084472659e-04 2.897270067419454751e-04 6.077410511736578482e-05 -5.199101002130805378e-05 -8.014948496242969226e-05 -1.511944187037296394e-05 -6.340670607870966960e-05 -6.767340925218062386e-06 -2.564710937049201640e-05 2.267883033109129368e-05 4.764253380890537553e-06 -1.448064795891040923e-05 -8.461270410837457042e-06 7.444838442643168811e-06 1.001382695417531708e-05 1.446563839978299185e-06 2.596856710036457097e-06 -7.901045360359031898e-07 -6.362982826924688146e-06 -4.902293212545840522e-07 -4.673534220111206383e-06 6.446402482539179799e-06 -1.835714410676625404e-06 -1.520254991362623457e-07 -2.696958479657308597e-06 2.383296157680885366e-06 -4.991153890221549287e-07 -2.734069217605594083e-06 -1.226972362164709977e-06 -9.236383599895728235e-08 -2.414757914653333620e-06 6.286501263468425631e-07 -7.455834935824343087e-07 7.578523978087593265e-07 6.282000775337116899e-07 8.842898109553240035e-08 -5.215008468938842482e-07 -3.998058130691682176e-07 -3.584681419637977487e-07 -3.701589102441557553e-07 4.002898256245638136e-07 3.629375740485994355e-08 -2.750089894321228156e-07 -8.635168845117293089e-08 -2.138629567681393852e-07 6.304526265783260788e-09 -1.287572978524947595e-07 -1.007740863997920673e-07 2.505952543659238745e-08 3.183132278280558118e-08 -4.059538826347864940e-08 -5.914433145552688073e-08 -6.773488481344990089e-09 1.217899652460255828e-08 -1.408948784450231658e-08 1.176715671202877352e-08 -4.311519017210369154e-09 1.521303242253018209e-08 -2.908463774930743960e-08 1.872796603814322018e-08 1.970595533339436515e-08 -6.823049204866383864e-09 1.751290257893360713e-08 2.996530275212561963e-08 1.498568807365891315e-09 
1.797390522750351605e-08 6.880864341184318795e-09 4.640086474492054404e-09 5.441308503249259370e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 3.508727782263668235e-04 7.998107364222783295e-05 1.706247875115830695e-04 -2.039683793529065191e-05 3.847942857561093563e-05 -2.507367715266709126e-05 3.839577502370147350e-05 -5.035650309211807846e-05 -5.355860726879331885e-06 3.162609645467792235e-05 2.135864082915135857e-05 -4.188549991038170923e-05 -1.987818965056291188e-05 2.654528515378756236e-06 -1.042498032786376992e-05 -9.934412191455545315e-06 -2.319966315274088547e-06 2.150726598090146471e-06 3.142198422638828178e-06 1.197361570135105038e-06 -3.337601418942264740e-06 -7.434749984469295211e-07 1.915809545512185938e-07 1.668082042273912496e-06 7.175054275464823696e-07 -1.343245793829233503e-06 -9.417130942758539414e-08 7.926376302042794269e-07 -4.666650871754792877e-07 2.120634758604629858e-07 -6.585466975864201927e-07 -1.168212915017373535e-07 -6.312478603258127690e-08 -1.460438458493629240e-07 3.022502330770561173e-07 -1.915230785464359946e-07 2.073821149116240670e-07 1.935829654168483912e-07 9.758776251904260973e-08 -1.405861590321121375e-07 -6.307255626581146150e-08 8.164332494083036750e-08 -1.322455866932810211e-07 3.629375740485994355e-08 8.897511436864678203e-08 -6.632954824603484170e-08 6.712271923507567545e-08 -4.852195514944613053e-08 -6.454209554070456418e-08 -1.936913147157000103e-08 -1.486800960238367738e-08 6.110466010568124461e-08 -3.145013760694462915e-08 -1.872182048003827349e-08 -2.437323035691143059e-08 -1.808901909578336925e-08 -2.396383573200005866e-08 1.387308849201662436e-09 1.920281990396014087e-08 -1.269268000654767268e-08 1.883465104445794285e-09 -1.942017741886845117e-08 2.100092659395331668e-08 3.283974934922042720e-09 -1.151374592558550182e-08 1.332632048854671590e-08 1.935339294800482383e-08 3.406953635245660924e-09 
7.572814296516647891e-09 3.493934315710834392e-09 3.263556363497743318e-09 3.115544195942903542e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 6.764947375475536618e-04 -1.306611249122533532e-04 6.022402369423375838e-04 -1.113649231132346148e-04 -2.092620685585261419e-04 -4.115587432262423018e-05 1.074239288935346662e-05 9.522404593106969291e-05 1.408938021508663544e-05 1.027481762106180603e-05 -1.268684764466603794e-05 4.234499075215722181e-05 1.711686737882336770e-06 -6.401279406414283033e-06 1.597165789409206555e-05 1.381533656598010190e-05 -2.129983569628202774e-06 -7.119869868421568059e-06 -3.665741129643249016e-06 -2.338779104371744934e-06 3.633626562737958221e-06 4.545992970062947133e-06 -8.303880306775466180e-08 1.270779049476179787e-06 -4.589462929215870953e-06 2.451051610256039569e-06 2.514748673411726297e-07 9.635294055749949871e-07 -1.106373216701226330e-06 3.974372508781101403e-09 2.312229143555063246e-06 9.107367647764969952e-07 5.775101621308153777e-08 1.556201504301329193e-06 -5.564764762356091947e-07 6.152305826190861416e-07 -6.214738272746370176e-07 -5.066178104534476166e-07 -1.297286384787956109e-07 4.499201865191143933e-07 3.001641552916042146e-07 1.505622264785300139e-07 3.370814559131855144e-07 -2.750089894321228156e-07 -6.632954824603484170e-08 2.399284196826176457e-07 -1.737153147197673296e-09 1.660310178052409190e-07 5.017261215022868893e-08 9.051318085858313061e-08 7.806631686323872622e-08 -6.799081187677742815e-08 1.275237950376850998e-08 3.998843690872219847e-08 5.081553886210472215e-08 1.845032517991575450e-08 1.352485089036381318e-08 6.542038706848154551e-09 -2.593184079853223657e-08 1.468252719493615218e-08 -9.076124586116643304e-09 3.389515094825672786e-08 -3.028054913037645288e-08 -1.520411604385467981e-08 1.305189546573797036e-08 -2.243923167059003606e-08 -3.463750208300704733e-08 -3.219181777680236607e-09 -1.713112101534010932e-08 
-7.179486384728923726e-09 -5.693791263976027113e-09 -5.655167849302724547e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 6.306790547611602167e-04 -7.032096143211686646e-05 5.216720730557096366e-04 -1.661255559548034742e-04 -1.283616488059200812e-04 -7.827536125282888511e-05 1.082598213452782296e-04 -9.563561893121391626e-06 -5.335971353757113604e-06 8.635562568171455090e-05 3.566195657343805071e-05 -5.178197633985196749e-05 -4.901026187410532708e-05 4.880955365653679169e-06 -1.253888389383585237e-05 -1.527909938575709473e-05 -9.439960295196752100e-06 -2.213786450559341237e-06 5.156257426738980782e-06 9.819325193866895831e-07 -5.420458065911870621e-06 2.459794336524338313e-06 6.778385376820873895e-07 5.614346977311933670e-06 -1.923636255019687593e-06 -1.556577048268841076e-06 -6.012885334784451625e-07 2.728848825071403190e-06 -1.496730229112809614e-06 7.707053057775901815e-07 2.813742900305172557e-07 1.735352048162372354e-07 -1.598719331144562127e-07 1.073973596796523045e-06 -1.900974812213959987e-07 9.186240175365802476e-08 -2.346762295785241515e-07 -8.590448672232153578e-08 5.461151754859507723e-08 -8.936807636442820730e-09 1.505234251254049058e-07 3.191382911326584675e-07 -3.369759113394992300e-08 -8.635168845117293089e-08 6.712271923507567545e-08 -1.737153147197673296e-09 1.765367784993672978e-07 5.846822390989983794e-08 -1.177458001520560494e-07 5.043219476178019965e-08 7.589162144046531886e-09 9.736005387176186237e-08 -8.774555272360771323e-08 -1.039095353036354897e-08 1.389564363633767509e-09 -2.704028939793962101e-08 -5.473707080130010802e-08 1.005227681043366103e-08 3.370320831896409426e-08 -2.351130249849853488e-08 -5.881631771301232767e-09 -1.903646534995379151e-08 2.777003052892447577e-08 -3.388178484857443643e-09 -1.644126177753830969e-08 1.542086798442522191e-08 1.953543588167739113e-08 2.034350056767122949e-09 
4.903852927355137348e-09 2.406418217882636814e-09 3.405996043314057935e-09 2.964985485091416447e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 4.876088149690555626e-04 -2.791938146040060572e-04 5.800935098855347855e-04 -1.950803455168761871e-04 -2.200454153016562762e-04 -5.208487749324975231e-05 3.677478308399171844e-05 6.660100724028785888e-05 6.841423103567814359e-06 3.688489484844579153e-05 -7.031695996204021260e-07 1.641287046299667044e-05 -1.730822146712370386e-05 -3.023001551370472642e-06 6.736831285819334451e-06 5.417270580583692312e-06 -5.785650638778238011e-06 -6.434177212053375019e-06 -2.125306917406018259e-06 -9.470142459945499828e-07 1.685420671364507618e-06 4.384666331021207666e-06 8.001765279809289614e-08 2.558776263775449283e-06 -4.454313748893245547e-06 1.576675673177870600e-06 -7.992534576934015063e-08 1.612151791661021473e-06 -1.093082466495289695e-06 1.843868373721095011e-07 1.987452252045661470e-06 7.159828281877780701e-07 -2.931854449453823038e-08 1.593094532217905693e-06 -7.338897258090786425e-07 5.563318677510607814e-07 -6.797657280235880872e-07 -4.412562643445461317e-07 -9.685463049672513148e-08 3.803370820654224750e-07 3.149003828346301648e-07 1.994993131450279778e-07 2.709956952623766401e-07 -2.138629567681393852e-07 -4.852195514944613053e-08 1.660310178052409190e-07 5.846822390989983794e-08 1.701023545207490636e-07 5.675942461996447086e-09 8.691831292258790699e-08 5.894411669674562406e-08 -2.852173504913856549e-08 -1.774867067123445495e-08 3.030720354811477188e-08 4.470719771873073377e-08 5.915743256385989722e-09 -8.943026665043525637e-09 5.549447870385363517e-09 -7.950915651572463666e-09 4.119350159523310016e-09 -7.527399261466176697e-09 2.058510833311354388e-08 -1.635827494157548386e-08 -1.222594802786561353e-08 4.526750400287865915e-09 -1.284007495894431329e-08 -2.054466879875352151e-08 -4.634036812363204047e-09 
-1.061537759107453824e-08 -5.214851137178819199e-09 -3.580413847932004248e-09 -2.704541291127445528e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -2.608135762638004972e-04 -1.030727866645449065e-06 -2.947047319011987347e-04 1.153937357392687826e-04 -1.951886707610114675e-05 3.456386565905998561e-05 -8.277894084684063415e-05 3.148035494170563529e-05 -1.922103537959432319e-06 -6.144853042666176615e-05 -2.611622615625184613e-05 4.579202980697374780e-05 3.795870938052484637e-05 -1.296651148190591137e-06 1.083893563335877063e-05 1.337195308266203661e-05 5.981169738424131981e-06 1.693601703710781671e-06 -3.491962166533340183e-06 -6.561953175666949185e-07 4.974502521845857810e-06 1.569064048095963131e-07 -5.915189901801597069e-07 -3.751933528977560824e-06 8.389716000141876248e-07 1.667796887524464566e-06 4.676311196183304469e-07 -1.797233129179782877e-06 1.187221740824392311e-06 -6.155778124656044435e-07 3.228046702859528223e-07 5.340845750474473392e-09 1.198865761813585856e-07 -3.891473107943884169e-07 -8.008142805217629484e-08 1.464565098947935003e-07 -2.961870562920159074e-08 -3.799834051559350419e-08 -6.221874801103715220e-08 1.252660340007136210e-07 -2.359688422809137350e-08 -2.143057505077566330e-07 1.148369860032279897e-07 6.304526265783260788e-09 -6.454209554070456418e-08 5.017261215022868893e-08 -1.177458001520560494e-07 5.675942461996447086e-09 1.144418286856239086e-07 -2.233523932966949917e-08 7.620108360252366392e-09 -9.970703129984356273e-08 7.763852130342205862e-08 1.849358189320957254e-08 8.678429457942457977e-09 2.485576272723692988e-08 4.667318752239041829e-08 -1.113636919942217446e-08 -3.182712757557293831e-08 2.077839393141560565e-08 5.814709736565113226e-09 2.201999088614692840e-08 -3.015300528206738155e-08 -8.968305818659364664e-11 1.463812561619256552e-08 -1.671571016592462143e-08 -2.170153009502604929e-08 -4.576776316397327064e-09 
-4.623030803799492177e-09 -3.676138418160822304e-09 -4.093499942089111349e-09 -2.387783046130999934e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 2.860754560132674584e-04 -3.156305908982825462e-05 2.868423323168219985e-04 -7.432780230046229404e-05 -1.287634508994955841e-04 -4.884980097861421091e-05 3.674818401997486430e-05 3.361328170202302788e-05 1.541714632343483258e-05 3.189756072788974174e-05 4.829055327057635835e-06 1.152707906296191173e-07 -1.721228523917412866e-05 -8.258443736897127149e-06 1.450878037906366340e-06 1.382077939810439417e-06 -3.004212273746950412e-06 -4.676667766669187167e-06 -1.445813868687924058e-06 -5.446076603722090273e-07 1.276132510500819379e-06 2.705949146805865008e-06 -6.210773323252043075e-08 2.042987101463171457e-06 -2.854761963097016561e-06 5.696283390954355290e-07 -2.583326879149878767e-07 1.130991252582778236e-06 -9.839902403692241853e-07 3.773506784570121783e-07 1.107094325896995624e-06 3.985068102310577851e-07 -2.001549790561424600e-08 1.019810143100346101e-06 -3.431754634014582926e-07 2.732010244754212697e-07 -3.876930569807707352e-07 -2.797323633238087575e-07 -8.652820161070290025e-08 1.942203700855893642e-07 1.623709347206900859e-07 1.611749014099301682e-07 1.221462070020704633e-07 -1.287572978524947595e-07 -1.936913147157000103e-08 9.051318085858313061e-08 5.043219476178019965e-08 8.691831292258790699e-08 -2.233523932966949917e-08 7.296523813654509425e-08 3.090083027248087601e-08 1.272272778200457502e-08 -3.082014255411869007e-08 1.535491510019934251e-08 2.896940840956550405e-08 3.936271271980139244e-10 -1.428001305702432960e-08 9.754242526335720493e-09 -1.599243082521937534e-10 -2.442163030425013077e-09 -9.514959834834210030e-09 1.009182651662351087e-08 -4.794891437873309109e-09 -8.355747478429848824e-09 9.380366102948030282e-10 -5.393354794852180085e-09 -1.029900446629442975e-08 -5.772295607941173226e-11 
-8.462113326275320678e-09 -2.509512649004528219e-09 -1.424372583057414257e-09 -2.386021341959502953e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 2.207174968183846578e-04 -9.416422033959063800e-07 3.018829008353570548e-04 -9.182450807080003836e-05 -3.001391282702328488e-05 -1.374164913006776855e-05 8.095439642159416729e-06 2.565746411228413533e-05 7.827915823939723294e-06 7.482435597411070159e-06 -7.107727357430579850e-07 1.095455392325919880e-05 -4.256380650441868637e-06 -3.042455569613446421e-06 4.054202211064563128e-06 4.350261058384740647e-06 -1.895187360954479683e-06 -2.900196139012895507e-06 -1.374818018631555688e-06 -7.186494168135633854e-07 4.773875732849080013e-07 1.373703476853237022e-06 1.283840886846695906e-07 5.734361285883117776e-07 -2.079300727543130159e-06 8.164855387896602175e-07 1.828804286820405885e-07 5.617484031883347994e-07 -7.372004231954547149e-07 -2.476366495848755220e-08 8.940453005181056347e-07 4.393239779671301155e-07 5.406774334712463572e-08 6.461288905886662192e-07 -2.032565089038636370e-07 1.879379199500241026e-07 -2.177599340799471629e-07 -1.719776612572005164e-07 -2.327596891293031970e-08 1.811362294121596925e-07 1.065128636103765491e-07 7.362511931994254949e-08 1.240629152261854313e-07 -1.007740863997920673e-07 -1.486800960238367738e-08 7.806631686323872622e-08 7.589162144046531886e-09 5.894411669674562406e-08 7.620108360252366392e-09 3.090083027248087601e-08 3.662745163141860206e-08 -1.789628253153569436e-08 -9.853074190462920490e-10 1.516733104103525796e-08 1.901064122967732007e-08 3.999141952666102073e-09 1.374608260777116618e-10 2.705956943837161043e-09 -5.971072473499102804e-09 4.270654293345999980e-09 -3.621578336948509903e-09 1.034197204034586696e-08 -8.181298905621171952e-09 -5.608425930048213983e-09 3.192058708212345319e-09 -6.815664733928838862e-09 -1.075130822239076879e-08 -1.335981787320148522e-09 
-5.825102474542005024e-09 -2.384695284474049028e-09 -1.744889923726272485e-09 -1.708367538731110283e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.893092101203785885e-04 8.729346800827646679e-05 1.293252209069996285e-04 -6.701449858117690622e-05 3.478822809372388730e-05 -2.331468774836307196e-05 6.673979250841398185e-05 -3.990064124238967998e-05 2.076167297879437017e-06 5.190915115178377788e-05 2.510774275624423757e-05 -4.618846412808259790e-05 -3.307189074513850586e-05 1.079840990757596066e-06 -1.132394321991034218e-05 -1.325867461541711008e-05 -4.494433710206043521e-06 -3.640658582318879352e-07 3.266141439568210905e-06 1.068148721044382209e-06 -4.563858070661183952e-06 -4.858405210958268478e-07 4.554107735126645981e-07 3.173841890454395189e-06 -6.910193989440181831e-08 -1.731280606646242096e-06 -5.048538218272272140e-07 1.369204238283362246e-06 -8.907716389272790895e-07 5.740343438404113636e-07 -6.073636096656496521e-07 -1.429566892808168969e-07 -9.476341145230530528e-08 1.591228483029162330e-07 1.640643286950083784e-07 -1.893474808786748181e-07 1.094055839254111987e-07 8.589631918525064128e-08 5.440225093284243391e-08 -1.688765820982276347e-07 -3.064259543750788803e-08 1.627776394049187445e-07 -1.413632334976916433e-07 2.505952543659238745e-08 6.110466010568124461e-08 -6.799081187677742815e-08 9.736005387176186237e-08 -2.852173504913856549e-08 -9.970703129984356273e-08 1.272272778200457502e-08 -1.789628253153569436e-08 9.716838302853998102e-08 -6.892243056764489814e-08 -1.960657489593498958e-08 -1.250424142120699998e-08 -2.249966025688686229e-08 -4.098064525377453604e-08 9.892571623317000948e-09 2.928134151743160779e-08 -2.019195817422481686e-08 -5.226197631985438192e-09 -2.198274424791199868e-08 2.870828222607401537e-08 1.584707807451644887e-09 -1.365173709363340688e-08 1.657730172323531683e-08 2.180447752466151935e-08 4.901900262874927553e-09 
5.212458976434576952e-09 3.968265845175372203e-09 4.133344012044137822e-09 2.220088253306263855e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -2.447489546475116852e-04 -2.325496455731076726e-05 -2.466032303862507910e-04 9.163507154272652205e-05 4.485919123229179930e-07 2.623967104073993291e-05 -6.246090667361027646e-05 1.406272239379101979e-05 -6.690447756701302991e-07 -4.769669935150141820e-05 -1.972073898776988349e-05 2.966406717171467887e-05 2.863442294571222292e-05 -8.676380647538681878e-07 6.883386714315126653e-06 9.004784301809802747e-06 5.140046142235332652e-06 1.778657434708435252e-06 -2.379288734981468331e-06 -4.578304226890583509e-07 3.597772699908132245e-06 -5.439557268092902337e-07 -4.716321995240883795e-07 -3.019572854956388139e-06 1.049957303304256693e-06 1.009876605812521656e-06 3.078978873991664029e-07 -1.509721019477010243e-06 9.659242496204091937e-07 -5.116550018917696690e-07 -2.399768297383310656e-09 -5.486106225735870282e-08 7.976119215145571500e-08 -5.041312232072984408e-07 5.291858121558251218e-08 2.858458805034508924e-08 6.650028091610249183e-08 4.667379363928934397e-08 -1.536995305369625697e-08 5.114464642030334549e-08 -5.216217004026487127e-08 -1.810488193776095120e-07 4.246169246611797031e-08 3.183132278280558118e-08 -3.145013760694462915e-08 1.275237950376850998e-08 -8.774555272360771323e-08 -1.774867067123445495e-08 7.763852130342205862e-08 -3.082014255411869007e-08 -9.853074190462920490e-10 -6.892243056764489814e-08 6.156595904573130256e-08 7.456385778852068476e-09 -2.073250495897672410e-09 1.609898353371081657e-08 3.491686552016343673e-08 -9.717211843280145963e-09 -2.204990345560686055e-08 1.531344961610322519e-08 6.555809788885369186e-09 1.232292869528463972e-08 -1.932853378702077934e-08 1.690431001885026155e-09 9.362434245182038557e-09 -1.020856010109448835e-08 -1.230298566546570222e-08 -2.701806365784534529e-09 
-1.500474639486970880e-09 -1.925962682941341123e-09 -2.459628040200187293e-09 -9.493145268760885368e-10
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 4.300228571694425051e-05 1.476121983403341457e-05 5.118090102464685552e-05 -1.448637271028778137e-05 -2.166437761715150461e-05 -3.159038315610843235e-06 -6.270041031876362709e-06 1.413509911422772095e-05 4.365661606724274408e-06 -5.038508685294498293e-06 -2.657060087150566561e-06 9.322242005733639048e-06 2.118184174149720994e-06 -2.607562874279888942e-06 2.711689676113091227e-06 3.745903749580492371e-06 -1.130987251304275965e-08 -8.726387820627688336e-07 -1.374841754268015205e-06 -2.475616309151499188e-07 1.198699228118513262e-06 8.203714546575005508e-07 -1.082096389824352911e-07 -2.573193011753605258e-07 -8.266511660398396141e-07 6.073106869740168480e-07 1.192625133969768478e-07 4.690145603672084100e-08 -1.452398045184809895e-07 -5.196856989237107258e-08 4.789200905756296268e-07 1.731111232460107308e-07 2.635750138958444949e-08 2.702600596236074023e-07 -1.478618902634795795e-07 1.098721868576217649e-07 -1.306310206855343037e-07 -9.370009903269416191e-08 -3.455378592253408981e-08 1.026468143143501985e-07 4.858423991575996336e-08 4.133919323147876767e-09 7.334451207500938904e-08 -4.059538826347864940e-08 -1.872182048003827349e-08 3.998843690872219847e-08 -1.039095353036354897e-08 3.030720354811477188e-08 1.849358189320957254e-08 1.535491510019934251e-08 1.516733104103525796e-08 -1.960657489593498958e-08 7.456385778852068476e-09 1.098793758099281772e-08 1.181534697512247196e-08 5.514642216490628203e-09 5.001767377138776986e-09 1.902222123031297195e-10 -6.513990402203609033e-09 3.920235976799583595e-09 -1.469655207096823352e-09 8.080435927506419097e-09 -8.185574761862596087e-09 -2.673312307851300231e-09 3.372978499066504765e-09 -5.404675697439056545e-09 -7.984865260948639608e-09 -1.653712318060761975e-09 
-3.494916561460158804e-09 -1.677394702764909911e-09 -1.380389870974265595e-09 -1.151534175119784583e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 7.921976770215414132e-05 -1.198720387177489829e-06 8.869939690229217285e-05 -3.383942663067263584e-05 -4.489077262878641498e-05 -1.401897468028334396e-05 -2.322381621328806462e-06 2.131151768185685438e-05 7.597883185995727905e-06 1.681868023706654693e-06 -3.432175703763764522e-06 1.048654694881498200e-05 -1.798061078961197359e-06 -3.670293928117995323e-06 3.445760783063244821e-06 3.277886338508914530e-06 -9.728453920491350508e-07 -1.633268037881527397e-06 -1.752465757194270748e-06 -5.681244876201166376e-07 1.136884661498420252e-06 1.173937404748231261e-06 -4.185379481573339169e-08 1.204355174926739101e-07 -1.397456101202905326e-06 5.811455916257587519e-07 4.707297878516994921e-08 2.687311991134587993e-07 -3.683833605898418016e-07 4.363709901320863580e-08 6.486159439402901574e-07 2.289151390267980413e-07 2.982336462537798939e-08 4.552321196152342198e-07 -2.159489659487046436e-07 1.396282468372054099e-07 -1.988692199049038058e-07 -1.459122094247069589e-07 -4.784852143139007834e-08 1.288904492798862339e-07 7.251585543190840200e-08 3.675220276913783404e-08 8.479786665132760574e-08 -5.914433145552688073e-08 -2.437323035691143059e-08 5.081553886210472215e-08 1.389564363633767509e-09 4.470719771873073377e-08 8.678429457942457977e-09 2.896940840956550405e-08 1.901064122967732007e-08 -1.250424142120699998e-08 -2.073250495897672410e-09 1.181534697512247196e-08 1.805338764651566868e-08 4.284065222807800358e-09 6.614588811485880332e-10 2.680083915319239894e-09 -4.694192907749750100e-09 2.572002299580761414e-09 -3.545306626471533415e-09 8.538114169949871783e-09 -7.333682557941810937e-09 -3.814486374974892804e-09 3.115826762281089117e-09 -5.404805000094673438e-09 -8.571302941338531498e-09 -1.543543165722352389e-09 
-4.946302491587467024e-09 -1.882241233930865467e-09 -1.385838303871160723e-09 -1.472907831152439491e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -7.911798454858443225e-05 -8.472052786346177494e-06 -7.044373523278919097e-05 3.197138926804353562e-05 -3.351763805884130585e-06 9.342002799247602312e-06 -1.405854773861216713e-05 1.300270781147599408e-05 2.173718635118782802e-06 -1.356519243751721837e-05 -6.489055943141860449e-06 1.090389373806454206e-05 8.365292388722692004e-06 -1.543772288019227860e-06 3.043784731780698586e-06 4.094318463592796512e-06 1.624998609077082158e-06 -2.478368698217313839e-07 -9.714309424680902456e-07 -2.854924526465894277e-07 1.350720801120938314e-06 1.458400908808626721e-07 -1.569499840195947381e-07 -7.738238062717366057e-07 4.008844992338183767e-11 4.863310634396478249e-07 6.879855545900448722e-08 -3.546484314721698762e-07 2.100397093695916530e-07 -1.118102898165226843e-07 1.569744587801683851e-07 2.854113586588568113e-08 2.143619275536725925e-08 -3.419466910366087793e-08 -3.390142703323486044e-08 3.871021154815421361e-08 -2.817554353961776784e-08 -3.106986343407556546e-08 -2.431179944337980490e-08 3.956869597650499104e-08 6.895019553995409227e-09 -4.279740605014474725e-08 3.460365738500246442e-08 -6.773488481344990089e-09 -1.808901909578336925e-08 1.845032517991575450e-08 -2.704028939793962101e-08 5.915743256385989722e-09 2.485576272723692988e-08 3.936271271980139244e-10 3.999141952666102073e-09 -2.249966025688686229e-08 1.609898353371081657e-08 5.514642216490628203e-09 4.284065222807800358e-09 6.706300637761058500e-09 1.031683236072874529e-08 -1.376242754927492827e-09 -7.972777818146092676e-09 5.046833604507670645e-09 3.020577615769639914e-10 6.355953165773100881e-09 -7.623937085234914518e-09 -7.047772288056651254e-10 3.915996288868898277e-09 -4.614466371529034522e-09 -6.359810910879953395e-09 -9.011261674338129447e-10 
-2.125140389405122149e-09 -1.078435675947127092e-09 -1.105826871582507158e-09 -9.262389880798338464e-10
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -1.366093265264323501e-04 -2.086726260716456291e-06 -1.569195688554354871e-04 6.182566649155243031e-05 8.932572823863872939e-08 1.689455884534502838e-05 -3.658116265943532652e-05 1.290990402637764693e-05 -2.380443856231859398e-07 -2.829047263357563656e-05 -1.228234037790994161e-05 2.027432401588401143e-05 1.821986625390242394e-05 -1.141356908366740230e-06 5.042027123378156580e-06 5.877528040272694759e-06 3.311804951802428678e-06 8.041116949300991615e-07 -1.395717084989465948e-06 -4.508289033213431892e-07 2.178105945193848382e-06 -2.790189499960211380e-07 -2.485735330464010601e-07 -1.763794103828677374e-06 5.892587474358313580e-07 6.108294573604946415e-07 1.932533733761452229e-07 -9.171341637169666122e-07 5.685904568973600958e-07 -2.654342010464903637e-07 1.866814808014980740e-08 -3.926052450641816611e-08 5.366866713130228829e-08 -2.797048109809135203e-07 3.472303699320571830e-08 2.493373517446609098e-08 3.668943321913643730e-08 8.738595811041176935e-09 -1.967690741078270795e-08 2.985487584219843542e-08 -3.027239059145786145e-08 -1.031456323843953217e-07 2.875386280071280456e-08 1.217899652460255828e-08 -2.396383573200005866e-08 1.352485089036381318e-08 -5.473707080130010802e-08 -8.943026665043525637e-09 4.667318752239041829e-08 -1.428001305702432960e-08 1.374608260777116618e-10 -4.098064525377453604e-08 3.491686552016343673e-08 5.001767377138776986e-09 6.614588811485880332e-10 1.031683236072874529e-08 2.134369151751626251e-08 -4.693840820177987687e-09 -1.364201020551215353e-08 9.112505454194893238e-09 2.859507355905607351e-09 8.297746720382353569e-09 -1.186140480763356792e-08 5.610824631456572223e-10 6.238133691134991830e-09 -6.548047428973199068e-09 -8.328872739069338896e-09 -1.230520445986353348e-09 
-1.703478768670802332e-09 -1.249286213299707842e-09 -1.549584606805220438e-09 -9.654101719683703709e-10
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 5.193283897093149476e-05 1.305057917378352134e-05 4.730353989668989331e-05 -1.105883034174681005e-05 6.158866306933454851e-06 -2.527292274620906343e-07 9.874275028953842211e-06 -1.518064024033400747e-07 3.174397085963111570e-06 6.569053642875453973e-06 2.034757309990990878e-06 -2.616271890777917323e-06 -3.944866672102277042e-06 -1.499600456045305431e-06 1.515205457629298163e-07 -5.604920655777498291e-07 -4.053063378453189973e-07 -8.631858710836139504e-07 -1.100584666138763304e-07 -1.319957856052361168e-07 -2.040400604256013644e-07 1.480863047751040585e-07 2.505458125531476584e-08 4.629099487134646169e-07 -3.725984889029965819e-07 -3.871871234503116670e-08 -4.378302586068363137e-08 2.044004570926797186e-07 -2.258161059698749847e-07 8.442612256831411917e-08 7.583491125861837440e-08 4.295865559688766724e-08 5.811514337023930737e-10 1.193868381491380210e-07 -4.444960682780473798e-09 6.235793431811105549e-09 -1.937637670647791962e-08 -2.900803605847064694e-08 -5.701026010571452679e-09 5.410644605152185911e-09 1.232977285062902266e-08 3.194144686147028420e-08 1.898918275713294554e-09 -1.408948784450231658e-08 1.387308849201662436e-09 6.542038706848154551e-09 1.005227681043366103e-08 5.549447870385363517e-09 -1.113636919942217446e-08 9.754242526335720493e-09 2.705956943837161043e-09 9.892571623317000948e-09 -9.717211843280145963e-09 1.902222123031297195e-10 2.680083915319239894e-09 -1.376242754927492827e-09 -4.693840820177987687e-09 2.892761708850901180e-09 2.133483265227577989e-09 -1.706059827048941211e-09 -2.147278824525898217e-09 -3.457470922464176957e-10 1.926192264894618847e-09 -1.005799984017218873e-09 -6.167090068420999429e-10 5.384672208939864336e-10 1.308438360289447574e-10 7.783586985478703598e-10 
-1.181955499406468477e-09 3.585859504057861424e-11 1.700151892994023716e-10 -3.554820552994491015e-10
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 4.030193279876571194e-05 1.705204147931304874e-05 4.758552861056551621e-05 -2.988948902017643554e-05 1.456050878924346965e-05 -4.921059296074571016e-06 2.060428261404691552e-05 -1.454233196208581819e-05 -3.662689477174183371e-06 1.592046946430446732e-05 8.626285529836870479e-06 -1.335044618253140997e-05 -1.008376334965083370e-05 2.322605410427941311e-06 -3.794054591613607972e-06 -5.062419837518941381e-06 -2.081515874057325160e-06 2.280979474181072700e-07 1.490945565476103243e-06 3.575204695786335595e-07 -1.883232178564239638e-06 -2.264991835704122336e-07 2.448701463667898783e-07 1.066211802757781815e-06 4.963575964239832487e-08 -6.323569176957675461e-07 -7.653911439912850044e-08 4.845598811173780516e-07 -2.758966382161725582e-07 1.621976737758503226e-07 -2.072790422639651187e-07 -4.489997775075587473e-08 -2.756310199101718529e-08 4.566749442817169379e-08 3.104230492175816599e-08 -6.213165610494248488e-08 3.851455067711715197e-08 3.462798215543897479e-08 2.945657077975446913e-08 -5.662383631989382321e-08 -7.900815347798672818e-09 5.443376317328032603e-08 -4.424149538024042334e-08 1.176715671202877352e-08 1.920281990396014087e-08 -2.593184079853223657e-08 3.370320831896409426e-08 -7.950915651572463666e-09 -3.182712757557293831e-08 -1.599243082521937534e-10 -5.971072473499102804e-09 2.928134151743160779e-08 -2.204990345560686055e-08 -6.513990402203609033e-09 -4.694192907749750100e-09 -7.972777818146092676e-09 -1.364201020551215353e-08 2.133483265227577989e-09 1.087263099110076479e-08 -6.794393702515898873e-09 -8.085448236799684996e-10 -8.056434412848676504e-09 1.004812194110773235e-08 9.850712845543645382e-10 -4.822586176229844288e-09 6.029711171974792275e-09 8.154128055234787738e-09 9.636732813150622573e-10 
2.693023014676496033e-09 1.397729852121938554e-09 1.449553161970267951e-09 1.096061068276667264e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -6.123426162489244888e-05 -2.382961695293209979e-05 -3.192421682876966211e-05 9.469614553325014691e-06 -3.433938999049238844e-06 6.175279472530517966e-06 -1.460365363383939131e-05 8.528530866488123603e-06 1.611671919285619567e-06 -1.193357918772824713e-05 -6.660283232602078376e-06 9.580811289778705769e-06 7.085648790238564006e-06 -1.216767464131830376e-06 2.637348567422762434e-06 3.142843063817634383e-06 1.184554945980987814e-06 -2.114012247821179239e-07 -9.869014142786351643e-07 -2.734315681762009625e-07 1.168569542372381486e-06 -2.082119025055192160e-08 -1.213793734953603467e-07 -7.435410395407145560e-07 -7.344732201048610832e-08 4.260839427667649490e-07 8.692285138395415111e-08 -3.511009994328629906e-07 1.600828566499863743e-07 -1.334196417505107447e-07 1.323540839911136273e-07 4.268845851586619143e-08 2.282054155479355626e-08 -5.307593619167651935e-08 -1.943824307566032445e-08 3.183211384584028819e-08 -1.751759756843745717e-08 -1.728512039492004324e-08 -1.222593534389939558e-08 3.653901351168113672e-08 3.165605946498883758e-09 -3.856623421795745707e-08 2.831647126285066349e-08 -4.311519017210369154e-09 -1.269268000654767268e-08 1.468252719493615218e-08 -2.351130249849853488e-08 4.119350159523310016e-09 2.077839393141560565e-08 -2.442163030425013077e-09 4.270654293345999980e-09 -2.019195817422481686e-08 1.531344961610322519e-08 3.920235976799583595e-09 2.572002299580761414e-09 5.046833604507670645e-09 9.112505454194893238e-09 -1.706059827048941211e-09 -6.794393702515898873e-09 4.829670886310697918e-09 9.016651686485327116e-10 4.945714155285675796e-09 -6.304929515339946905e-09 -4.618570351479315036e-10 3.100767567617724350e-09 -3.815508296250398210e-09 -5.062911539008353216e-09 -7.478359423309641358e-10 
-1.596217913727631208e-09 -8.703629506411777718e-10 -9.156361354545421597e-10 -6.194326780592672405e-10
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -3.540274696008339036e-05 -1.049045148166791454e-05 -3.853971097288183675e-05 7.631446310161534228e-06 -7.884007815521751863e-07 9.352652663449905533e-07 -6.080081318369939757e-06 -1.267722123747141608e-07 -2.955290454887205809e-06 -5.162055851454997439e-06 -1.195063361199627214e-06 1.277334091824381308e-06 3.015605347617571720e-06 1.424941754188119410e-06 -2.598954029735527643e-07 1.389952917046311231e-07 3.964074421047910244e-07 7.199771695890263606e-07 1.629617307616035339e-07 1.019937499235025219e-07 1.086201913125062155e-09 -1.808243747754117441e-07 -4.174203063759120042e-08 -3.457758134077817468e-07 3.599422655760447568e-07 -2.005223454683642556e-08 1.950389815099762972e-08 -1.707891858925659091e-07 1.825025623440771205e-07 -7.414198823563760363e-08 -1.023586910555842618e-07 -4.625235196028542682e-08 -8.324190427751539077e-09 -1.196694217755784684e-07 2.611336988525630043e-08 -2.034055237077276044e-08 2.931070330262247680e-08 3.395315485827203137e-08 1.013384664242022132e-08 -1.524177733264301718e-08 -1.384905934108395950e-08 -2.447232567103460520e-08 -1.014870606770969391e-08 1.521303242253018209e-08 1.883465104445794285e-09 -9.076124586116643304e-09 -5.881631771301232767e-09 -7.527399261466176697e-09 5.814709736565113226e-09 -9.514959834834210030e-09 -3.621578336948509903e-09 -5.226197631985438192e-09 6.555809788885369186e-09 -1.469655207096823352e-09 -3.545306626471533415e-09 3.020577615769639914e-10 2.859507355905607351e-09 -2.147278824525898217e-09 -8.085448236799684996e-10 9.016651686485327116e-10 2.021525285538367331e-09 -7.852617486829457354e-10 -4.206622109408404445e-10 1.065417183662207413e-09 2.363066142963369759e-11 2.503104188375167595e-10 9.171610201149202639e-10 -4.234782401212876187e-10 
1.292125031309876639e-09 1.681346102887114476e-10 3.436054719532369338e-11 4.582454175775494250e-10
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -8.647178073554945026e-07 -1.984277781703176489e-05 1.882503195926090157e-06 8.032200875169178569e-06 -2.411226103936548980e-05 3.976346635752156228e-08 -1.032983173646601689e-05 1.528607026872851461e-05 3.765199894143256358e-06 -8.267627418773137351e-06 -5.847615990734883575e-06 1.114550408464794913e-05 5.646054904762381247e-06 -2.440796434761868840e-06 3.262752903500524751e-06 4.129675438955480779e-06 9.243351735950407418e-07 -8.859070622746353517e-07 -1.320205930552323286e-06 -3.337929408674683556e-07 1.500948965738256719e-06 5.375602623974374954e-07 -1.282342721334833822e-07 -4.887846996210276815e-07 -5.071080795109601970e-07 5.908466432966315986e-07 6.830135405316129420e-08 -1.801138326236477209e-07 3.636241671077920607e-08 -7.592857664575425666e-08 3.472810366429472105e-07 1.076624063842555885e-07 2.485087683293876783e-08 1.300881272527649852e-07 -9.013142777606432023e-08 8.939915122302451335e-08 -8.910687065361821148e-08 -7.169556259157945202e-08 -3.256524199915528166e-08 7.624476234060067225e-08 3.242599343213307153e-08 -1.703411450259194130e-08 5.711078804911533739e-08 -2.908463774930743960e-08 -1.942017741886845117e-08 3.389515094825672786e-08 -1.903646534995379151e-08 2.058510833311354388e-08 2.201999088614692840e-08 1.009182651662351087e-08 1.034197204034586696e-08 -2.198274424791199868e-08 1.232292869528463972e-08 8.080435927506419097e-09 8.538114169949871783e-09 6.355953165773100881e-09 8.297746720382353569e-09 -3.457470922464176957e-10 -8.056434412848676504e-09 4.945714155285675796e-09 -7.852617486829457354e-10 7.871544864481355639e-09 -8.467682837862657217e-09 -2.029701256925692636e-09 3.939081417683354680e-09 -5.488759745412081506e-09 -7.912884212571772782e-09 -1.031001015049521369e-09 
-3.274044885752324638e-09 -1.494816860080120803e-09 -1.350984031142077709e-09 -1.196073425400100068e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 2.829028106930004177e-05 1.944921768762868179e-05 3.108887283344801246e-05 -1.751672063897858822e-05 2.656817071957624942e-05 2.041667214813223345e-08 1.786385625911848468e-05 -1.515938125586694791e-05 -3.086792200769470042e-06 1.365005790821120530e-05 7.540510309074001110e-06 -1.319045281535339718e-05 -8.216049733794172664e-06 2.368856382328713002e-06 -3.509676316894518986e-06 -4.665165174764926564e-06 -1.390809463374531720e-06 3.074074593753878681e-07 1.593442474264427362e-06 3.138778611670293107e-07 -1.884590386869213127e-06 -4.659385876185425389e-07 2.318186796303224725e-07 9.039007425962339883e-07 2.965991371089280550e-07 -6.467003569471858135e-07 -9.917920486950763284e-08 3.567664216553801741e-07 -1.890654270788131530e-07 1.376223373748636337e-07 -3.104237995683853256e-07 -8.009969405191406467e-08 -2.558315900600582350e-08 -4.685717935272465095e-08 8.662415629283762556e-08 -8.389473296493107119e-08 7.795826130305549392e-08 5.508347085921189989e-08 3.312388836461493741e-08 -7.637833817621370947e-08 -2.184703268071556368e-08 4.133778195960887588e-08 -5.558907607398652146e-08 1.872796603814322018e-08 2.100092659395331668e-08 -3.028054913037645288e-08 2.777003052892447577e-08 -1.635827494157548386e-08 -3.015300528206738155e-08 -4.794891437873309109e-09 -8.181298905621171952e-09 2.870828222607401537e-08 -1.932853378702077934e-08 -8.185574761862596087e-09 -7.333682557941810937e-09 -7.623937085234914518e-09 -1.186140480763356792e-08 1.926192264894618847e-09 1.004812194110773235e-08 -6.304929515339946905e-09 -4.206622109408404445e-10 -8.467682837862657217e-09 1.028349096903714853e-08 1.429356763644912800e-09 -4.593333098593778539e-09 6.168250437327763222e-09 8.449709522931912838e-09 1.485419567657777476e-09 
2.911577625649900564e-09 1.583557028579024542e-09 1.526388126133114149e-09 1.078486171688719376e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -4.570226560718281065e-05 7.938510256294796510e-06 -4.759406903307133156e-05 1.485528605159619028e-05 1.328632869806629799e-05 3.767785622215511326e-06 -3.504021046037497873e-06 -4.840014652142644194e-06 -1.751284841526143866e-06 -2.620978606795394094e-06 -5.402179963150813133e-08 -1.155278832292491333e-06 1.244245699531189135e-06 1.047180671087091911e-06 -5.975323253942488857e-07 -6.383155976231338997e-07 2.607890887808156917e-07 6.910274422069910821e-07 3.019426348771525274e-07 8.086401926044007054e-08 -2.487763008760115392e-07 -3.308327033165305113e-07 1.180801700814298371e-08 -2.025907334512037853e-07 4.067826576065174623e-07 -1.504669750064687198e-07 2.162329698149664617e-09 -1.052145131941720459e-07 1.231474677731686145e-07 -1.798823405792464636e-08 -1.676773269846751308e-07 -6.695571317808968167e-08 -3.854166442724544756e-09 -1.298732049324576889e-07 4.178619537544149781e-08 -4.231882149492701807e-08 4.846144263801207765e-08 3.790249521597678496e-08 9.310777727390655034e-09 -3.161431724694371366e-08 -2.262199572199114182e-08 -1.758623963936870476e-08 -2.185982635652072294e-08 1.970595533339436515e-08 3.283974934922042720e-09 -1.520411604385467981e-08 -3.388178484857443643e-09 -1.222594802786561353e-08 -8.968305818659364664e-11 -8.355747478429848824e-09 -5.608425930048213983e-09 1.584707807451644887e-09 1.690431001885026155e-09 -2.673312307851300231e-09 -3.814486374974892804e-09 -7.047772288056651254e-10 5.610824631456572223e-10 -1.005799984017218873e-09 9.850712845543645382e-10 -4.618570351479315036e-10 1.065417183662207413e-09 -2.029701256925692636e-09 1.429356763644912800e-09 1.263176409299788208e-09 -5.350309176535153797e-10 1.264638894632421437e-09 2.104159003980076703e-09 4.752900690003741203e-11 
1.319275667592082162e-09 4.552514131391790960e-10 3.196426910358387148e-10 4.073370589858392892e-10
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -3.280438509074233420e-05 -4.343349351053582001e-06 -2.869546955544409657e-05 1.550313538489669733e-05 -2.956145215406492982e-06 4.937137719078863692e-06 -9.318399980112320740e-06 7.502761712063313596e-06 1.481553022075293797e-06 -7.868677817429591437e-06 -4.370368807009190035e-06 7.374138184194983456e-06 5.104576992946524359e-06 -1.006425173237491263e-06 2.221326450778006366e-06 2.465113882689141429e-06 9.134946395413285417e-07 -2.147353494858667464e-07 -6.986712579869655454e-07 -2.559306707585397471e-07 8.063456815319987564e-07 9.076097753945582601e-08 -6.649916177769624017e-08 -4.817459891533697588e-07 -5.400346519558699801e-08 2.945631564644911354e-07 6.090334154863489225e-08 -2.143197326346838355e-07 1.026274034132372151e-07 -6.641418472176448442e-08 1.103616887841112891e-07 2.498440593268076046e-08 1.794916099403844893e-08 -9.175201702638384423e-09 -2.089849281286317917e-08 2.684941829943946643e-08 -1.870475536154783344e-08 -2.344344286739602842e-08 -1.496857353546132346e-08 2.697366181915551966e-08 4.588298931132724542e-09 -2.353447626186515939e-08 2.227934796570399887e-08 -6.823049204866383864e-09 -1.151374592558550182e-08 1.305189546573797036e-08 -1.644126177753830969e-08 4.526750400287865915e-09 1.463812561619256552e-08 9.380366102948030282e-10 3.192058708212345319e-09 -1.365173709363340688e-08 9.362434245182038557e-09 3.372978499066504765e-09 3.115826762281089117e-09 3.915996288868898277e-09 6.238133691134991830e-09 -6.167090068420999429e-10 -4.822586176229844288e-09 3.100767567617724350e-09 2.363066142963369759e-11 3.939081417683354680e-09 -4.593333098593778539e-09 -5.350309176535153797e-10 2.451643082378578628e-09 -2.853766939964591889e-09 -3.988703694798402134e-09 -4.801794279849311949e-10 
-1.481707765387776807e-09 -6.728794198026569962e-10 -6.800994734739156662e-10 -6.160473318733631388e-10
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 7.186743291255005798e-06 1.495891802290004830e-05 2.930061778180938451e-08 -4.281262605362500213e-06 1.409693082616226133e-05 -1.094703937690689581e-06 8.649171052292548817e-06 -1.039438806924597051e-05 -2.684966038604770898e-06 6.914677017780991490e-06 4.544034092349464228e-06 -8.184525967841569600e-06 -4.389484473512973286e-06 1.777598704392067933e-06 -2.382215526655323162e-06 -2.963031835159859062e-06 -7.664359594913168868e-07 5.519858931202305425e-07 1.017430094326341144e-06 2.414661991954002641e-07 -1.122388599179621682e-06 -3.169084459341442714e-07 1.203146000329891082e-07 4.310349364325829828e-07 3.215948792948165141e-07 -4.256234974237915495e-07 -5.487730365889311538e-08 1.724982056319728334e-07 -5.092190758405757729e-08 7.364037315134527227e-08 -2.285323416898064706e-07 -7.251002137583272957e-08 -1.888691946102067018e-08 -6.587747971965346453e-08 5.643385911018899974e-08 -5.835382874322901230e-08 5.532319114227674269e-08 4.442845107207346507e-08 2.086476587979568180e-08 -5.246303424394046841e-08 -1.940843395444498010e-08 1.738771216804579208e-08 -3.850464778597815364e-08 1.751290257893360713e-08 1.332632048854671590e-08 -2.243923167059003606e-08 1.542086798442522191e-08 -1.284007495894431329e-08 -1.671571016592462143e-08 -5.393354794852180085e-09 -6.815664733928838862e-09 1.657730172323531683e-08 -1.020856010109448835e-08 -5.404675697439056545e-09 -5.404805000094673438e-09 -4.614466371529034522e-09 -6.548047428973199068e-09 5.384672208939864336e-10 6.029711171974792275e-09 -3.815508296250398210e-09 2.503104188375167595e-10 -5.488759745412081506e-09 6.168250437327763222e-09 1.264638894632421437e-09 -2.853766939964591889e-09 3.936050899653589319e-09 5.565896819880079367e-09 7.349153141858646432e-10 
2.219704323503962209e-09 1.038671533874805450e-09 9.633972863265621685e-10 8.041311248556472019e-10
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -3.516281247233987301e-06 1.992834674433305802e-05 -1.101203020547942824e-05 -3.861948110716982535e-06 1.993678211976121189e-05 -1.676981979439286908e-06 1.000871601215943410e-05 -1.493156975057570465e-05 -4.298212120426754376e-06 8.050492804267978341e-06 5.905198939696531584e-06 -1.144085679522630020e-05 -5.419640140062355265e-06 2.684939129778658618e-06 -3.446991892553718155e-06 -4.086058690323189241e-06 -9.484758151830637712e-07 1.022984437208358002e-06 1.402298595073682115e-06 3.697135743607308323e-07 -1.523953094583137964e-06 -5.012606478486283811e-07 1.422710045280861107e-07 4.660556393120762020e-07 5.491839657154014149e-07 -5.979401318640006234e-07 -7.035811849550010454e-08 1.849303543062758270e-07 -1.622461052881251519e-08 8.031425840858179706e-08 -3.501280954021450887e-07 -1.165242881487228299e-07 -2.949184616579635060e-08 -1.305809395832603629e-07 8.530737587620482276e-08 -8.931171084839953515e-08 8.532369533149940241e-08 7.292049480011651936e-08 3.122121292473368845e-08 -7.730791368793387050e-08 -3.214872115104035350e-08 1.600135926675760555e-08 -5.746406945296525131e-08 2.996530275212561963e-08 1.935339294800482383e-08 -3.463750208300704733e-08 1.953543588167739113e-08 -2.054466879875352151e-08 -2.170153009502604929e-08 -1.029900446629442975e-08 -1.075130822239076879e-08 2.180447752466151935e-08 -1.230298566546570222e-08 -7.984865260948639608e-09 -8.571302941338531498e-09 -6.359810910879953395e-09 -8.328872739069338896e-09 1.308438360289447574e-10 8.154128055234787738e-09 -5.062911539008353216e-09 9.171610201149202639e-10 -7.912884212571772782e-09 8.449709522931912838e-09 2.104159003980076703e-09 -3.988703694798402134e-09 5.565896819880079367e-09 8.055232596619064380e-09 8.932454813727385054e-10 
3.452824703861692610e-09 1.502486327253665486e-09 1.356954461411380196e-09 1.257724192519818718e-09
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.085044567018165149e-05 9.781241534505454783e-06 7.718093365400439297e-06 -1.757989961223755028e-06 7.417341896346888420e-06 1.710859712445590334e-06 3.628568418847811236e-06 -2.674915835064053855e-06 6.256314854520926767e-07 2.393845435813816572e-06 1.116429268396872302e-06 -1.593019161874593051e-06 -9.263970116627612412e-07 -2.870167575707448028e-07 -3.614838062359436948e-07 -6.060719727261434323e-07 1.213406610054423080e-07 -2.253908139097856409e-07 1.704891596913584890e-07 2.744415738202501588e-08 -1.677735389502810711e-07 -1.336850201458063875e-07 1.124544048302991715e-08 1.576000151600658905e-07 6.149312214732258695e-08 -9.363371555576276321e-08 -3.273973804117308361e-08 6.000442361321819088e-09 -2.458950764445714201e-08 2.469494132468412819e-08 -6.889339443104288423e-08 -1.583152580108024508e-08 8.344354977149938748e-10 -2.706351467904462243e-08 3.813699740703329354e-08 -1.416966417913568194e-08 2.471051458681790368e-08 8.397568829384527522e-09 3.893608853626855498e-09 -1.623393723024969207e-08 -7.311081596654048260e-09 6.205349102373116247e-09 -1.062720147662047900e-08 1.498568807365891315e-09 3.406953635245660924e-09 -3.219181777680236607e-09 2.034350056767122949e-09 -4.634036812363204047e-09 -4.576776316397327064e-09 -5.772295607941173226e-11 -1.335981787320148522e-09 4.901900262874927553e-09 -2.701806365784534529e-09 -1.653712318060761975e-09 -1.543543165722352389e-09 -9.011261674338129447e-10 -1.230520445986353348e-09 7.783586985478703598e-10 9.636732813150622573e-10 -7.478359423309641358e-10 -4.234782401212876187e-10 -1.031001015049521369e-09 1.485419567657777476e-09 4.752900690003741203e-11 -4.801794279849311949e-10 7.349153141858646432e-10 8.932454813727385054e-10 7.240954697289915346e-10 
6.642762586641142630e-11 2.475810147336224691e-10 2.162677799251022896e-10 -3.340915738090531983e-11
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -2.007248700368360647e-05 -1.291133773362803928e-06 -2.626731872376759428e-05 5.600039129933297027e-06 6.267002453151422167e-06 1.532861605895005321e-07 1.698996092568142021e-06 -5.152958681383269683e-06 -3.717786414014746694e-06 1.334268978704148648e-06 2.291183767977657140e-06 -3.656059094743960134e-06 -6.173413782026520671e-07 2.120510888560250851e-06 -1.661307389047478205e-06 -1.529265104782114816e-06 -2.162647439429792068e-07 7.837075828301020140e-07 8.437967807583934288e-07 2.265222527209049861e-07 -6.547994884620087304e-07 -2.584102985771163875e-07 6.537217150705251352e-08 8.588230330409662199e-08 4.339290759661658034e-07 -2.566688521773576872e-07 -2.271480938680894485e-08 1.568472762937707165e-08 1.012517879773706124e-07 6.142209655267589044e-09 -1.882024691449037666e-07 -7.214487089902525238e-08 -1.601417896564284264e-08 -1.026127191999482088e-07 3.717216373166129533e-08 -3.840197104727526729e-08 4.511140416926369482e-08 4.281411598732249032e-08 1.571139918596603625e-08 -3.789514780331499610e-08 -1.705273331005863859e-08 -4.590844784987318447e-09 -2.476628629234901459e-08 1.797390522750351605e-08 7.572814296516647891e-09 -1.713112101534010932e-08 4.903852927355137348e-09 -1.061537759107453824e-08 -4.623030803799492177e-09 -8.462113326275320678e-09 -5.825102474542005024e-09 5.212458976434576952e-09 -1.500474639486970880e-09 -3.494916561460158804e-09 -4.946302491587467024e-09 -2.125140389405122149e-09 -1.703478768670802332e-09 -1.181955499406468477e-09 2.693023014676496033e-09 -1.596217913727631208e-09 1.292125031309876639e-09 -3.274044885752324638e-09 2.911577625649900564e-09 1.319275667592082162e-09 -1.481707765387776807e-09 2.219704323503962209e-09 3.452824703861692610e-09 6.642762586641142630e-11 
2.111798555383464349e-09 6.399634159579553275e-10 5.207201219727837339e-10 6.977427551719482891e-10
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -5.295553162504519813e-06 5.442262500731319240e-06 -7.116804599121627626e-06 1.226962407725760877e-06 6.181941802577398503e-06 7.188840269763651195e-07 1.513032931868125849e-06 -3.027310308501267242e-06 -6.928184822989570810e-07 1.040729183977109180e-06 9.489634262735647839e-07 -1.997452207645467794e-06 -7.052907008841067151e-07 4.596497846412796187e-07 -5.816343929781949384e-07 -7.215892745304936164e-07 -7.316624217480915010e-08 2.014451613640029494e-07 2.635560043088589926e-07 6.029957038327152215e-08 -2.754722462252092911e-07 -1.389331838198693611e-07 2.693940749634684591e-08 6.212216831921702464e-08 1.400884324127875396e-07 -1.155701271466934864e-07 -1.303555713111178435e-08 1.223125639288827777e-08 8.692203376885269593e-09 1.293295075490816128e-08 -8.089869793912407793e-08 -2.759300245397318268e-08 -4.190262009618466689e-09 -3.931275331107261171e-08 2.333337366706423846e-08 -2.053405874333945747e-08 2.243721786375495658e-08 1.605017326586468682e-08 5.911897240247968828e-09 -1.753171251095821456e-08 -8.634142629840585811e-09 7.889460077209109895e-10 -1.237297326227601204e-08 6.880864341184318795e-09 3.493934315710834392e-09 -7.179486384728923726e-09 2.406418217882636814e-09 -5.214851137178819199e-09 -3.676138418160822304e-09 -2.509512649004528219e-09 -2.384695284474049028e-09 3.968265845175372203e-09 -1.925962682941341123e-09 -1.677394702764909911e-09 -1.882241233930865467e-09 -1.078435675947127092e-09 -1.249286213299707842e-09 3.585859504057861424e-11 1.397729852121938554e-09 -8.703629506411777718e-10 1.681346102887114476e-10 -1.494816860080120803e-09 1.583557028579024542e-09 4.552514131391790960e-10 -6.728794198026569962e-10 1.038671533874805450e-09 1.502486327253665486e-09 2.475810147336224691e-10 
6.399634159579553275e-10 3.049507638499734438e-10 2.619731764680509981e-10 2.092670633999060337e-10
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 4.122111153218879565e-07 4.509456850882449668e-06 -9.901080463009747570e-07 -7.923939218379101901e-07 4.611147689297883146e-06 1.315091717808854989e-07 2.095895205823464670e-06 -2.680007784735964941e-06 -5.338370232815017866e-07 1.592673355379447283e-06 1.066002144511729646e-06 -2.002451489677425600e-06 -1.037992718809561380e-06 3.782072301957504328e-07 -5.600825022422226172e-07 -7.167152683383465318e-07 -1.578331198901239645e-07 1.261141637019637014e-07 2.387087147285776049e-07 5.432256177951978763e-08 -2.688458475558004660e-07 -9.471408319558297167e-08 2.862587028136226858e-08 1.001815186360548032e-07 8.402460154690179919e-08 -1.048538991210759860e-07 -1.365744804901927566e-08 3.630573812768552190e-08 -1.229017299970242187e-08 1.775624445705540029e-08 -5.991782651450717320e-08 -1.864429329848813941e-08 -4.003117106038301459e-09 -2.017344044826520406e-08 1.622980588782055270e-08 -1.574653376803142373e-08 1.560026714909821334e-08 1.146345947428834040e-08 5.120615115238622890e-09 -1.365831229097354797e-08 -5.587039652733640085e-09 3.758264687913682383e-09 -9.951951863687371573e-09 4.640086474492054404e-09 3.263556363497743318e-09 -5.693791263976027113e-09 3.405996043314057935e-09 -3.580413847932004248e-09 -4.093499942089111349e-09 -1.424372583057414257e-09 -1.744889923726272485e-09 4.133344012044137822e-09 -2.459628040200187293e-09 -1.380389870974265595e-09 -1.385838303871160723e-09 -1.105826871582507158e-09 -1.549584606805220438e-09 1.700151892994023716e-10 1.449553161970267951e-09 -9.156361354545421597e-10 3.436054719532369338e-11 -1.350984031142077709e-09 1.526388126133114149e-09 3.196426910358387148e-10 -6.800994734739156662e-10 9.633972863265621685e-10 1.356954461411380196e-09 2.162677799251022896e-10 
5.207201219727837339e-10 2.619731764680509981e-10 2.398270716608777692e-10 1.830220969278060225e-10
+0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 -3.161746684132797919e-06 -1.030483147388550617e-06 -4.192148453277030133e-06 -4.526732459805152895e-07 2.499200290363847799e-07 -1.057256888599603949e-06 5.027824590938702883e-07 -1.739423428883073235e-06 -1.058159092309566331e-06 6.263309317770234638e-07 6.853496867453323004e-07 -1.522927148308537333e-06 -5.413516795648731065e-07 6.153488546045230070e-07 -5.703477275038895062e-07 -5.600020394411885686e-07 -1.569864910630974774e-07 2.631878518066719889e-07 2.310082046331661528e-07 6.820330898875917256e-08 -2.047241814358036085e-07 -5.800788164262524000e-08 1.361458105578812630e-08 2.920732134283257970e-08 1.007822564215318863e-07 -8.146701411549708026e-08 -9.047030654725759493e-09 1.812301243834629015e-08 1.781693049583065503e-08 1.602648541450339559e-09 -4.974846563085596704e-08 -1.861481786572203059e-08 -6.666840004082733244e-09 -2.312739248423948984e-08 7.936228871395799181e-09 -1.157098103618119473e-08 9.730826444044803855e-09 1.337192434049511286e-08 5.583568563326357299e-09 -9.917366222069330611e-09 -4.091012433374921910e-09 1.558222980716653319e-10 -7.943621195758910878e-09 5.441308503249259370e-09 3.115544195942903542e-09 -5.655167849302724547e-09 2.964985485091416447e-09 -2.704541291127445528e-09 -2.387783046130999934e-09 -2.386021341959502953e-09 -1.708367538731110283e-09 2.220088253306263855e-09 -9.493145268760885368e-10 -1.151534175119784583e-09 -1.472907831152439491e-09 -9.262389880798338464e-10 -9.654101719683703709e-10 -3.554820552994491015e-10 1.096061068276667264e-09 -6.194326780592672405e-10 4.582454175775494250e-10 -1.196073425400100068e-09 1.078486171688719376e-09 4.073370589858392892e-10 -6.160473318733631388e-10 8.041311248556472019e-10 1.257724192519818718e-09 -3.340915738090531983e-11 
6.977427551719482891e-10 2.092670633999060337e-10 1.830220969278060225e-10 2.789937835338941657e-10
diff --git a/multimodal/tests/datasets/get_dataset_path.py b/multimodal/tests/datasets/get_dataset_path.py
new file mode 100644
index 0000000000000000000000000000000000000000..f533aa84910b5ee415db222072d88b66620754ba
--- /dev/null
+++ b/multimodal/tests/datasets/get_dataset_path.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+
+
+from __future__ import print_function, division
+
+import os
+
+
+def get_dataset_path(filename):
+    """Return the absolute path of a reference dataset for tests
+
+    - Input parameter:
+
+    :param str filename: File name of the file containing reference data
+        for tests (which must be in ``multimodal/tests/datasets/``)
+
+    - Output parameters:
+
+    :returns: The absolute path where the file with name **filename** is stored
+    :rtype: str
+
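+    - Example (illustrative; ``input_y.npy`` is one of the test data files
+      added in this directory)::
+
+        path = get_dataset_path("input_y.npy")
+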
+    """
+
+    datasets_path = os.path.dirname(os.path.abspath(__file__))
+    return os.path.join(datasets_path, filename)
diff --git a/multimodal/tests/datasets/input_dir.npy b/multimodal/tests/datasets/input_dir.npy
new file mode 100644
index 0000000000000000000000000000000000000000..7b07ef4e6ffcd2e797d473b1f0341d67b673852c
Binary files /dev/null and b/multimodal/tests/datasets/input_dir.npy differ
diff --git a/multimodal/tests/datasets/input_dir.txt b/multimodal/tests/datasets/input_dir.txt
new file mode 100644
index 0000000000000000000000000000000000000000..174ee98959349ea9d6ff7592092d2ff50be6bb87
--- /dev/null
+++ b/multimodal/tests/datasets/input_dir.txt
@@ -0,0 +1 @@
+{"0": 
\ No newline at end of file
diff --git a/multimodal/tests/datasets/input_x_dic.npy b/multimodal/tests/datasets/input_x_dic.npy
new file mode 100644
index 0000000000000000000000000000000000000000..8a1cc27a255361f49ad2c03a57f9d8a6f6a56ec3
Binary files /dev/null and b/multimodal/tests/datasets/input_x_dic.npy differ
diff --git a/multimodal/tests/datasets/input_x_dic.pkl b/multimodal/tests/datasets/input_x_dic.pkl
new file mode 100644
index 0000000000000000000000000000000000000000..ac879caaa3bd4b29030decb48b42d522ad341595
Binary files /dev/null and b/multimodal/tests/datasets/input_x_dic.pkl differ
diff --git a/multimodal/tests/datasets/input_y.npy b/multimodal/tests/datasets/input_y.npy
new file mode 100644
index 0000000000000000000000000000000000000000..74806893dbded469bbc82d6277b33528759ee780
Binary files /dev/null and b/multimodal/tests/datasets/input_y.npy differ
diff --git a/multimodal/tests/datasets/kernel_input_x.pkl b/multimodal/tests/datasets/kernel_input_x.pkl
new file mode 100644
index 0000000000000000000000000000000000000000..7b07ef4e6ffcd2e797d473b1f0341d67b673852c
Binary files /dev/null and b/multimodal/tests/datasets/kernel_input_x.pkl differ
diff --git a/multimodal/tests/datasets/test_input_y.npy b/multimodal/tests/datasets/test_input_y.npy
new file mode 100644
index 0000000000000000000000000000000000000000..1708652a7f325d582d5d40834c0427e41b42ad88
Binary files /dev/null and b/multimodal/tests/datasets/test_input_y.npy differ
diff --git a/multimodal/tests/datasets/test_kernel_input_x.pkl b/multimodal/tests/datasets/test_kernel_input_x.pkl
new file mode 100644
index 0000000000000000000000000000000000000000..8088a578080f936ed18381340c83c4a38a254f6d
Binary files /dev/null and b/multimodal/tests/datasets/test_kernel_input_x.pkl differ
diff --git a/multimodal/tests/datasets/x_y_.npz b/multimodal/tests/datasets/x_y_.npz
new file mode 100644
index 0000000000000000000000000000000000000000..05fcf754b142d35f3297946ccc7dba30005a4b0f
Binary files /dev/null and b/multimodal/tests/datasets/x_y_.npz differ
diff --git a/multimodal/tests/test_cumbo.py b/multimodal/tests/test_cumbo.py
new file mode 100644
index 0000000000000000000000000000000000000000..d8a9e008cd7b5fc14cd0fc243b7222ed897e56d4
--- /dev/null
+++ b/multimodal/tests/test_cumbo.py
@@ -0,0 +1,1060 @@
+# -*- coding: utf-8 -*-
+"""Testing for the mumbo module."""
+
+
+import pickle
+
+import numpy as np
+import unittest
+from scipy.sparse import csc_matrix, csr_matrix, coo_matrix, dok_matrix
+from scipy.sparse import lil_matrix
+from sklearn.model_selection import GridSearchCV
+from sklearn.svm import SVC
+from sklearn.ensemble import RandomForestClassifier
+from sklearn.cluster import KMeans
+from sklearn.tree import DecisionTreeClassifier
+from sklearn import datasets
+
+from multimodal.boosting.cumbo import MuCumboClassifier
+from multimodal.tests.data.get_dataset_path import get_dataset_path
+
+
+class TestMuCumboClassifier(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        # Load the iris dataset
+        iris = datasets.load_iris()
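+        # views_ind = [0, 2, 4] splits the 4 iris features into two views:
+        # columns 0-1 and columns 2-3.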
+        iris.views_ind = np.array([0, 2, 4])
+        cls.iris = iris
+
+
+    def test_init(self):
+        clf = MuCumboClassifier()
+        self.assertEqual(clf.random_state, None)
+        self.assertEqual(clf.n_estimators, 50)
+        n_estimators = 3
+        clf = MuCumboClassifier(n_estimators=n_estimators)
+        #self.assertEqual(clf.view_mode_)
+
+    def test_init_var(self):
+        n_classes = 3
+        n_views = 3
+        y = np.array([0, 2, 1, 2])
+        expected_cost = np.array(
+            [[[-2, 1, 0.5], [1, 1, -1], [1, -2, 0.5], [1, 1, -1]],
+             [[-2, 1, 0.5], [1, 1, -1], [1, -2, 0.5], [1, 1, -1]],
+             [[-2, 1, 0.5], [1, 1, -1], [1, -2,0.5], [1, 1, -1]]],
+            dtype=np.float64)
+        # expected_cost_glob = np.array(
+        #     [[-2, 1, 1], [1, 1, -2], [1, -2, 1], [1, 1, -2]], dtype=np.float64)
+        expected_label_score = np.zeros((n_views, y.shape[0], n_classes))
+        expected_label_score_glob = np.zeros((y.shape[0], n_classes))
+        expected_predicted_classes_shape = ((n_views, y.shape[0]))
+        expected_n_yi_s = np.array([1, 1, 2])
+        expected_beta_class = np.ones((n_views, n_classes)) / n_classes
+        clf = MuCumboClassifier()
+        clf.n_classes_ = n_classes
+        (cost,  label_score, label_score_glob, predicted_classes, score_function, beta_class, n_yi_s) \
+            = clf._init_var(n_views, y)
+        np.testing.assert_equal(cost, expected_cost)
+        # np.testing.assert_equal(cost_glob, expected_cost_glob)
+        np.testing.assert_equal(label_score, expected_label_score)
+        np.testing.assert_equal(label_score_glob, expected_label_score_glob)
+        np.testing.assert_equal(predicted_classes.shape, expected_predicted_classes_shape)
+        np.testing.assert_equal(n_yi_s, expected_n_yi_s)
+        np.testing.assert_equal(beta_class, expected_beta_class)
+        np.testing.assert_equal(score_function, np.zeros((n_views, 4, n_classes)))
+
+    # def test_compute_edge_global(self):
+    #     cost_global = np.array([[-2, 1, 1], [1, 1, -2], [1, -2, 1], [1, 1, -2]],
+    #                            dtype=np.float64)
+    #     predicted_classes = np.array([[0, 0, 1, 1], [0, 1, 0, 2], [2, 2, 0, 0]])
+    #     y = np.array([0, 2, 1, 2])
+    #     expected_edge_global = np.array([0.25, 0.25, -0.125])
+    #
+    #     clf = MuCumboClassifier()
+    #     edge_global = clf._compute_edge_global(cost_global, predicted_classes, y)
+    #     np.testing.assert_equal(edge_global, expected_edge_global)
+
+    def test_compute_dist(self):
+        cost = np.array(
+            [[[-2, 1, 1], [-1, -1, -2], [1, -2, 1], [1, 1, -2]],
+             [[-1, 2, 2], [2, 2, -1], [-2, 4, -2], [2, 2, -4]],
+             [[1, 4, -4], [-1, 3, -1], [-2, 2, 4], [4, 4, -4]]],
+            dtype=np.float64)
+        y = np.array([0, 2, 1, 2])
+        expected_dist = np.array(
+            [[0.25, 0.25, 0.25, 0.25], [0.5, 0.5, -2., 2.], [-0.5, 0.5, -1., 2.]])
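+        # The expected distribution is consistent with taking, for each view,
+        # the cost entry of every sample at its true class and normalising by
+        # the sum of those entries (e.g. view 1: [-1, -1, 4, -4] / -2).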
+
+        clf = MuCumboClassifier()
+        dist = clf._compute_dist(cost, y)
+
+        np.testing.assert_equal(dist, expected_dist)
+
+        # The computation of the distribution only uses the costs when predicting
+        # the right classes, so the following cost matrix should give the same
+        # result as the previous.
+        cost = np.array(
+            [[[-2, 0, 0], [0, 0, -2], [0, -2, 0], [0, 0, -2]],
+             [[-1, 0, 0], [0, 0, -1], [0, 4, 0], [0, 0, -4]],
+             [[1, 0, 0], [0, 0, -1], [0, 2, 0], [0, 0, -4]]],
+            dtype=np.float64)
+
+        dist = clf._compute_dist(cost, y)
+
+        np.testing.assert_equal(dist, expected_dist)
+
+        expected_cost = np.array(
+            [[[-2, 1, 0.5], [1, 1, -1], [1, -2, 0.5], [1, 1, -1]],
+             [[-2, 1, 0.5], [1, 1, -1], [1, -2, 0.5], [1, 1, -1]],
+             [[-2, 1, 0.5], [1, 1, -1], [1, -2,0.5], [1, 1, -1]]],
+            dtype=np.float64)
+        dist = clf._compute_dist(cost, y)
+        expected_dist = np.array([[0.25, 0.25, 0.25, 0.25],
+                                  [0.5, 0.5, -2., 2.],
+                                  [-0.5, 0.5, -1., 2.]])
+        np.testing.assert_equal(dist, expected_dist)
+
+
+    # def test_compute_coop_coef(self):
+    #     y = np.array([0, 1, 2, 0])
+    #     predicted_classes = np.array([[0, 0, 1, 1], [0, 1, 0, 2], [2, 2, 0, 0]])
+    #     expected_coop_coef = np.array([[1, 0, 1, 0], [1, 1, 1, 0], [0, 0, 1, 1]],
+    #                                   dtype=np.float64)
+    #
+    #     clf = MuCumboClassifier()
+    #     coop_coef = clf._compute_coop_coef(predicted_classes, y)
+    #
+    #     assert_array_equal(coop_coef, expected_coop_coef)
+
+
+    def test_compute_edges(self):
+        cost = np.array(
+            [[[-2, 1, 1], [-1, -1, -2], [1, -2, 1], [1, 1, -2]],
+             [[-2, 2, 2], [2, 2, -4], [-2, -4, -2], [2, 2, -4]],
+             [[1, 4, -4], [-1, 3, -1], [-2, 4, 4], [4, 4, -1]]],
+            dtype=np.float64)
+        predicted_classes = np.array([[0, 2, 1, 1], [0, 1, 0, 2], [2, 2, 0, 1]])
+        y = np.array([0, 2, 1, 2])
+        expected_edges = np.array([1.25, 0.75, 0.25])
+
+        clf = MuCumboClassifier()
+        edges = clf._compute_edges(cost, predicted_classes, y)
+
+        np.testing.assert_equal(edges, expected_edges)
+
+
+    def test_compute_alphas(self):
+        decimal = 12
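+        # The edge values below are chosen so that (1 + edge) / (1 - edge)
+        # equals e, e**2 and e**4; assuming _compute_alphas implements the
+        # usual boosting formula alpha = 0.5 * ln((1 + edge) / (1 - edge)),
+        # the expected alphas are 0.5, 1. and 2.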
+        expected_alpha = 0.5
+        edge = (np.e-1.) / (np.e+1.)
+
+        clf = MuCumboClassifier()
+        alpha = clf._compute_alphas(edge)
+        self.assertAlmostEqual(alpha, expected_alpha, decimal)
+
+        expected_alphas = np.array([0.5, 1., 2.])
+        tmp = np.array([np.e, np.e**2, np.e**4])
+        edges = (tmp-1.) / (tmp+1.)
+
+        alphas = clf._compute_alphas(edges)
+
+        np.testing.assert_almost_equal(alphas, expected_alphas, decimal)
+
+
+    def test_prepare_beta_solver(self):
+        clf = MuCumboClassifier()
+        clf.n_views_ = 3
+        clf.n_classes_ = 3
+        A, b, G, h, l = clf._prepare_beta_solver()
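+        # Expected (presumably cvxopt-style) constraint data: A beta = b
+        # forces the three betas of each view to sum to 1, G beta <= h with
+        # G = [I; -I] and h = [1; 0] keeps every beta in [0, 1], and l gives
+        # the number of linear inequality constraints (2 * 3 * 3 = 18).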
+        a_n = np.array(A)
+        A_expected = np.array([[1, 1, 1, 0, 0, 0, 0, 0, 0],
+                               [0, 0, 0, 1, 1, 1, 0, 0, 0],
+                               [0, 0, 0, 0, 0, 0, 1, 1, 1]])
+        np.testing.assert_equal(a_n, A_expected)
+        b_expected = np.array([[ 1.00e+00],[ 1.00e+00],[ 1.00e+00]])
+        #                      [ 1.00e+00],[ 1.00e+00],[ 1.00e+00],
+        #                      [ 1.00e+00],[ 1.00e+00],[ 1.00e+00]])
+        b_n = np.array(b)
+        np.testing.assert_equal(b_n, b_expected)
+        G_n = np.array(G)
+        G_expected = np.array([[1.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, 1.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, 1.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, 0.00e+00, 1.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 1.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 1.00e+00, 0.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 1.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 1.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 1.00e+00],
+                               [-1.00e+00, 0.00e+00, 0.00e+00, 0.00e+00,  0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, -1.00e+00,  0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, -1.00e+00,  0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, 0.00e+00, -1.00e+00,  0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, 0.00e+00,  0.00e+00, -1.00e+00, 0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, 0.00e+00,  0.00e+00,  0.00e+00,-1.00e+00, 0.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, 0.00e+00,  0.00e+00,  0.00e+00, 0.00e+00, -1.00e+00, 0.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, 0.00e+00,  0.00e+00,  0.00e+00, 0.00e+00, 0.00e+00, -1.00e+00, 0.00e+00],
+                               [0.00e+00, 0.00e+00, 0.00e+00,  0.00e+00,  0.00e+00, 0.00e+00, 0.00e+00, 0.00e+00,-1.00e+00]])
+        np.testing.assert_equal(G_n, G_expected)
+        h_n = np.array(h)
+        h_expected = np.array([[ 1.00e+00],[ 1.00e+00],[ 1.00e+00],[ 1.00e+00],[ 1.00e+00],[ 1.00e+00],[ 1.00e+00],[ 1.00e+00],[ 1.00e+00],
+                               [ 0.00e+00],[ 0.00e+00],[ 0.00e+00],[ 0.00e+00],[ 0.00e+00],[ 0.00e+00],[ 0.00e+00],[ 0.00e+00],[ 0.00e+00]])
+        np.testing.assert_equal(h_n, h_expected)
+
+        self.assertEqual(l, {'l': 18})
+
+    def test_solver_cp_forbeta(self):
+            clf = MuCumboClassifier()
+            clf.n_views_ = 3
+            clf.n_classes_ = 3
+            clf.n_yi = np.array([1, 1, 2])
+            A, b, G, h, l = clf._prepare_beta_solver()
+            y_i = np.array([0, 1, 2, 0])
+            predicted_classes = np.array([[0, 0, 1, 1], [0, 1, 0, 2], [2, 2, 0, 0]])
+
+            indicat , indicat_yi, delta = clf._indicatrice(predicted_classes, y_i)
+            indicate_vue = np.block(np.split(indicat, 3, axis=0)).squeeze()
+            indicate_vue_yi = np.block(np.split(indicat_yi, 3, axis=0)).squeeze()
+            alphas = np.array([0.5, 1., 2.])
+            cost_Tminus1 = 10 * np.array(
+                [[[2, 1, 0.5], [1, 1, 1], [1, 2, 0.5], [1, 1, 1]],
+                 [[2, 1, 0.5], [1, 1, 1], [1, 2, 0.5], [1, 1, 1]],
+                 [[2, 1, 0.5], [1, 1, 1], [1, 2, 0.5], [1, 1, 1]]],
+                dtype=np.float64)
+            cost_Tminus1_vue = np.block(np.split(cost_Tminus1, 3, axis=0)).squeeze()
+            delta_vue = np.block(np.split(delta, 3, axis=0)).squeeze()
+            solver = np.array(clf._solver_cp_forbeta(alphas, indicate_vue, indicate_vue_yi, delta_vue,
+                                         cost_Tminus1_vue, A, b, G, h, l))
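+            # Sanity checks: one beta per (view, class) pair returned as a
+            # (9, 1) column, and the three betas of each view should sum to 1.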
+            self.assertEqual(solver.shape, (9,1))
+            s_r = np.sum(solver.reshape(3,3), axis=1)
+            np.testing.assert_almost_equal(s_r, np.ones(3, dtype=np.float64), 9)
+
+    def test_solver_compute_betas(self):
+        clf = MuCumboClassifier()
+        clf.n_views_ = 3
+        clf.n_classes_ = 3
+        clf.n_yi = np.array([1, 1, 2])
+        cost_Tminus1 = np.array([[[-7.45744585e+01,  3.67879439e-01,  7.42065790e+01],
+          [ 4.78511743e-06,  3.87742081e-02, -3.87789932e-02],
+          [ 2.47875218e-03, -2.48182428e-03,  3.07210618e-06],
+          [ 1.35335283e-01,  6.73794700e-03, -1.42073230e-01]],
+
+         [[-2.02255359e-01,  1.83156389e-02,  1.83939720e-01],
+          [ 7.38905610e+00,  4.97870684e-02, -7.43884317e+00],
+          [ 3.67879441e-01, -4.06240749e+00,  3.69452805e+00],
+          [ 3.67879441e-01,  8.10308393e+03, -8.10345181e+03]],
+
+         [[-2.48182452e-03,  2.47875218e-03,  3.07234660e-06],
+          [ 5.45938775e+01,  4.03397223e+02, -4.57991101e+02],
+          [ 1.48401545e+02, -1.48426438e+02,  2.48935342e-02],
+          [ 1.09663316e+03,  2.71828184e+00, -1.09935144e+03]]])
+        score_function_Tminus1 = 10 * np.ones((3, 4, 3), dtype=np.float64)
+        alphas = np.array([0.5, 1., 2.])
+        predicted_classes = np.array([[0, 0, 1, 1], [0, 1, 0, 2], [2, 2, 0, 0]])
+        y = np.array([0, 1, 2, 0])
+        betas = clf._compute_betas(alphas, y, score_function_Tminus1, predicted_classes)
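+        # betas should contain one row per view, each row being a probability
+        # vector: non-negative entries that sum to 1.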
+        self.assertEqual(betas.shape, (3,3))
+        np.testing.assert_almost_equal(np.sum(betas, axis=1), np.ones(3, dtype=np.float64), 9)
+        self.assertTrue(np.all(betas <= 1))
+        self.assertTrue(np.all(betas >= 0))
+    #
+    #
+    # def test_compute_cost_global():
+    #
+    #     decimal = 12
+    #     label_score_glob = np.array(
+    #         [[-1, -2, 4], [-8, 1, 4], [2, 8, -4], [2, -1, 4]],
+    #         dtype=np.float64)
+    #     best_pred_classes = np.array([0, 1, 0, 2])
+    #     y = np.array([0, 2, 1, 2])
+    #     alpha = 0.5
+    #     expected_label_score_glob = np.array(
+    #         [[-0.5, -2, 4], [-8, 1.5, 4], [2.5, 8, -4], [2, -1, 4.5]],
+    #         dtype=np.float64)
+    #
+    #     clf = MuCumboClassifier()
+    #     cost_glob, label_score_glob = clf._compute_cost_global(
+    #         label_score_glob, best_pred_classes, y, alpha)
+    #
+    #     assert_array_almost_equal(label_score_glob, expected_label_score_glob,
+    #                               decimal=decimal)
+    #
+    #     label_score_glob = np.zeros((4, 3), dtype=np.float64)
+    #     alpha = 0.
+    #     expected_label_score_glob = np.zeros((4, 3), dtype=np.float64)
+    #     expected_cost_glob = np.array(
+    #         [[-2, 1, 1], [1, 1, -2], [1, -2, 1], [1, 1, -2]],
+    #         dtype=np.float64)
+    #
+    #     cost_glob, label_score_glob = clf._compute_cost_global(
+    #         label_score_glob, best_pred_classes, y, alpha)
+    #
+    #     assert_array_equal(label_score_glob, expected_label_score_glob)
+    #     assert_array_almost_equal(cost_glob, expected_cost_glob, decimal=decimal)
+    #
+    #     label_score_glob = np.array(
+    #         [[0, 0, np.log(4)], [np.log(8), 0, 0], [0, 0, 0], [0, 0, 0]],
+    #         dtype=np.float64)
+    #     alpha = np.log(2)
+    #     expected_label_score_glob = np.array(
+    #         [[alpha, 0, np.log(4)],
+    #          [np.log(8), alpha, 0],
+    #          [alpha, 0, 0],
+    #          [0, 0, alpha]],
+    #         dtype=np.float64)
+    #     expected_cost_glob = np.array(
+    #         [[-2.5, 0.5, 2.], [8., 2., -10.], [2., -3., 1.], [0.5, 0.5, -1.]],
+    #         dtype=np.float64)
+    #
+    #     cost_glob, label_score_glob = clf._compute_cost_global(
+    #         label_score_glob, best_pred_classes, y, alpha)
+    #
+    #     assert_array_almost_equal(label_score_glob, expected_label_score_glob,
+    #                               decimal=decimal)
+    #     assert_array_almost_equal(cost_glob, expected_cost_glob, decimal=decimal)
+
+    # def test_compute_beta(self):
+
+    def test_indicatrice(self):
+        clf = MuCumboClassifier()
+        clf.n_views_ = 3
+        clf.n_classes_ = 3
+        y_i = np.array([0, 1, 2, 0])
+        predicted_classes = np.array([[0, 0, 1, 1], [0, 1, 0, 2], [2, 2, 0, 0]])
+        indic , indic_yi, delta = clf._indicatrice(predicted_classes, y_i)
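+        # From the expected values below: indic one-hot encodes the predicted
+        # class only where the prediction differs from y (rows of zeros where
+        # a view predicts correctly), while indic_yi one-hot encodes the true
+        # label of every sample in every view.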
+        expected_indi = np.array([[[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 1, 0]],
+                                  [[0, 0, 0], [0, 0, 0], [1, 0, 0], [0, 0, 1]],
+                                  [[0, 0, 1], [0, 0, 1], [1, 0, 0], [0, 0, 0]]])
+        expected_indi_yi = np.array([[[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0]],
+                                     [[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0]],
+                                     [[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 0, 0]]])
+        np.testing.assert_equal(indic, expected_indi)
+        np.testing.assert_equal(indic_yi, expected_indi_yi)
+
+    def test_compute_cost(self):
+        decimal = 12
+        label_score = np.array(
+            [[[-1, -2, 4], [-8, 1, 4], [2, 8, -4], [2, -1, 4]],
+             [[2, -2, 1], [4, -1, 2], [1, 2, 4], [-2, 8, -1]],
+             [[8, 2, -4], [2, 4, -2], [4, 1, -2], [8, 2, 1]]],
+            dtype=np.float64)
+        predicted_classes = np.array([[0, 2, 1, 1], [0, 1, 0, 0], [2, 2, 0, 1]])
+        y = np.array([0, 2, 1, 2])
+        alphas = np.array([0.25, 0.5, 2.])
+        # expected_label_score = np.array(
+        #      [[[-0.75, -2, 4], [-8, 1, 4.25], [2, 8.25, -4], [2, -0.75, 4]],
+        #      [[2.5, -2, 1], [4, -0.5, 2], [1.5, 2, 4], [-1.5, 8, -1]],
+        #       [[8, 2, -2.], [2, 4, 0.], [6., 1, -2], [8, 4., 1]]],
+        #      dtype=np.float64)
+        cost_Tminus1 = np.array(
+                [[[-2, 1, 0.5], [1, 1, -1], [1, -2, 0.5], [1, 1, -1]],
+                 [[-2, 1, 0.5], [1, 1, -1], [1, -2, 0.5], [1, 1, -1]],
+                 [[-2, 1, 0.5], [1, 1, -1], [1, -2, 0.5], [1, 1, -1]]],
+                dtype=np.float64)
+        score_function_Tminus1 = 10 * np.ones((3, 4, 3), dtype=np.float64)
+        clf = MuCumboClassifier()
+        clf.n_views_ = 3
+        clf.n_classes_ = 3
+        clf.n_yi = np.array([1, 1, 2])
+        betas = clf._compute_betas(alphas, y, score_function_Tminus1, predicted_classes)
+        cost, label_score, score_function_dif = clf._compute_cost(label_score, predicted_classes, y, alphas,
+                                              betas, use_coop_coef=True)
+        cost_expected = np.array([[[-6.58117293e+01,  3.24652469e-01,  6.54870769e+01],
+                                   [ 5.42224839e-06,  4.39369338e-02, -4.39423560e-02],
+                                   [ 2.47875216e-03, -2.48182426e-03,  3.07210615e-06],
+                                   [ 1.35335283e-01,  6.73794705e-03, -1.42073230e-01]],
+
+                                  [[-2.02255355e-01,  1.83156385e-02,  1.83939717e-01],
+                                   [ 7.38905610e+00,  4.97870688e-02, -7.43884317e+00],
+                                   [ 3.67879448e-01, -4.06240750e+00,  3.69452805e+00],
+                                   [ 3.67879448e-01,  8.10308393e+03, -8.10345181e+03]],
+
+                                  [[-2.48725300e-03,  2.47875218e-03,  8.50082247e-06],
+                                   [ 1.97311865e+01,  1.45794844e+02, -1.65526031e+02],
+                                   [ 3.28220396e+01, -3.28469331e+01,  2.48935342e-02],
+                                   [ 1.09663316e+03,  4.44198002e+00, -1.10107514e+03]]])
+        np.testing.assert_almost_equal(cost, cost_expected, 4)
+        expected_label_score = np.array([[[-0.875,      -2.,          4.,        ],
+                                       [-8.,          1.,          4.125     ],
+                                       [ 2.,          8.00000001, -4.        ],
+                                       [ 2.,         -0.99999999,  4.        ]],
+
+                                      [[ 2.00000002, -2.,          1.        ],
+                                       [ 4.,         -0.99999999,  2.        ],
+                                       [ 1.00000002,  2.,          4.        ],
+                                       [-1.99999998,  8.,         -1.        ]],
+
+                                      [[ 8.,          2.,         -2.98220046],
+                                       [ 2.,         4.,         -0.98220046],
+                                       [ 4.49110023,  1.,         -2.        ],
+                                       [ 8.,          2.49110023,  1.        ]]])
+        np.testing.assert_almost_equal(label_score, expected_label_score,6)
+    #
+    #     label_score = np.array(
+    #         [[[-1, -2, 4], [-8, 1, 4], [2, 8, -4], [2, -1, 4]],
+    #          [[2, -2, 1], [4, -1, 2], [1, 2, 4], [-2, 8, -1]],
+    #          [[8, 2, -4], [2, 4, -2], [4, 1, -2], [8, 2, 1]]],
+    #         dtype=np.float64)
+    #     expected_label_score = np.array(
+    #         [[[-0.75, -2, 4], [-8, 1, 4.25], [2, 8.25, -4], [2, -0.75, 4]],
+    #          [[2.5, -2, 1], [4, -1, 2], [1, 2, 4], [-1.5, 8, -1]],
+    #          [[8, 2, -4], [2, 4, 0.], [4, 1, -2], [8, 4., 1]]],
+    #         dtype=np.float64)
+    #
+    #     clf = MuCumboClassifier()
+    #     cost, label_score = clf._compute_cost(label_score, pred_classes, y, alphas,
+    #                                           use_coop_coef=True)
+    #
+    #     np.testing.assert_almost_equal(label_score, expected_label_score,
+    #                                    decimal)
+    #
+    #     label_score = np.array(
+    #         [[[0, 0, np.log(4)], [np.log(8), 0, 0], [0, 0, 0], [0, 0, 0]],
+    #          [[0, np.log(2), 0], [0, 0, 0], [0, 0, 0], [0, np.log(4), 0]],
+    #          [[0, 0, 0], [np.log(8), 0, 0], [0, np.log(2), 0], [0, 0, 0]]],
+    #         dtype=np.float64)
+    #     alphas = np.array([np.log(2), np.log(4), np.log(8)])
+    #     expected_label_score = np.array(
+    #         [[[np.log(2), 0, np.log(4)],
+    #           [np.log(8), 0, np.log(2)],
+    #           [0, np.log(2), 0],
+    #           [0, np.log(2), 0]],
+    #          [[np.log(4), np.log(2), 0],
+    #           [0, np.log(4), 0],
+    #           [np.log(4), 0, 0],
+    #           [np.log(4), np.log(4), 0]],
+    #          [[0, 0, np.log(8)],
+    #           [np.log(8), 0, np.log(8)],
+    #           [np.log(8), np.log(2), 0],
+    #           [0, np.log(8), 0]]],
+    #         dtype=np.float64)
+    #     expected_cost = np.array(
+    #         [[[-2.5, 0.5, 2.], [4., 0.5, -4.5], [0.5, -1., 0.5], [1., 2., -3.]],
+    #          [[-0.75, 0.5, 0.25], [1., 4., -5.], [4., -5., 1.], [4., 4., -8.]],
+    #          [[-9., 1., 8.], [1., 0.125, -1.125], [4., -4.5, 0.5], [1., 8., -9.]]],
+    #         dtype=np.float64)
+    #
+    #     clf = MuCumboClassifier()
+    #     cost, label_score = clf._compute_cost(label_score, pred_classes, y, alphas,
+    #                                           use_coop_coef=False)
+    #
+    #     np.testing.assert_almost_equal(label_score, expected_label_score,
+    #                                    decimal)
+    #     np.testing.assert_almost_equal(cost, expected_cost, decimal)
+    #
+    #     label_score = np.array(
+    #         [[[0, 0, np.log(4)], [np.log(8), 0, 0], [0, 0, 0], [0, 0, 0]],
+    #          [[0, np.log(2), 0], [0, 0, 0], [0, 0, 0], [0, np.log(4), 0]],
+    #          [[0, 0, 0], [np.log(8), 0, 0], [0, np.log(2), 0], [0, 0, 0]]],
+    #         dtype=np.float64)
+    #     alphas = np.array([np.log(2), np.log(4), np.log(8)])
+    #     expected_label_score = np.array(
+    #         [[[np.log(2), 0, np.log(4)],
+    #           [np.log(8), 0, np.log(2)],
+    #           [0, np.log(2), 0],
+    #           [0, np.log(2), 0]],
+    #          [[np.log(4), np.log(2), 0],
+    #           [0, 0, 0],
+    #           [0, 0, 0],
+    #           [np.log(4), np.log(4), 0]],
+    #          [[0, 0, 0],
+    #           [np.log(8), 0, np.log(8)],
+    #           [0, np.log(2), 0],
+    #           [0, np.log(8), 0]]],
+    #         dtype=np.float64)
+    #     expected_cost = np.array(
+    #         [[[-2.5, 0.5, 2.], [4., 0.5, -4.5], [0.5, -1., 0.5], [1., 2., -3.]],
+    #          [[-0.75, 0.5, 0.25], [1., 1., -2.], [1., -2., 1.], [4., 4., -8.]],
+    #          [[-2., 1., 1.], [1., 0.125, -1.125], [0.5, -1., 0.5], [1., 8., -9.]]],
+    #         dtype=np.float64)
+    #
+    #     clf = MuCumboClassifier()
+    #     cost, label_score = clf._compute_cost(label_score, pred_classes, y, alphas,
+    #                                           use_coop_coef=True)
+    #
+    #     np.testing.assert_almost_equal(label_score, expected_label_score,
+    #                                    decimal)
+    #     np.testing.assert_almost_equal(cost, expected_cost, decimal)
+    #
+    #
+    # def test_algo_options():
+    #     np.random.seed(seed)
+    #
+    #     n_estimators = 10
+    #
+    #     clf = MuCumboClassifier(n_estimators=n_estimators, best_view_mode='edge')
+    #     clf.fit(iris.data, iris.target, iris.views_ind)
+    #     score = clf.score(iris.data, iris.target)
+    #     assert_greater(score, 0.95, "Failed with score = {}".format(score))
+    #
+    #     clf = MuCumboClassifier(n_estimators=n_estimators, best_view_mode='error')
+    #     clf.fit(iris.data, iris.target, iris.views_ind)
+    #     score = clf.score(iris.data, iris.target)
+    #     assert_greater(score, 0.95, "Failed with score = {}".format(score))
+    #
+    #     assert_raises(ValueError, MuCumboClassifier(), best_view_mode='test')
+    #
+    #     clf = MuCumboClassifier()
+    #     clf.best_view_mode = 'test'
+    #     assert_raises(ValueError, clf.fit, iris.data, iris.target, iris.views_ind)
+    #
+    #
+    def test_fit_views_ind(self):
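+        # When no views_ind is passed to fit(), the test expects a default
+        # split of [0, 1, 3]: feature 0 as the first view, features 1-2 as
+        # the second.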
+        X = np.array([[1., 1., 1.], [-1., -1., -1.]])
+        y = np.array([0, 1])
+        expected_views_ind = np.array([0, 1, 3])
+        clf = MuCumboClassifier()
+        clf.fit(X, y)
+        np.testing.assert_equal(clf.X_.views_ind, expected_views_ind)
+
+    #     assert_array_equal(clf.views_ind_, expected_views_ind)
+    # #
+    def test_class_variation(self):
+    #     # Check that classes labels can be integers or strings and can be stored
+    #     # into any kind of sequence
+        X = np.array([[1., 1., 1.], [-1., -1., -1.]])
+        views_ind = np.array([0, 1, 3])
+        y = np.array([3, 1])
+        clf = MuCumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_almost_equal(clf.predict(X), y)
+
+        y = np.array(["class_1", "class_2"])
+        clf = MuCumboClassifier()
+        clf.fit(X, y)
+        np.testing.assert_equal(clf.predict(X), y)
+    #     assert_array_equal(clf.predict(X), y)
+    #
+    #     y = [1, 0]
+    #     clf = MuCumboClassifier()
+    #     clf.fit(X, y, views_ind)
+    #     assert_array_equal(clf.predict(X), y)
+    #
+    #     y = (2, 1)
+    #     clf = MuCumboClassifier()
+    #     clf.fit(X, y, views_ind)
+    #     assert_array_equal(clf.predict(X), y)
+    #
+    #     # Check that misformed or inconsistent inputs raise expections
+        X = np.zeros((5, 4, 2))
+        y = np.array([0, 1])
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     X = ["str1", "str2"]
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     X = np.array([[1., 1., 1.], [-1., -1., -1.]])
+    #     y = np.array([1])
+    #     views_ind = np.array([0, 1, 3])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     y = np.array([1, 0, 0, 1])
+    #     views_ind = np.array([0, 1, 3])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     y = np.array([3.2, 1.1])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     y = np.array([0, 1])
+    #     views_ind = np.array([0, 3, 1])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     views_ind = np.array([-1, 1, 3])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     views_ind = np.array([0, 1, 4])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     views_ind = np.array([0.5, 1, 3])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     views_ind = np.array("test")
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     views_ind = np.zeros((3, 2, 4))
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     views_ind = np.array([[-1], [1, 2]])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     views_ind = np.array([[3], [1, 2]])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     views_ind = np.array([[0.5], [1, 2]])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     views_ind = np.array([[-1, 0], [1, 2]])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     views_ind = np.array([[0, 3], [1, 2]])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #     views_ind = np.array([[0.5], [1], [2]])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y, views_ind)
+    #
+    #
+    def test_decision_function(self):
+        clf = MuCumboClassifier()
+        clf.fit(self.iris.data, self.iris.target, self.iris.views_ind)
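+        # A (4, 3) input has fewer features than the training data and should
+        # be rejected; note that slicing the 4-column iris data with [:, 0:15]
+        # below simply returns all four features.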
+        X = np.zeros((4, 3))
+        with self.assertRaises(ValueError):
+            clf.decision_function(X)
+        X = self.iris.data[:, 0:15]
+        dec = clf.decision_function(X)
+        dec_expected = np.load(get_dataset_path("dec_iris.npy"))
+        np.testing.assert_almost_equal(dec, dec_expected, 9)
+
+    def test_predict(self):
+        clf = MuCumboClassifier()
+        X = np.array([[0., 0.5, 0.7], [1., 1.5, 1.7], [2., 2.5, 2.7]])
+        y = np.array([1, 1, 1])
+        clf.fit(X, y)
+        y_pred = clf.predict(X)
+        np.testing.assert_almost_equal(y_pred, y, 9)
+
+
+    # def test_limit_cases():
+    #     np.random.seed(seed)
+    #
+    #     # Check that using empty data raises an exception
+    #     X = np.array([[]])
+    #     y = np.array([])
+    #     clf = MuCumboClassifier()
+    #     assert_raises(ValueError, clf.fit, X, y)
+    #
+    #     # Check that fit() works for the smallest possible dataset
+    #     X = np.array([[0.]])
+    #     y = np.array([0])
+    #     clf = MuCumboClassifier()
+    #     clf.fit(X, y)
+    #     assert_array_equal(clf.predict(X), y)
+    #     assert_array_equal(clf.predict(np.array([[1.]])), np.array([0]))
+    #
+    #     # Check that fit() works with samples from a single class
+    #     X = np.array([[0., 0.5, 0.7], [1., 1.5, 1.7], [2., 2.5, 2.7]])
+    #     y = np.array([1, 1, 1])
+    #     views_ind = np.array([0, 1, 3])
+    #     clf = MuCumboClassifier()
+    #     clf.fit(X, y, views_ind)
+    #     assert_array_equal(clf.predict(X), y)
+    #     assert_array_equal(clf.predict(np.array([[-1., 0., 1.]])), np.array([1]))
+    #
+    #     X = np.array([[0., 0.5, 0.7], [1., 1.5, 1.7], [2., 2.5, 2.7]])
+    #     y = np.array([1, 1, 1])
+    #     views_ind = np.array([[0, 2], [1]])
+    #     clf = MuCumboClassifier()
+    #     clf.fit(X, y, views_ind)
+    #     assert_array_equal(clf.predict(X), y)
+    #     assert_array_equal(clf.predict(np.array([[-1., 0., 1.]])), np.array([1]))
+
+
+    def test_simple_predict(self):
+        #np.random.seed(seed)
+
+        # Simple example with 2 classes and 1 view
+        X = np.array(
+            [[1.1, 2.1],
+             [2.1, 0.2],
+             [0.7, 1.2],
+             [-0.9, -1.8],
+             [-1.1, -2.2],
+             [-0.3, -1.3]])
+        y = np.array([0, 0, 0, 1, 1, 1])
+        views_ind = np.array([0, 2])
+        clf = MuCumboClassifier()
+        clf.fit(X, y, views_ind)
+        #assert_array_equal(clf.predict(X), y)
+        #assert_array_equal(clf.predict(np.array([[1., 1.], [-1., -1.]])),
+        #                   np.array([0, 1]))
+        #assert_equal(clf.decision_function(X).shape, y.shape)
+
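+        # views_ind may also be given as one array of column indices per
+        # view; here a single view containing both features in reversed order.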
+        views_ind = np.array([[1, 0]])
+        clf = MuCumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_almost_equal(clf.predict(X), y)
+        #assert_array_equal(clf.predict(X), y)
+        #assert_array_equal(clf.predict(np.array([[1., 1.], [-1., -1.]])),
+        #                 np.array([0, 1]))
+        #assert_equal(clf.decision_function(X).shape, y.shape)
+    #
+    #     # Simple example with 2 classes and 2 views
+    #     X = np.array(
+    #         [[1.1, 2.1, 0.5],
+    #          [2.1, 0.2, 1.2],
+    #          [0.7, 1.2, 2.1],
+    #          [-0.9, -1.8, -0.3],
+    #          [-1.1, -2.2, -0.9],
+    #          [-0.3, -1.3, -1.4]])
+    #     y = np.array([0, 0, 0, 1, 1, 1])
+    #     views_ind = np.array([0, 2, 3])
+    #     clf = MuCumboClassifier()
+    #     clf.fit(X, y, views_ind)
+    #     assert_array_equal(clf.predict(X), y)
+    #     assert_array_equal(clf.predict(np.array([[1., 1., 1.], [-1., -1., -1.]])),
+    #                        np.array([0, 1]))
+    #     assert_equal(clf.decision_function(X).shape, y.shape)
+    #
+    #     views_ind = np.array([[2, 0], [1]])
+    #     clf = MuCumboClassifier()
+    #     clf.fit(X, y, views_ind)
+    #     assert_array_equal(clf.predict(X), y)
+    #     assert_array_equal(clf.predict(np.array([[1., 1., 1.], [-1., -1., -1.]])),
+    #                        np.array([0, 1]))
+    #     assert_equal(clf.decision_function(X).shape, y.shape)
+    #
+    #     # Simple example with 2 classes and 3 views
+    #     X = np.array(
+    #         [[1.1, 2.1, 0.5, 1.2, 1.7],
+    #          [2.1, 0.2, 1.2, 0.6, 1.3],
+    #          [0.7, 1.2, 2.1, 1.1, 0.9],
+    #          [-0.9, -1.8, -0.3, -2.1, -1.1],
+    #          [-1.1, -2.2, -0.9, -1.5, -1.2],
+    #          [-0.3, -1.3, -1.4, -0.6, -0.7]])
+    #     y = np.array([0, 0, 0, 1, 1, 1])
+    #     views_ind = np.array([0, 2, 3, 5])
+    #     clf = MuCumboClassifier()
+    #     clf.fit(X, y, views_ind)
+    #     assert_array_equal(clf.predict(X), y)
+    #     data = np.array([[1., 1., 1., 1., 1.], [-1., -1., -1., -1., -1.]])
+    #     assert_array_equal(clf.predict(data), np.array([0, 1]))
+    #     assert_equal(clf.decision_function(X).shape, y.shape)
+    #
+    #     views_ind = np.array([[2, 0], [1], [3, 4]])
+    #     clf = MuCumboClassifier()
+    #     clf.fit(X, y, views_ind)
+    #     assert_array_equal(clf.predict(X), y)
+    #     data = np.array([[1., 1., 1., 1., 1.], [-1., -1., -1., -1., -1.]])
+    #     assert_array_equal(clf.predict(data), np.array([0, 1]))
+    #     assert_equal(clf.decision_function(X).shape, y.shape)
+    #
+    #     # Simple example with 3 classes and 3 views
+    #     X = np.array(
+    #         [[1.1, -1.2, 0.5, 1.2, -1.7],
+    #          [2.1, -0.2, 0.9, 0.6, -1.3],
+    #          [0.7, 1.2, 2.1, 1.1, 0.9],
+    #          [0.9, 1.8, 2.2, 2.1, 1.1],
+    #          [-1.1, -2.2, -0.9, -1.5, -1.2],
+    #          [-0.3, -1.3, -1.4, -0.6, -0.7]])
+    #     y = np.array([0, 0, 1, 1, 2, 2])
+    #     views_ind = np.array([0, 2, 3, 5])
+    #     clf = MuCumboClassifier()
+    #     clf.fit(X, y, views_ind)
+    #     assert_array_equal(clf.predict(X), y)
+    #     data = np.array(
+    #         [[1., -1., 1., 1., -1.],
+    #          [1., 1., 1., 1., 1.],
+    #          [-1., -1., -1., -1., -1.]])
+    #     assert_array_equal(clf.predict(data), np.array([0, 1, 2]))
+    #     assert_equal(clf.decision_function(X).shape, (X.shape[0], 3))
+    #
+    #     views_ind = np.array([[1, 0], [2], [3, 4]])
+    #     clf = MuCumboClassifier()
+    #     clf.fit(X, y, views_ind)
+    #     assert_array_equal(clf.predict(X), y)
+    #     data = np.array(
+    #         [[1., -1., 1., 1., -1.],
+    #          [1., 1., 1., 1., 1.],
+    #          [-1., -1., -1., -1., -1.]])
+    #     assert_array_equal(clf.predict(data), np.array([0, 1, 2]))
+    #     assert_equal(clf.decision_function(X).shape, (X.shape[0], 3))
+    #
+    #
+    # def test_generated_examples():
+    #     def generate_data_in_orthotope(n_samples, limits):
+    #         limits = np.array(limits)
+    #         n_features = limits.shape[0]
+    #         data = np.random.random((n_samples, n_features))
+    #         data = (limits[:, 1]-limits[:, 0]) * data + limits[:, 0]
+    #         return data
+    #
+    #     n_samples = 100
+    #
+    #     np.random.seed(seed)
+    #     view_0 = np.concatenate(
+    #         (generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]])))
+    #     view_1 = generate_data_in_orthotope(2*n_samples, [[0., 1.], [0., 1.]])
+    #     X = np.concatenate((view_0, view_1), axis=1)
+    #     y = np.zeros(2*n_samples, dtype=np.int64)
+    #     y[n_samples:] = 1
+    #     views_ind = np.array([0, 2, 4])
+    #     clf = MuCumboClassifier(n_estimators=1)
+    #     clf.fit(X, y, views_ind)
+    #     assert_equal(clf.score(X, y), 1.)
+    #
+    #     np.random.seed(seed)
+    #     view_0 = np.concatenate(
+    #         (generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[0., 1.], [1., 2.]])))
+    #     view_1 = np.concatenate(
+    #         (generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[0., 1.], [1., 2.]]),
+    #          generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]])))
+    #     X = np.concatenate((view_0, view_1), axis=1)
+    #     y = np.zeros(4*n_samples, dtype=np.int64)
+    #     y[2*n_samples:] = 1
+    #     views_ind = np.array([0, 2, 4])
+    #     clf = MuCumboClassifier(n_estimators=3)
+    #     clf.fit(X, y, views_ind)
+    #     assert_equal(clf.score(X, y), 1.)
+    #
+    #     np.random.seed(seed)
+    #     view_0 = np.concatenate(
+    #         (generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]])))
+    #     view_1 = np.concatenate(
+    #         (generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]])))
+    #     view_2 = np.concatenate(
+    #         (generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]])))
+    #     X = np.concatenate((view_0, view_1, view_2), axis=1)
+    #     y = np.zeros(3*n_samples, dtype=np.int64)
+    #     y[n_samples:2*n_samples] = 1
+    #     y[2*n_samples:] = 2
+    #     views_ind = np.array([0, 2, 4, 6])
+    #     clf = MuCumboClassifier(n_estimators=3)
+    #     clf.fit(X, y, views_ind)
+    #     assert_equal(clf.score(X, y), 1.)
+    #
+    #     np.random.seed(seed)
+    #     view_0 = np.concatenate(
+    #         (generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[0., 2.], [0., 1.]])))
+    #     view_1 = np.concatenate(
+    #         (generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[0., 2.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]])))
+    #     view_2 = np.concatenate(
+    #         (generate_data_in_orthotope(n_samples, [[0., 2.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+    #          generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]])))
+    #     X = np.concatenate((view_0, view_1, view_2), axis=1)
+    #     y = np.zeros(3*n_samples, dtype=np.int64)
+    #     y[n_samples:2*n_samples] = 1
+    #     y[2*n_samples:] = 2
+    #     views_ind = np.array([0, 2, 4, 6])
+    #     clf = MuCumboClassifier(n_estimators=4)
+    #     clf.fit(X, y, views_ind)
+    #     assert_equal(clf.score(X, y), 1.)
+    #
+    #
+    # def test_classifier():
+    #     return check_estimator(MuCumboClassifier)
+    #
+    #
+    # def test_iris():
+    #     # Check consistency on dataset iris.
+    #
+    #     np.random.seed(seed)
+    #     n_estimators = 5
+    #     classes = np.unique(iris.target)
+    #
+    #     for views_ind in [iris.views_ind, np.array([[0, 2], [1, 3]])]:
+    #         clf = MuCumboClassifier(n_estimators=n_estimators)
+    #
+    #         clf.fit(iris.data, iris.target, views_ind)
+    #
+    #         assert_true(np.all((0. <= clf.estimator_errors_)
+    #                            & (clf.estimator_errors_ <= 1.)))
+    #         assert_true(np.all(np.diff(clf.estimator_errors_) < 0.))
+    #
+    #         assert_array_equal(classes, clf.classes_)
+    #         assert_equal(clf.decision_function(iris.data).shape[1], len(classes))
+    #
+    #         score = clf.score(iris.data, iris.target)
+    #         assert_greater(score, 0.95, "Failed with score = {}".format(score))
+    #
+    #         assert_equal(len(clf.estimators_), n_estimators)
+    #
+    #         # Check for distinct random states
+    #         assert_equal(len(set(est.random_state for est in clf.estimators_)),
+    #                      len(clf.estimators_))
+
+
+    def test_staged_methods(self):
+        seed = 7
+        n_estimators = 10
+
+        target_two_classes = np.zeros(self.iris.target.shape, dtype=np.int64)
+        target_two_classes[target_two_classes.shape[0]//2:] = 1
+
+        data = (
+                (self.iris.data, self.iris.target, self.iris.views_ind),
+                (self.iris.data, self.iris.target, np.array([[0, 2], [1, 3]])),
+                (self.iris.data, target_two_classes, self.iris.views_ind),
+                (self.iris.data, target_two_classes, np.array([[0, 2], [1, 3]])),
+               )
+
+        for X, y, views_ind in data:
+            clf = MuCumboClassifier(n_estimators=n_estimators, random_state=seed)
+            clf.fit(X, y, views_ind)
+
+            staged_dec_func = [dec_f for dec_f in clf.staged_decision_function(X)]
+            staged_predict = [predict for predict in clf.staged_predict(X)]
+            staged_score = [score for score in clf.staged_score(X, y)]
+            self.assertEqual(len(staged_dec_func), n_estimators)
+            self.assertEqual(len(staged_predict), n_estimators)
+            self.assertEqual(len(staged_score), n_estimators)
+
+            # Cross-check of the staged results against classifiers refitted
+            # with fewer estimators, kept disabled for now:
+            # for ind in range(n_estimators):
+            #     clf = MuCumboClassifier(n_estimators=ind+1, random_state=seed)
+            #     clf.fit(X, y, views_ind)
+            #     dec_func = clf.decision_function(X)
+            #     predict = clf.predict(X)
+            #     score = clf.score(X, y)
+            #     np.testing.assert_array_equal(dec_func, staged_dec_func[ind])
+            #     np.testing.assert_array_equal(predict, staged_predict[ind])
+            #     self.assertEqual(score, staged_score[ind])
+
+    def test_gridsearch(self):
+        # Check that base trees can be grid-searched.
+        cumbo = MuCumboClassifier(base_estimator=DecisionTreeClassifier())
+        parameters = {'n_estimators': (1, 2),
+                      'base_estimator__max_depth': (1, 2)}
+        clf = GridSearchCV(cumbo, parameters)
+        clf.fit(self.iris.data, self.iris.target, views_ind=self.iris.views_ind)
+
+    # def test_pickle():
+    #     np.random.seed(seed)
+    #
+    #     # Check pickability.
+    #
+    #     clf = MuCumboClassifier()
+    #     clf.fit(iris.data, iris.target, iris.views_ind)
+    #     score = clf.score(iris.data, iris.target)
+    #     dump = pickle.dumps(clf)
+    #
+    #     clf_loaded = pickle.loads(dump)
+    #     assert_equal(type(clf_loaded), clf.__class__)
+    #     score_loaded = clf_loaded.score(iris.data, iris.target)
+    #     assert_equal(score, score_loaded)
+    #
+    #
+    def test_base_estimator_score(self):
+        """Test different base estimators."""
+        n_estimators = 5
+        clf = MuCumboClassifier(RandomForestClassifier(), n_estimators=n_estimators)
+        clf.fit(self.iris.data, self.iris.target, self.iris.views_ind)
+        score = clf.score(self.iris.data, self.iris.target)
+        self.assertGreater(score, 0.95, "Failed with score = {}".format(score))
+
+        clf = MuCumboClassifier(SVC(), n_estimators=n_estimators)
+        clf.fit(self.iris.data, self.iris.target, self.iris.views_ind)
+        score = clf.score(self.iris.data, self.iris.target)
+        self.assertGreater(score, 0.95, "Failed with score = {}".format(score))
+
+        # Check that using a base estimator that doesn't support sample_weight
+        # raises an error.
+        clf = MuCumboClassifier(KMeans())
+        self.assertRaises(ValueError, clf.fit, self.iris.data, self.iris.target,
+                          self.iris.views_ind)
+
+    # def test_sparse_classification():
+    #     # Check classification with sparse input.
+    #
+    #     np.random.seed(seed)
+    #
+    #     class CustomSVC(SVC):
+    #         """SVC variant that records the nature of the training set."""
+    #
+    #         def fit(self, X, y, sample_weight=None):
+    #             """Modification on fit caries data type for later verification."""
+    #             super(CustomSVC, self).fit(X, y, sample_weight=sample_weight)
+    #             self.data_type_ = type(X)
+    #             return self
+    #
+    #     n_estimators = 5
+    #     X_dense = iris.data
+    #     y = iris.target
+    #
+    #     for sparse_format in [csc_matrix, csr_matrix, lil_matrix, coo_matrix,
+    #                           dok_matrix]:
+    #         for views_ind in (iris.views_ind, np.array([[0, 2], [1, 3]])):
+    #             X_sparse = sparse_format(X_dense)
+    #
+    #             clf_sparse = MuCumboClassifier(
+    #                 base_estimator=CustomSVC(),
+    #                 random_state=seed,
+    #                 n_estimators=n_estimators)
+    #             clf_sparse.fit(X_sparse, y, views_ind)
+    #
+    #             clf_dense = MuCumboClassifier(
+    #                 base_estimator=CustomSVC(),
+    #                 random_state=seed,
+    #                 n_estimators=n_estimators)
+    #             clf_dense.fit(X_dense, y, views_ind)
+    #
+    #             assert_array_equal(clf_sparse.decision_function(X_sparse),
+    #                                clf_dense.decision_function(X_dense))
+    #
+    #             assert_array_equal(clf_sparse.predict(X_sparse),
+    #                                clf_dense.predict(X_dense))
+    #
+    #             assert_equal(clf_sparse.score(X_sparse, y),
+    #                          clf_dense.score(X_dense, y))
+    #
+    #             for res_sparse, res_dense in \
+    #                     zip(clf_sparse.staged_decision_function(X_sparse),
+    #                         clf_dense.staged_decision_function(X_dense)):
+    #                 assert_array_equal(res_sparse, res_dense)
+    #
+    #             for res_sparse, res_dense in \
+    #                     zip(clf_sparse.staged_predict(X_sparse),
+    #                         clf_dense.staged_predict(X_dense)):
+    #                 assert_array_equal(res_sparse, res_dense)
+    #
+    #             for res_sparse, res_dense in \
+    #                     zip(clf_sparse.staged_score(X_sparse, y),
+    #                         clf_dense.staged_score(X_dense, y)):
+    #                 assert_equal(res_sparse, res_dense)
+    #
+    #             # Check that sparsity of data is maintained during training
+    #             types = [clf.data_type_ for clf in clf_sparse.estimators_]
+    #             if sparse_format == csc_matrix:
+    #                 assert_true(all([type_ == csc_matrix for type_ in types]))
+    #             else:
+    #                 assert_true(all([type_ == csr_matrix for type_ in types]))
+
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file
diff --git a/multimodal/tests/test_data_sample.py b/multimodal/tests/test_data_sample.py
new file mode 100644
index 0000000000000000000000000000000000000000..43e35400ba6c96affc34159b6d14109ff892ef11
--- /dev/null
+++ b/multimodal/tests/test_data_sample.py
@@ -0,0 +1,50 @@
+import unittest
+import numpy as np
+
+from metriclearning.datasets.base import load_dict
+from metriclearning.tests.datasets.get_dataset_path import get_dataset_path
+from metriclearning.datasets.data_sample import Metriclearn_array
+import pickle
+
+class UnitaryTest(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        input_x = get_dataset_path("input_x_dic.pkl")
+        with open(input_x, "rb") as f:
+            kernel_dict = pickle.load(f)
+        test_input_x = get_dataset_path("test_kernel_input_x.pkl")
+        with open(test_input_x, "rb") as f:
+            test_kernel_dict = pickle.load(f)
+        test_input_y = get_dataset_path("test_input_y.npy")
+        input_y = get_dataset_path("input_y.npy")
+        y = np.load(input_y)
+        test_y = np.load(test_input_y)
+        cls.y = y
+        cls.kernel_dict = kernel_dict
+        cls.test_kernel_dict = test_kernel_dict
+        cls.test_y = test_y
+
+
+    def testGet_view(self):
+        a = Metriclearn_array(self.kernel_dict)
+        np.testing.assert_almost_equal(a.get_view(0), self.kernel_dict[0], 8)
+        np.testing.assert_almost_equal(a.get_view(1), self.kernel_dict[1], 8)
+
+    def test_init_Metriclearn_array(self):
+        a = Metriclearn_array(self.kernel_dict)
+        self.assertEqual(a.shape, (120, 240))
+        self.assertEqual(a.shapes_int, [120, 120])
+        self.assertEqual(a.n_views, 2)
+        dict_key = {0: 'a', 1: 'b'}
+        self.assertEqual(a.keys, dict_key.keys())
+
+    def test_init_Array(self):
+        a = Metriclearn_array(self.kernel_dict)
+        array_x = a.data
+        b = Metriclearn_array(a)
+        np.testing.assert_equal(b.views_ind, np.array([0, 120, 240]))
+
+
diff --git a/multimodal/tests/test_mkl.py b/multimodal/tests/test_mkl.py
new file mode 100644
index 0000000000000000000000000000000000000000..c55df63cdc1d9b4bbdc0aaf078cc869936a7aa89
--- /dev/null
+++ b/multimodal/tests/test_mkl.py
@@ -0,0 +1,106 @@
+# -*- coding: utf-8 -*-
+
+import unittest
+import numpy as np
+from sklearn.metrics.pairwise import rbf_kernel
+from metriclearning.tests.datasets.get_dataset_path import get_dataset_path
+from metriclearning.lpMKL import MKL
+from metriclearning.datasets.data_sample import Metriclearn_array
+import pickle
+from sklearn.exceptions import NotFittedError
+
+
+class MKLTest(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        input_x = get_dataset_path("input_x_dic.pkl")
+        with open(input_x, "rb") as f:
+            kernel_dict = pickle.load(f)
+        test_input_x = get_dataset_path("test_kernel_input_x.pkl")
+        with open(test_input_x, "rb") as f:
+            test_kernel_dict = pickle.load(f)
+        test_input_y = get_dataset_path("test_input_y.npy")
+        input_y = get_dataset_path("input_y.npy")
+        y = np.load(input_y)
+        test_y = np.load(test_input_y)
+        cls.y = y
+        cls.kernel_dict = kernel_dict
+        cls.test_kernel_dict = test_kernel_dict
+        cls.test_y = test_y
+
+    def testInitMKL(self):
+        mkl = MKL(lmbda=3, m_param = 1.0, kernel = "precomputed",
+                   kernel_params = None, use_approx = True,
+                   precision = 1E-4, n_loops = 50)
+        self.assertEqual(mkl.m_param, 1.0)
+        self.assertEqual(mkl.lmbda, 3)
+        self.assertEqual(mkl.n_loops, 50)
+        self.assertEqual(mkl.precision, 1E-4)
+
+    def testFitMKLDict(self):
+        #######################################################
+        # task with dict and not precomputed
+        #######################################################
+        mkl = MKL(lmbda=3, m_param = 1.0, kernel=['rbf'], kernel_params=[{'gamma':50}],
+                   use_approx = True,
+                   precision = 1E-4, n_loops = 50)
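+        # Here kernel=['rbf'] with kernel_params presumably means the raw views
+        # in the dict are kernelised at fit time (cf. "not precomputed" above),
+        # unlike the kernel="precomputed" setting exercised in testInitMKL.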
+        mkl.fit(self.kernel_dict, y=self.y, views_ind=None)
+        self.assertEqual(mkl.C.shape, (120,))
+        np.testing.assert_almost_equal(mkl.weights, np.array([0.7428451 , 0.66946333]), 8)
+
+
+    def testFitMKLDictNLoop(self):
+        #######################################################
+        # task with dict and not precomputed
+        #######################################################
+        mkl = MKL(lmbda=3, m_param = 0.3, kernel=['rbf'], kernel_params=[{'gamma':50}],
+                   use_approx = True,
+                   precision = 1E-4, n_loops = 50)
+        views_ind = [120, 240]
+        mkl.fit(self.kernel_dict, y=self.y, views_ind=None)
+        self.assertEqual(mkl.C.shape, (120,))
+        np.testing.assert_almost_equal(mkl.weights, np.array([0.7428451 , 0.66946333]), 8)
+
+    def testFitMKLMetricPrecision(self):
+        #######################################################
+        # task with Metric array
+        #######################################################
+        # mvml = MVML.fit(self.kernel_dict, self.y)
+        w_expected = np.array([[0.5], [0.5]])
+        x_metricl = Metriclearn_array(self.kernel_dict)
+        mkl2 = MKL(lmbda=3, m_param = 0.3, kernel=['rbf'], kernel_params=[{'gamma':50}],
+                   use_approx = True,
+                   precision = 1E0, n_loops = 50)
+        with self.assertRaises(ValueError):
+            mkl2.fit(x_metricl, y=self.y, views_ind=None)
+
+    def testFitMKLMetricPrecision2(self):
+        #######################################################
+        # task with Metric array
+        #######################################################
+        # mvml = MVML.fit(self.kernel_dict, self.y)
+        w_expected = np.array([[0.5], [0.5]])
+        x_metricl = Metriclearn_array(self.kernel_dict)
+        mkl2 = MKL(lmbda=3, m_param = 0.3, kernel="precomputed",
+                   use_approx = True,
+                   precision = 1E-9, n_loops = 600)
+        mkl2.fit(x_metricl, y=self.y, views_ind=None)
+
+    def testPredictMKLWithoutFit(self):
+        mkl = MKL(lmbda=3, m_param=0.3, kernel=['rbf'], kernel_params=[{'gamma': 50}],
+                  use_approx=True,
+                  precision=1E-9, n_loops=50)
+        with self.assertRaises(NotFittedError):
+            mkl.predict(self.test_kernel_dict)
+
+    def testPredictMKLWithFit(self):
+        x_metric = Metriclearn_array(self.kernel_dict)
+        mkl = MKL(lmbda=3, m_param=0.3, kernel=['rbf'], kernel_params=[{'gamma': 50}],
+                  use_approx=True,
+                  precision=1E-9, n_loops=50)
+        mkl.fit(x_metric, y=self.y, views_ind=None)
+        pred = mkl.predict(self.test_kernel_dict)
+        self.assertEqual(pred.shape, (80,))
diff --git a/multimodal/tests/test_mumbo.py b/multimodal/tests/test_mumbo.py
new file mode 100644
index 0000000000000000000000000000000000000000..f5b3924b62b6ae6541b4db32467338cacaeb4c92
--- /dev/null
+++ b/multimodal/tests/test_mumbo.py
@@ -0,0 +1,881 @@
+# -*- coding: utf-8 -*-
+"""Testing for the mumbo module."""
+
+# Université d'Aix Marseille (AMU) -
+# Centre National de la Recherche Scientifique (CNRS) -
+# Université de Toulon (UTLN).
+# Copyright © 2017-2018 AMU, CNRS, UTLN
+#
+# This file is part of multimodalboost.
+#
+# multimodalboost is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# multimodalboost is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with multimodalboost. If not, see <http://www.gnu.org/licenses/>.
+#
+# Author: Florent JAILLET - Laboratoire d'Informatique et Systèmes - UMR 7020
+
+import pickle
+import unittest
+import numpy as np
+from scipy.sparse import csc_matrix, csr_matrix, coo_matrix, dok_matrix
+from scipy.sparse import lil_matrix
+from sklearn.model_selection import GridSearchCV
+from sklearn.svm import SVC
+from sklearn.utils.estimator_checks import check_estimator
+from sklearn.ensemble import RandomForestClassifier
+from sklearn.cluster import KMeans
+from sklearn.tree import DecisionTreeClassifier
+from sklearn import datasets
+from multimodalboost.mumbo import MumboClassifier
+
+
+class TestMumboClassifier(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        # Load the iris dataset
+        iris = datasets.load_iris()
+        iris.views_ind = np.array([0, 2, 4])
+        cls.iris = iris
+
+    def test_init_var(self):
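+        # Per the expected matrices below, the initial cost of each sample is
+        # -(n_classes - 1) at its true class and +1 elsewhere, replicated for
+        # every view; label scores start at zero.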
+        n_classes = 3
+
+        n_views = 3
+        y = np.array([0, 2, 1, 2])
+        expected_cost = np.array(
+            [[[-2, 1, 1], [1, 1, -2], [1, -2, 1], [1, 1, -2]],
+             [[-2, 1, 1], [1, 1, -2], [1, -2, 1], [1, 1, -2]],
+             [[-2, 1, 1], [1, 1, -2], [1, -2, 1], [1, 1, -2]]],
+            dtype=np.float64)
+        expected_cost_glob = np.array(
+            [[-2, 1, 1], [1, 1, -2], [1, -2, 1], [1, 1, -2]], dtype=np.float64)
+        expected_label_score = np.zeros((n_views, y.shape[0], n_classes))
+        expected_label_score_glob = np.zeros((y.shape[0], n_classes))
+        expected_predicted_classes_shape = (n_views, y.shape[0])
+
+        clf = MumboClassifier()
+        clf.n_classes_ = n_classes
+        (cost, cost_glob, label_score, label_score_glob,
+         predicted_classes) = clf._init_var(n_views, y)
+        np.testing.assert_equal(cost, expected_cost)
+
+        np.testing.assert_equal(cost_glob, expected_cost_glob)
+        np.testing.assert_equal(label_score, expected_label_score)
+        np.testing.assert_equal(label_score_glob, expected_label_score_glob)
+        self.assertEqual(predicted_classes.shape, expected_predicted_classes_shape)
+
+
+    def test_compute_edge_global(self):
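+        # Per the expected values below, each view's global edge equals the
+        # summed cost picked at that view's predictions divided by the summed
+        # cost at the true labels.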
+        cost_global = np.array([[-2, 1, 1], [1, 1, -2], [1, -2, 1], [1, 1, -2]],
+                               dtype=np.float64)
+        predicted_classes = np.array([[0, 0, 1, 1], [0, 1, 0, 2], [2, 2, 0, 0]])
+        y = np.array([0, 2, 1, 2])
+        expected_edge_global = np.array([0.25, 0.25, -0.125])
+
+        clf = MumboClassifier()
+        edge_global = clf._compute_edge_global(cost_global, predicted_classes, y)
+        np.testing.assert_equal(edge_global, expected_edge_global)
+
+
+    def test_compute_dist(self):
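+        # Per the expected values below, the distribution of a view is the cost
+        # at each sample's true class normalised by the view's total cost over
+        # the true classes.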
+        cost = np.array(
+            [[[-2, 1, 1], [-1, -1, -2], [1, -2, 1], [1, 1, -2]],
+             [[-1, 2, 2], [2, 2, -1], [-2, 4, -2], [2, 2, -4]],
+             [[1, 4, -4], [-1, 3, -1], [-2, 2, 4], [4, 4, -4]]],
+            dtype=np.float64)
+        y = np.array([0, 2, 1, 2])
+        expected_dist = np.array(
+            [[0.25, 0.25, 0.25, 0.25], [0.5, 0.5, -2., 2.], [-0.5, 0.5, -1., 2.]])
+
+        clf = MumboClassifier()
+        dist = clf._compute_dist(cost, y)
+        np.testing.assert_equal(dist, expected_dist)
+
+        # The distribution computation only uses the cost entries associated
+        # with the correct class of each sample, so the following cost matrix
+        # should give the same result as the previous one.
+        cost = np.array(
+            [[[-2, 0, 0], [0, 0, -2], [0, -2, 0], [0, 0, -2]],
+             [[-1, 0, 0], [0, 0, -1], [0, 4, 0], [0, 0, -4]],
+             [[1, 0, 0], [0, 0, -1], [0, 2, 0], [0, 0, -4]]],
+            dtype=np.float64)
+
+        dist = clf._compute_dist(cost, y)
+        np.testing.assert_equal(dist, expected_dist)
+
+    def test_compute_coop_coef(self):
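+        # Per the expected values below, the cooperation coefficient is 1 when
+        # a view predicts a sample correctly, 0 when it does not, and 1 for all
+        # views on samples that no view predicts correctly.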
+        y = np.array([0, 1, 2, 0])
+        predicted_classes = np.array([[0, 0, 1, 1], [0, 1, 0, 2], [2, 2, 0, 0]])
+        expected_coop_coef = np.array([[1, 0, 1, 0], [1, 1, 1, 0], [0, 0, 1, 1]],
+                                      dtype=np.float64)
+
+        clf = MumboClassifier()
+        coop_coef = clf._compute_coop_coef(predicted_classes, y)
+        np.testing.assert_equal(coop_coef, expected_coop_coef)
+
+    def test_compute_edges(self):
+        cost = np.array(
+            [[[-2, 1, 1], [-1, -1, -2], [1, -2, 1], [1, 1, -2]],
+             [[-2, 2, 2], [2, 2, -4], [-2, -4, -2], [2, 2, -4]],
+             [[1, 4, -4], [-1, 3, -1], [-2, 4, 4], [4, 4, -1]]],
+            dtype=np.float64)
+        predicted_classes = np.array([[0, 2, 1, 1], [0, 1, 0, 2], [2, 2, 0, 1]])
+        y = np.array([0, 2, 1, 2])
+        expected_edges = np.array([1.25, 0.75, 0.25])
+
+        clf = MumboClassifier()
+        edges = clf._compute_edges(cost, predicted_classes, y)
+        np.testing.assert_equal(edges, expected_edges)
+
+    def test_compute_alphas(self):
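+        # The expected values below are consistent with the usual boosting
+        # weight alpha = 0.5 * ln((1 + edge) / (1 - edge)).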
+        decimal = 12
+        expected_alpha = 0.5
+        edge = (np.e-1.) / (np.e+1.)
+
+        clf = MumboClassifier()
+        alpha = clf._compute_alphas(edge)
+
+        np.testing.assert_almost_equal(alpha, expected_alpha, decimal)
+
+        expected_alphas = np.array([0.5, 1., 2.])
+        tmp = np.array([np.e, np.e**2, np.e**4])
+        edges = (tmp-1.) / (tmp+1.)
+
+        alphas = clf._compute_alphas(edges)
+        np.testing.assert_almost_equal(alphas, expected_alphas, decimal)
+
+    def test_compute_cost_global(self):
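+        # Per the expected values below, label scores gain alpha at the
+        # predicted class of each sample, and the new cost is
+        # exp(score - score_at_true_class), with the true-class entry set to
+        # minus the sum of the other entries of its row.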
+        decimal = 12
+        label_score_glob = np.array(
+            [[-1, -2, 4], [-8, 1, 4], [2, 8, -4], [2, -1, 4]],
+            dtype=np.float64)
+        best_pred_classes = np.array([0, 1, 0, 2])
+        y = np.array([0, 2, 1, 2])
+        alpha = 0.5
+        expected_label_score_glob = np.array(
+            [[-0.5, -2, 4], [-8, 1.5, 4], [2.5, 8, -4], [2, -1, 4.5]],
+            dtype=np.float64)
+
+        clf = MumboClassifier()
+        cost_glob, label_score_glob = clf._compute_cost_global(
+            label_score_glob, best_pred_classes, y, alpha)
+        np.testing.assert_almost_equal(label_score_glob, expected_label_score_glob,
+                                       decimal)
+
+        label_score_glob = np.zeros((4, 3), dtype=np.float64)
+        alpha = 0.
+        expected_label_score_glob = np.zeros((4, 3), dtype=np.float64)
+        expected_cost_glob = np.array(
+            [[-2, 1, 1], [1, 1, -2], [1, -2, 1], [1, 1, -2]],
+            dtype=np.float64)
+
+        cost_glob, label_score_glob = clf._compute_cost_global(
+            label_score_glob, best_pred_classes, y, alpha)
+        np.testing.assert_equal(label_score_glob, expected_label_score_glob)
+        np.testing.assert_equal(cost_glob, expected_cost_glob, decimal)
+        label_score_glob = np.array(
+            [[0, 0, np.log(4)], [np.log(8), 0, 0], [0, 0, 0], [0, 0, 0]],
+            dtype=np.float64)
+        alpha = np.log(2)
+        expected_label_score_glob = np.array(
+            [[alpha, 0, np.log(4)],
+             [np.log(8), alpha, 0],
+             [alpha, 0, 0],
+             [0, 0, alpha]],
+            dtype=np.float64)
+        expected_cost_glob = np.array(
+            [[-2.5, 0.5, 2.], [8., 2., -10.], [2., -3., 1.], [0.5, 0.5, -1.]],
+            dtype=np.float64)
+
+        cost_glob, label_score_glob = clf._compute_cost_global(
+            label_score_glob, best_pred_classes, y, alpha)
+
+        np.testing.assert_almost_equal(label_score_glob, expected_label_score_glob,
+                                       decimal)
+        np.testing.assert_almost_equal(cost_glob, expected_cost_glob, decimal)
+
+    def test_compute_cost(self):
+        decimal = 12
+        label_score = np.array(
+            [[[-1, -2, 4], [-8, 1, 4], [2, 8, -4], [2, -1, 4]],
+             [[2, -2, 1], [4, -1, 2], [1, 2, 4], [-2, 8, -1]],
+             [[8, 2, -4], [2, 4, -2], [4, 1, -2], [8, 2, 1]]],
+            dtype=np.float64)
+        pred_classes = np.array([[0, 2, 1, 1], [0, 1, 0, 0], [2, 2, 0, 1]])
+        y = np.array([0, 2, 1, 2])
+        alphas = np.array([0.25, 0.5, 2.])
+        expected_label_score = np.array(
+            [[[-0.75, -2, 4], [-8, 1, 4.25], [2, 8.25, -4], [2, -0.75, 4]],
+             [[2.5, -2, 1], [4, -0.5, 2], [1.5, 2, 4], [-1.5, 8, -1]],
+             [[8, 2, -2.], [2, 4, 0.], [6., 1, -2], [8, 4., 1]]],
+            dtype=np.float64)
+
+        clf = MumboClassifier()
+        cost, label_score = clf._compute_cost(label_score, pred_classes, y, alphas,
+                                              use_coop_coef=False)
+
+        np.testing.assert_almost_equal(label_score, expected_label_score,
+                                       decimal)
+
+        label_score = np.array(
+            [[[-1, -2, 4], [-8, 1, 4], [2, 8, -4], [2, -1, 4]],
+             [[2, -2, 1], [4, -1, 2], [1, 2, 4], [-2, 8, -1]],
+             [[8, 2, -4], [2, 4, -2], [4, 1, -2], [8, 2, 1]]],
+            dtype=np.float64)
+        expected_label_score = np.array(
+            [[[-0.75, -2, 4], [-8, 1, 4.25], [2, 8.25, -4], [2, -0.75, 4]],
+             [[2.5, -2, 1], [4, -1, 2], [1, 2, 4], [-1.5, 8, -1]],
+             [[8, 2, -4], [2, 4, 0.], [4, 1, -2], [8, 4., 1]]],
+            dtype=np.float64)
+
+        clf = MumboClassifier()
+        cost, label_score = clf._compute_cost(label_score, pred_classes, y, alphas,
+                                              use_coop_coef=True)
+
+        np.testing.assert_almost_equal(label_score, expected_label_score,
+                                       decimal)
+
+        label_score = np.array(
+            [[[0, 0, np.log(4)], [np.log(8), 0, 0], [0, 0, 0], [0, 0, 0]],
+             [[0, np.log(2), 0], [0, 0, 0], [0, 0, 0], [0, np.log(4), 0]],
+             [[0, 0, 0], [np.log(8), 0, 0], [0, np.log(2), 0], [0, 0, 0]]],
+            dtype=np.float64)
+        alphas = np.array([np.log(2), np.log(4), np.log(8)])
+        expected_label_score = np.array(
+            [[[np.log(2), 0, np.log(4)],
+              [np.log(8), 0, np.log(2)],
+              [0, np.log(2), 0],
+              [0, np.log(2), 0]],
+             [[np.log(4), np.log(2), 0],
+              [0, np.log(4), 0],
+              [np.log(4), 0, 0],
+              [np.log(4), np.log(4), 0]],
+             [[0, 0, np.log(8)],
+              [np.log(8), 0, np.log(8)],
+              [np.log(8), np.log(2), 0],
+              [0, np.log(8), 0]]],
+            dtype=np.float64)
+        expected_cost = np.array(
+            [[[-2.5, 0.5, 2.], [4., 0.5, -4.5], [0.5, -1., 0.5], [1., 2., -3.]],
+             [[-0.75, 0.5, 0.25], [1., 4., -5.], [4., -5., 1.], [4., 4., -8.]],
+             [[-9., 1., 8.], [1., 0.125, -1.125], [4., -4.5, 0.5], [1., 8., -9.]]],
+            dtype=np.float64)
+
+        clf = MumboClassifier()
+        cost, label_score = clf._compute_cost(label_score, pred_classes, y, alphas,
+                                              use_coop_coef=False)
+
+        np.testing.assert_almost_equal(label_score, expected_label_score,
+                                       decimal)
+        np.testing.assert_almost_equal(cost, expected_cost, decimal)
+
+        label_score = np.array(
+            [[[0, 0, np.log(4)], [np.log(8), 0, 0], [0, 0, 0], [0, 0, 0]],
+             [[0, np.log(2), 0], [0, 0, 0], [0, 0, 0], [0, np.log(4), 0]],
+             [[0, 0, 0], [np.log(8), 0, 0], [0, np.log(2), 0], [0, 0, 0]]],
+            dtype=np.float64)
+        alphas = np.array([np.log(2), np.log(4), np.log(8)])
+        expected_label_score = np.array(
+            [[[np.log(2), 0, np.log(4)],
+              [np.log(8), 0, np.log(2)],
+              [0, np.log(2), 0],
+              [0, np.log(2), 0]],
+             [[np.log(4), np.log(2), 0],
+              [0, 0, 0],
+              [0, 0, 0],
+              [np.log(4), np.log(4), 0]],
+             [[0, 0, 0],
+              [np.log(8), 0, np.log(8)],
+              [0, np.log(2), 0],
+              [0, np.log(8), 0]]],
+            dtype=np.float64)
+        expected_cost = np.array(
+            [[[-2.5, 0.5, 2.], [4., 0.5, -4.5], [0.5, -1., 0.5], [1., 2., -3.]],
+             [[-0.75, 0.5, 0.25], [1., 1., -2.], [1., -2., 1.], [4., 4., -8.]],
+             [[-2., 1., 1.], [1., 0.125, -1.125], [0.5, -1., 0.5], [1., 8., -9.]]],
+            dtype=np.float64)
+
+        clf = MumboClassifier()
+        cost, label_score = clf._compute_cost(label_score, pred_classes, y, alphas,
+                                              use_coop_coef=True)
+
+        np.testing.assert_almost_equal(label_score, expected_label_score,
+                                       decimal)
+        np.testing.assert_almost_equal(cost, expected_cost, decimal)
+
+    def test_algo_options(self):
+        seed = 7
+        np.random.seed(seed)
+
+        n_estimators = 10
+
+        clf = MumboClassifier(n_estimators=n_estimators, best_view_mode='edge')
+        clf.fit(self.iris.data, self.iris.target, self.iris.views_ind)
+        score = clf.score(self.iris.data, self.iris.target)
+        self.assertGreater(score, 0.95, "Failed with score = {}".format(score))
+
+        clf = MumboClassifier(n_estimators=n_estimators, best_view_mode='error')
+        clf.fit(self.iris.data, self.iris.target, self.iris.views_ind)
+        score = clf.score(self.iris.data, self.iris.target)
+        self.assertGreater(score, 0.95, "Failed with score = {}".format(score))
+
+        self.assertRaises(ValueError, MumboClassifier, best_view_mode='test')
+
+        clf = MumboClassifier()
+        clf.best_view_mode = 'test'
+        self.assertRaises(ValueError, clf.fit, self.iris.data,
+                          self.iris.target, self.iris.views_ind)
+
+    def test_fit_arg(self):
+        seed = 7
+        np.random.seed(seed)
+
+        # Check that using the default value for views_ind corresponds to using 2
+        # views
+        X = np.array([[1., 1., 1.], [-1., -1., -1.]])
+        y = np.array([0, 1])
+        expected_views_ind = np.array([0, 1, 3])
+        clf = MumboClassifier()
+        clf.fit(X, y)
+        np.testing.assert_equal(clf.views_ind_, expected_views_ind)
+
+        # Check that class labels can be integers or strings and can be stored
+        # in any kind of sequence
+        views_ind = np.array([0, 1, 3])
+        y = np.array([3, 1])
+        clf = MumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_equal(clf.predict(X), y)
+
+        y = np.array(["class_1", "class_2"])
+        clf = MumboClassifier()
+        clf.fit(X, y)
+        np.testing.assert_equal(clf.predict(X), y)
+
+        y = [1, 0]
+        clf = MumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_equal(clf.predict(X), y)
+
+        y = (2, 1)
+        clf = MumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_equal(clf.predict(X), y)
+
+        # Check that malformed or inconsistent inputs raise exceptions
+        X = np.zeros((5, 4, 2))
+        y = np.array([0, 1])
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        X = ["str1", "str2"]
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        X = np.array([[1., 1., 1.], [-1., -1., -1.]])
+        y = np.array([1])
+        views_ind = np.array([0, 1, 3])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        y = np.array([1, 0, 0, 1])
+        views_ind = np.array([0, 1, 3])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        y = np.array([3.2, 1.1])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        y = np.array([0, 1])
+        views_ind = np.array([0, 3, 1])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        views_ind = np.array([-1, 1, 3])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        views_ind = np.array([0, 1, 4])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        views_ind = np.array([0.5, 1, 3])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        views_ind = np.array("test")
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        views_ind = np.zeros((3, 2, 4))
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        views_ind = np.array([[-1], [1, 2]])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        views_ind = np.array([[3], [1, 2]])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        views_ind = np.array([[0.5], [1, 2]])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        views_ind = np.array([[-1, 0], [1, 2]])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        views_ind = np.array([[0, 3], [1, 2]])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+        views_ind = np.array([[0.5], [1], [2]])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y, views_ind)
+
+    def test_decision_function_arg(self):
+        # Test that decision_function() gives proper exception on deficient input.
+        seed = 7
+        np.random.seed(seed)
+
+        clf = MumboClassifier()
+        clf.fit(self.iris.data, self.iris.target, self.iris.views_ind)
+
+        X = np.zeros((4, 3))
+        self.assertRaises(ValueError, clf.decision_function, X)
+        X = np.zeros((4, 5))
+        self.assertRaises(ValueError, clf.decision_function, X)
+        X = np.zeros((5, 4, 2))
+        self.assertRaises(ValueError, clf.decision_function, X)
+        X = ["str1", "str2"]
+        self.assertRaises(ValueError, clf.decision_function, X)
+
+    def test_limit_cases(self):
+        seed = 7
+        np.random.seed(seed)
+
+        # Check that using empty data raises an exception
+        X = np.array([[]])
+        y = np.array([])
+        clf = MumboClassifier()
+        self.assertRaises(ValueError, clf.fit, X, y)
+
+        # Check that fit() works for the smallest possible dataset
+        X = np.array([[0.]])
+        y = np.array([0])
+        clf = MumboClassifier()
+        clf.fit(X, y)
+        np.testing.assert_equal(clf.predict(X), y)
+        np.testing.assert_equal(clf.predict(np.array([[1.]])), np.array([0]))
+
+        # Check that fit() works with samples from a single class
+        X = np.array([[0., 0.5, 0.7], [1., 1.5, 1.7], [2., 2.5, 2.7]])
+        y = np.array([1, 1, 1])
+        views_ind = np.array([0, 1, 3])
+        clf = MumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_equal(clf.predict(X), y)
+        np.testing.assert_equal(clf.predict(np.array([[-1., 0., 1.]])), np.array([1]))
+
+        X = np.array([[0., 0.5, 0.7], [1., 1.5, 1.7], [2., 2.5, 2.7]])
+        y = np.array([1, 1, 1])
+        views_ind = np.array([[0, 2], [1]])
+        clf = MumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_equal(clf.predict(X), y)
+        np.testing.assert_equal(clf.predict(np.array([[-1., 0., 1.]])), np.array([1]))
+
+    def test_simple_examples(self):
+        seed = 7
+        np.random.seed(seed)
+
+        # Simple example with 2 classes and 1 view
+        X = np.array(
+            [[1.1, 2.1],
+             [2.1, 0.2],
+             [0.7, 1.2],
+             [-0.9, -1.8],
+             [-1.1, -2.2],
+             [-0.3, -1.3]])
+        y = np.array([0, 0, 0, 1, 1, 1])
+        views_ind = np.array([0, 2])
+        clf = MumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_equal(clf.predict(X), y)
+        np.testing.assert_equal(clf.predict(np.array([[1., 1.], [-1., -1.]])),
+                           np.array([0, 1]))
+        self.assertEqual(clf.decision_function(X).shape, y.shape)
+
+        views_ind = np.array([[1, 0]])
+        clf = MumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_equal(clf.predict(X), y)
+        np.testing.assert_equal(clf.predict(np.array([[1., 1.], [-1., -1.]])),
+                           np.array([0, 1]))
+        self.assertEqual(clf.decision_function(X).shape, y.shape)
+
+        # Simple example with 2 classes and 2 views
+        X = np.array(
+            [[1.1, 2.1, 0.5],
+             [2.1, 0.2, 1.2],
+             [0.7, 1.2, 2.1],
+             [-0.9, -1.8, -0.3],
+             [-1.1, -2.2, -0.9],
+             [-0.3, -1.3, -1.4]])
+        y = np.array([0, 0, 0, 1, 1, 1])
+        views_ind = np.array([0, 2, 3])
+        clf = MumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_equal(clf.predict(X), y)
+        np.testing.assert_equal(clf.predict(np.array([[1., 1., 1.], [-1., -1., -1.]])),
+                           np.array([0, 1]))
+        self.assertEqual(clf.decision_function(X).shape, y.shape)
+
+        views_ind = np.array([[2, 0], [1]])
+        clf = MumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_equal(clf.predict(X), y)
+        np.testing.assert_equal(clf.predict(np.array([[1., 1., 1.], [-1., -1., -1.]])),
+                           np.array([0, 1]))
+        self.assertEqual(clf.decision_function(X).shape, y.shape)
+
+        # Simple example with 2 classes and 3 views
+        X = np.array(
+            [[1.1, 2.1, 0.5, 1.2, 1.7],
+             [2.1, 0.2, 1.2, 0.6, 1.3],
+             [0.7, 1.2, 2.1, 1.1, 0.9],
+             [-0.9, -1.8, -0.3, -2.1, -1.1],
+             [-1.1, -2.2, -0.9, -1.5, -1.2],
+             [-0.3, -1.3, -1.4, -0.6, -0.7]])
+        y = np.array([0, 0, 0, 1, 1, 1])
+        views_ind = np.array([0, 2, 3, 5])
+        clf = MumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_equal(clf.predict(X), y)
+        data = np.array([[1., 1., 1., 1., 1.], [-1., -1., -1., -1., -1.]])
+        np.testing.assert_equal(clf.predict(data), np.array([0, 1]))
+        self.assertEqual(clf.decision_function(X).shape, y.shape)
+
+        views_ind = np.array([[2, 0], [1], [3, 4]])
+        clf = MumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_equal(clf.predict(X), y)
+        data = np.array([[1., 1., 1., 1., 1.], [-1., -1., -1., -1., -1.]])
+        np.testing.assert_equal(clf.predict(data), np.array([0, 1]))
+        self.assertEqual(clf.decision_function(X).shape, y.shape)
+
+        # Simple example with 3 classes and 3 views
+        X = np.array(
+            [[1.1, -1.2, 0.5, 1.2, -1.7],
+             [2.1, -0.2, 0.9, 0.6, -1.3],
+             [0.7, 1.2, 2.1, 1.1, 0.9],
+             [0.9, 1.8, 2.2, 2.1, 1.1],
+             [-1.1, -2.2, -0.9, -1.5, -1.2],
+             [-0.3, -1.3, -1.4, -0.6, -0.7]])
+        y = np.array([0, 0, 1, 1, 2, 2])
+        views_ind = np.array([0, 2, 3, 5])
+        clf = MumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_equal(clf.predict(X), y)
+        data = np.array(
+            [[1., -1., 1., 1., -1.],
+             [1., 1., 1., 1., 1.],
+             [-1., -1., -1., -1., -1.]])
+        np.testing.assert_equal(clf.predict(data), np.array([0, 1, 2]))
+        self.assertEqual(clf.decision_function(X).shape, (X.shape[0], 3))
+
+        views_ind = np.array([[1, 0], [2], [3, 4]])
+        clf = MumboClassifier()
+        clf.fit(X, y, views_ind)
+        np.testing.assert_equal(clf.predict(X), y)
+        data = np.array(
+            [[1., -1., 1., 1., -1.],
+             [1., 1., 1., 1., 1.],
+             [-1., -1., -1., -1., -1.]])
+        np.testing.assert_equal(clf.predict(data), np.array([0, 1, 2]))
+        self.assertEqual(clf.decision_function(X).shape, (X.shape[0], 3))
+
+    def test_generated_examples(self):
+        seed = 7
+        def generate_data_in_orthotope(n_samples, limits):
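+            # Draw n_samples points uniformly inside the axis-aligned box whose
+            # per-feature [low, high] bounds are given by limits.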
+            limits = np.array(limits)
+            n_features = limits.shape[0]
+            data = np.random.random((n_samples, n_features))
+            data = (limits[:, 1]-limits[:, 0]) * data + limits[:, 0]
+            return data
+
+        n_samples = 100
+
+        np.random.seed(seed)
+        view_0 = np.concatenate(
+            (generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]])))
+        view_1 = generate_data_in_orthotope(2*n_samples, [[0., 1.], [0., 1.]])
+        X = np.concatenate((view_0, view_1), axis=1)
+        y = np.zeros(2*n_samples, dtype=np.int64)
+        y[n_samples:] = 1
+        views_ind = np.array([0, 2, 4])
+        clf = MumboClassifier(n_estimators=1)
+        clf.fit(X, y, views_ind)
+        self.assertEqual(clf.score(X, y), 1.)
+
+        np.random.seed(seed)
+        view_0 = np.concatenate(
+            (generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[0., 1.], [1., 2.]])))
+        view_1 = np.concatenate(
+            (generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[0., 1.], [1., 2.]]),
+             generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]])))
+        X = np.concatenate((view_0, view_1), axis=1)
+        y = np.zeros(4*n_samples, dtype=np.int64)
+        y[2*n_samples:] = 1
+        views_ind = np.array([0, 2, 4])
+        clf = MumboClassifier(n_estimators=3)
+        clf.fit(X, y, views_ind)
+        self.assertEqual(clf.score(X, y), 1.)
+
+        np.random.seed(seed)
+        view_0 = np.concatenate(
+            (generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]])))
+        view_1 = np.concatenate(
+            (generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]])))
+        view_2 = np.concatenate(
+            (generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]])))
+        X = np.concatenate((view_0, view_1, view_2), axis=1)
+        y = np.zeros(3*n_samples, dtype=np.int64)
+        y[n_samples:2*n_samples] = 1
+        y[2*n_samples:] = 2
+        views_ind = np.array([0, 2, 4, 6])
+        clf = MumboClassifier(n_estimators=3)
+        clf.fit(X, y, views_ind)
+        self.assertEqual(clf.score(X, y), 1.)
+
+        np.random.seed(seed)
+        view_0 = np.concatenate(
+            (generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[0., 2.], [0., 1.]])))
+        view_1 = np.concatenate(
+            (generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[0., 2.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]])))
+        view_2 = np.concatenate(
+            (generate_data_in_orthotope(n_samples, [[0., 2.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[0., 1.], [0., 1.]]),
+             generate_data_in_orthotope(n_samples, [[1., 2.], [0., 1.]])))
+        X = np.concatenate((view_0, view_1, view_2), axis=1)
+        y = np.zeros(3*n_samples, dtype=np.int64)
+        y[n_samples:2*n_samples] = 1
+        y[2*n_samples:] = 2
+        views_ind = np.array([0, 2, 4, 6])
+        clf = MumboClassifier(n_estimators=4)
+        clf.fit(X, y, views_ind)
+        self.assertEqual(clf.score(X, y), 1.)
+
+
+    def test_classifier(self):
+        return check_estimator(MumboClassifier)
+
+    def test_iris(self):
+        # Check consistency on dataset iris.
+        seed = 7
+        np.random.seed(seed)
+        n_estimators = 5
+        classes = np.unique(self.iris.target)
+
+        for views_ind in [self.iris.views_ind, np.array([[0, 2], [1, 3]])]:
+            clf = MumboClassifier(n_estimators=n_estimators)
+
+            clf.fit(self.iris.data, self.iris.target, views_ind)
+
+            self.assertTrue(np.all((0. <= clf.estimator_errors_)
+                               & (clf.estimator_errors_ <= 1.)))
+            self.assertTrue(np.all(np.diff(clf.estimator_errors_) < 0.))
+
+            np.testing.assert_equal(classes, clf.classes_)
+            self.assertEqual(clf.decision_function(self.iris.data).shape[1], len(classes))
+
+            score = clf.score(self.iris.data, self.iris.target)
+            self.assertGreater(score, 0.95, "Failed with score = {}".format(score))
+
+            self.assertEqual(len(clf.estimators_), n_estimators)
+
+            # Check for distinct random states
+            self.assertEqual(len(set(est.random_state for est in clf.estimators_)),
+                         len(clf.estimators_))
+
+    def test_staged_methods(self):
+        n_estimators = 10
+        seed = 7
+
+        target_two_classes = np.zeros(self.iris.target.shape, dtype=np.int64)
+        target_two_classes[target_two_classes.shape[0]//2:] = 1
+
+        data = (
+                (self.iris.data, self.iris.target, self.iris.views_ind),
+                (self.iris.data, self.iris.target, np.array([[0, 2], [1, 3]])),
+                (self.iris.data, target_two_classes, self.iris.views_ind),
+                (self.iris.data, target_two_classes, np.array([[0, 2], [1, 3]])),
+               )
+
+        for X, y, views_ind in data:
+            clf = MumboClassifier(n_estimators=n_estimators, random_state=seed)
+            clf.fit(X, y, views_ind)
+
+            staged_dec_func = [dec_f for dec_f in clf.staged_decision_function(X)]
+            staged_predict = [predict for predict in clf.staged_predict(X)]
+            staged_score = [score for score in clf.staged_score(X, y)]
+
+            self.assertEqual(len(staged_dec_func), n_estimators)
+            self.assertEqual(len(staged_predict), n_estimators)
+            self.assertEqual(len(staged_score), n_estimators)
+
+            for ind in range(n_estimators):
+                clf = MumboClassifier(n_estimators=ind+1, random_state=seed)
+                clf.fit(X, y, views_ind)
+                dec_func = clf.decision_function(X)
+                predict = clf.predict(X)
+                score = clf.score(X, y)
+                np.testing.assert_equal(dec_func, staged_dec_func[ind])
+                np.testing.assert_equal(predict, staged_predict[ind])
+                self.assertEqual(score, staged_score[ind])
+
+    def test_gridsearch(self):
+        seed = 7
+        np.random.seed(seed)
+
+        # Check that base trees can be grid-searched.
+        mumbo = MumboClassifier(base_estimator=DecisionTreeClassifier())
+        parameters = {'n_estimators': (1, 2),
+                      'base_estimator__max_depth': (1, 2)}
+        clf = GridSearchCV(mumbo, parameters)
+        clf.fit(self.iris.data, self.iris.target, views_ind=self.iris.views_ind)
+
+    def test_pickle(self):
+        seed = 7
+        np.random.seed(seed)
+
+        # Check pickability.
+
+        clf = MumboClassifier()
+        clf.fit(self.iris.data, self.iris.target, self.iris.views_ind)
+        score = clf.score(self.iris.data, self.iris.target)
+        dump = pickle.dumps(clf)
+
+        clf_loaded = pickle.loads(dump)
+        self.assertEqual(type(clf_loaded), clf.__class__)
+        score_loaded = clf_loaded.score(self.iris.data, self.iris.target)
+        self.assertEqual(score, score_loaded)
+
+    def test_base_estimator(self):
+        seed = 7
+        np.random.seed(seed)
+
+        # Test different base estimators.
+        n_estimators = 5
+        clf = MumboClassifier(RandomForestClassifier(), n_estimators=n_estimators)
+        clf.fit(self.iris.data, self.iris.target, self.iris.views_ind)
+        score = clf.score(self.iris.data, self.iris.target)
+        self.assertGreater(score, 0.95, "Failed with score = {}".format(score))
+
+        clf = MumboClassifier(SVC(), n_estimators=n_estimators)
+        clf.fit(self.iris.data, self.iris.target, self.iris.views_ind)
+        score = clf.score(self.iris.data, self.iris.target)
+        self.assertGreater(score, 0.95, "Failed with score = {}".format(score))
+
+        # Check that using a base estimator that doesn't support sample_weight
+        # raises an error.
+        clf = MumboClassifier(KMeans())
+        self.assertRaises(ValueError, clf.fit, self.iris.data, self.iris.target, self.iris.views_ind)
+
+
+    def test_sparse_classification(self):
+        # Check classification with sparse input.
+        seed = 7
+        np.random.seed(seed)
+
+        class CustomSVC(SVC):
+            """SVC variant that records the nature of the training set."""
+
+            def fit(self, X, y, sample_weight=None):
+                """Modification on fit caries data type for later verification."""
+                super(CustomSVC, self).fit(X, y, sample_weight=sample_weight)
+                self.data_type_ = type(X)
+                return self
+
+        n_estimators = 5
+        X_dense = self.iris.data
+        y = self.iris.target
+
+        for sparse_format in [csc_matrix, csr_matrix, lil_matrix, coo_matrix,
+                              dok_matrix]:
+            for views_ind in (self.iris.views_ind, np.array([[0, 2], [1, 3]])):
+                X_sparse = sparse_format(X_dense)
+
+                clf_sparse = MumboClassifier(
+                    base_estimator=CustomSVC(),
+                    random_state=seed,
+                    n_estimators=n_estimators)
+                clf_sparse.fit(X_sparse, y, views_ind)
+
+                clf_dense = MumboClassifier(
+                    base_estimator=CustomSVC(),
+                    random_state=seed,
+                    n_estimators=n_estimators)
+                clf_dense.fit(X_dense, y, views_ind)
+
+                np.testing.assert_equal(clf_sparse.decision_function(X_sparse),
+                                   clf_dense.decision_function(X_dense))
+
+                np.testing.assert_equal(clf_sparse.predict(X_sparse),
+                                   clf_dense.predict(X_dense))
+
+                self.assertEqual(clf_sparse.score(X_sparse, y),
+                             clf_dense.score(X_dense, y))
+
+                for res_sparse, res_dense in \
+                        zip(clf_sparse.staged_decision_function(X_sparse),
+                            clf_dense.staged_decision_function(X_dense)):
+                    np.testing.assert_equal(res_sparse, res_dense)
+
+                for res_sparse, res_dense in \
+                        zip(clf_sparse.staged_predict(X_sparse),
+                            clf_dense.staged_predict(X_dense)):
+                    np.testing.assert_equal(res_sparse, res_dense)
+
+                for res_sparse, res_dense in \
+                        zip(clf_sparse.staged_score(X_sparse, y),
+                            clf_dense.staged_score(X_dense, y)):
+                    np.testing.assert_equal(res_sparse, res_dense)
+
+                # Check that sparsity of data is maintained during training
+                types = [clf.data_type_ for clf in clf_sparse.estimators_]
+                if sparse_format == csc_matrix:
+                    self.assertTrue(all([type_ == csc_matrix for type_ in types]))
+                else:
+                    self.assertTrue(all([type_ == csr_matrix for type_ in types]))
+
+
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file
diff --git a/multimodal/tests/test_mvml.py b/multimodal/tests/test_mvml.py
new file mode 100644
index 0000000000000000000000000000000000000000..3de8a3588fa33010e556b4ac3cb206bf5c38b2b7
--- /dev/null
+++ b/multimodal/tests/test_mvml.py
@@ -0,0 +1,220 @@
+# -*- coding: utf-8 -*-
+
+import pickle
+import unittest
+
+import numpy as np
+from sklearn.exceptions import NotFittedError
+
+from metriclearning.datasets.data_sample import Metriclearn_array
+from metriclearning.mvml import MVML
+from metriclearning.tests.datasets.get_dataset_path import get_dataset_path
+
+
+class MVMLTest(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls):
+        input_x = get_dataset_path("input_x_dic.pkl")
+        with open(input_x, "rb") as f:
+            kernel_dict = pickle.load(f)
+        test_input_x = get_dataset_path("test_kernel_input_x.pkl")
+        with open(test_input_x, "rb") as f:
+            test_kernel_dict = pickle.load(f)
+        test_input_y = get_dataset_path("test_input_y.npy")
+        input_y = get_dataset_path("input_y.npy")
+        y = np.load(input_y)
+        test_y = np.load(test_input_y)
+        cls.y = y
+        cls.kernel_dict = kernel_dict
+        cls.test_kernel_dict = test_kernel_dict
+        cls.test_y = test_y
+
+
+    def testInitMVML(self):
+        mvml = MVML(lmbda=0.1, eta=1, nystrom_param=0.2)
+        self.assertEqual(mvml.nystrom_param, 0.2)
+        self.assertEqual(mvml.learn_A, 1)
+        self.assertEqual(mvml.learn_w, 0)
+        self.assertEqual(mvml.n_loops, 6)
+        self.assertEqual(mvml.lmbda, 0.1)
+        self.assertEqual(mvml.eta, 1)
+
+    def testFitMVMLDict(self):
+        #######################################################
+        # task with dict and not precomputed
+        #######################################################
+        mvml = MVML(lmbda=0.1, eta=1,
+                    kernel=['rbf'], kernel_params=[{'gamma':50}],
+                    nystrom_param=0.2)
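+        # With nystrom_param=0.2 over the 2 x 120 = 240 stacked kernel columns,
+        # the (48, 48) A and (48, 1) g shapes asserted below match 0.2 * 240 = 48.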
+        views_ind = [120, 240]
+        mvml.fit(self.kernel_dict, y=self.y, views_ind=None)
+        self.assertEqual(mvml.A.shape, (48, 48))
+        self.assertEqual(mvml.g.shape,(48, 1))
+        w_expected = np.array([[0.5],[0.5]])
+        np.testing.assert_almost_equal(mvml.w, w_expected, 8)
+
+    def testFitMVMLPrecision(self):
+        #######################################################
+        # task with dict and not precomputed
+        #######################################################
+        mvml = MVML(lmbda=0.1, eta=1,
+                    kernel=['rbf'], kernel_params=[{'gamma':50}],
+                    nystrom_param=0.2, precision=1E-0)
+        views_ind = [120, 240]
+        mvml.fit(self.kernel_dict, y=self.y, views_ind=None)
+        self.assertEqual(mvml.A.shape, (48, 48))
+        self.assertEqual(mvml.g.shape,(48, 1))
+        w_expected = np.array([[0.5],[0.5]])
+        np.testing.assert_almost_equal(mvml.w, w_expected, 8)
+
+    def testFitMVMLDictNLoop(self):
+        #######################################################
+        # task with dict and not precomputed
+        #######################################################
+        mvml = MVML(lmbda=0.1, eta=1,
+                    kernel=['rbf'], kernel_params=[{'gamma':50}],
+                    nystrom_param=0.2, n_loops=2, learn_w=1)
+        views_ind = [120, 240]
+        mvml.fit(self.kernel_dict, y=self.y, views_ind=None)
+        self.assertEqual(mvml.A.shape, (48, 48))
+        self.assertEqual(mvml.g.shape,(48, 1))
+        w_expected = np.array([0.73849765, 0.52974952])
+        np.testing.assert_almost_equal(mvml.w, w_expected, 3)
+
+    def testFitMVMLMetric(self):
+        #######################################################
+        # task with Metric array
+        #######################################################
+        # mvml = MVML.fit(self.kernel_dict, self.y)
+        w_expected = np.array([[0.5], [0.5]])
+        x_metricl = Metriclearn_array(self.kernel_dict)
+        mvml2 = MVML(lmbda=0.1, eta=1, nystrom_param=1.0)
+        mvml2.fit(x_metricl, y=self.y, views_ind=None)
+        self.assertEqual(mvml2.A.shape, (240, 240))
+        self.assertEqual(mvml2.g.shape,(240, 1))
+        np.testing.assert_almost_equal(mvml2.w, w_expected, 8)
+        with self.assertRaises(TypeError):
+            mvml2.fit([1, 2, 3])
+
+    def testFitMVMLMetric_learA4(self):
+        #######################################################
+        # task with Metric array
+        #######################################################
+        # mvml = MVML.fit(self.kernel_dict, self.y)
+        w_expected = np.array([[0.5], [0.5]])
+        x_metricl = Metriclearn_array(self.kernel_dict)
+        mvml2 = MVML(lmbda=0.1, eta=1, nystrom_param=1.0, learn_A=4)
+        mvml2.fit(x_metricl, y=self.y, views_ind=None)
+        self.assertEqual(mvml2.A.shape, (240, 240))
+        self.assertEqual(mvml2.g.shape,(240, 1))
+        np.testing.assert_almost_equal(mvml2.w, w_expected, 8)
+        with self.assertRaises(TypeError):
+            mvml2.fit([1, 2, 3])
+
+    def testFitMVMLMetric_learA3(self):
+        #######################################################
+        # task with Metric array, learn_A=3
+        #######################################################
+        w_expected = np.array([[0.5], [0.5]])
+        x_metricl = Metriclearn_array(self.kernel_dict)
+        mvml2 = MVML(lmbda=0.1, eta=1, nystrom_param=1.0, learn_A=3)
+        mvml2.fit(x_metricl, y=self.y, views_ind=None)
+        self.assertEqual(mvml2.A.shape, (240, 240))
+        self.assertEqual(mvml2.g.shape,(240, 1))
+        np.testing.assert_almost_equal(mvml2.w, w_expected, 8)
+        with self.assertRaises(TypeError):
+            mvml2.fit([1, 2, 3])
+
+    def testFitMVMLMetric_PredictA2(self):
+        #######################################################
+        # task with Metric array, learn_A=2, fit then predict
+        #######################################################
+        w_expected = np.array([0.2, 0.1])
+        x_metricl = Metriclearn_array(self.kernel_dict)
+        mvml2 = MVML(lmbda=0.1, eta=1, nystrom_param=0.6,
+                     learn_A=2, learn_w=1)
+        mvml2.fit(x_metricl, y=self.y, views_ind=None)
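+        # nystrom_param=0.6 shrinks the approximated metric from 240x240 to the
+        # 144x144 checked below; learn_w=1 again yields non-uniform view weights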
+        self.assertEqual(mvml2.A.shape, (144, 144))
+        self.assertEqual(mvml2.g.shape,(144, 1))
+        np.testing.assert_almost_equal(mvml2.w, w_expected, 0)
+        pred = mvml2.predict(self.test_kernel_dict)
+        self.assertEqual(pred.shape, (80,1))
+
+    def testFitMVMLMetric_PredictA1(self):
+        #######################################################
+        # task with Metric array, learn_A=1, fit then predict
+        #######################################################
+        w_expected = np.array([1.3, 1.4])
+        x_metricl = Metriclearn_array(self.kernel_dict)
+        mvml2 = MVML(lmbda=0.1, eta=1, nystrom_param=0.6,
+                     learn_A=1, learn_w=1)
+        mvml2.fit(x_metricl, y=self.y, views_ind=None)
+        self.assertEqual(mvml2.A.shape, (144, 144))
+        self.assertEqual(mvml2.g.shape,(144, 1))
+        np.testing.assert_almost_equal(mvml2.w, w_expected, 0)
+        pred = mvml2.predict(self.test_kernel_dict)
+        self.assertEqual(pred.shape, (80,1))
+
+    def testFitMVMLArray_2d(self):
+        #######################################################
+        # task with nparray 2d
+        #######################################################
+        w_expected = np.array([[0.5], [0.5]])
+        x_metricl = Metriclearn_array(self.kernel_dict)
+        x_array = np.asarray(x_metricl)
+        mvml3 = MVML(lmbda=0.1, eta=1, nystrom_param=1.0)
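+        # for a plain 2-D array the view boundaries must be given explicitly:
+        # views_ind=[0, 120, 240] assigns columns [0, 120) to the first view and
+        # [120, 240) to the second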
+        mvml3.fit(x_array, y=self.y, views_ind=[0, 120, 240])
+        self.assertEqual(mvml3.A.shape, (240, 240))
+        self.assertEqual(mvml3.g.shape,(240, 1))
+        np.testing.assert_almost_equal(mvml3.w, w_expected, 8)
+
+    def testFitMVMLArray_1d(self):
+        #######################################################
+        # task with nparray 1d
+        #######################################################
+        w_expected = np.array([[0.5], [0.5]])
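+        # pack one data matrix per view into a 1-D object array; MVML accepts this
+        # input form directly, so no views_ind is needed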
+        n_views = len(self.kernel_dict)
+        x_array_1d = np.empty(n_views, dtype=object)
+        for v in range(n_views):
+            x_array_1d[v] = self.kernel_dict[v]
+        mvml4 = MVML(lmbda=0.1, eta=1, learn_A=3, nystrom_param=0.6,
+                     kernel=['rbf'], kernel_params=[{'gamma':50}])
+        mvml4.fit(x_array_1d, y=self.y)
+        self.assertEqual(mvml4.A.shape, (144, 144))
+        self.assertEqual(mvml4.g.shape,(144, 1))
+        np.testing.assert_almost_equal(mvml4.w, w_expected, 8)
+
+
+    def testPredictMVML_withoutFit(self):
+        mvml = MVML(lmbda=0.1, eta=1,
+                    kernel=['rbf'], kernel_params=[{'gamma':50}],
+                    nystrom_param=0.2)
+        with self.assertRaises(NotFittedError):
+            mvml.predict(self.test_kernel_dict)
+
+    def testPredictMVMLKernel(self):
+        mvml = MVML(lmbda=0.1, eta=1,
+                    kernel=['rbf'], kernel_params={'gamma':50},
+                    nystrom_param=0.2, learn_A=4)
+        mvml.fit(self.kernel_dict, y=self.y)
+        pred = mvml.predict(self.test_kernel_dict)
+        self.assertEqual(pred.shape, (80, 1))
+
+    def testPredictMVML(self):
+        mvml = MVML(lmbda=0.1, eta=1,
+                    nystrom_param=1.0, learn_A=4)
+        mvml.fit(self.kernel_dict, y=self.y)
+        pred = mvml.predict(self.test_kernel_dict)
+        self.assertEqual(pred.shape, (80, 1))
+
+
+if __name__ == "__main__":
+    unittest.main()
\ No newline at end of file