...
 
Commits (44)
# run the test suite
tests:
  image: registry.gitlab.lis-lab.fr:5005/skmad-suite/madarrays/ubuntu:18.04
  tags:
    - docker
  script:
    - cd python
    - pip3 install --no-deps ltfatpy madarrays yafe skpomade pandas xarray
    - pip3 install 'scipy==1.4.1' -U
    - pip3 install 'matplotlib==3.1.2' -U
    - pip3 install --no-deps .
    # create the tffpy config file before running the tests
    - python3 tffpy/tests/ci_config.py
    - pytest-3

# generate the documentation (published via GitLab Pages)
pages:
  image: registry.gitlab.lis-lab.fr:5005/skmad-suite/madarrays/ubuntu:18.04
  tags:
    - docker
  only:
    - master
  script:
    - cd python
    - pip3 install --no-deps ltfatpy madarrays yafe skpomade pandas xarray
    - pip3 install 'scipy==1.4.1' -U
    - pip3 install 'matplotlib==3.1.2' -U
    - pip3 install --no-deps .
    # NOTE(review): ci_config.py runs after the Sphinx build here, while the
    # tests job runs it before pytest — confirm the docs build (nbsphinx
    # executes notebooks) does not need the config file to exist first.
    - python3 setup.py build_sphinx
    - python3 tffpy/tests/ci_config.py
    - cp -r build/sphinx/html public
  artifacts:
    paths:
      - public
# TFF2020 - Time-Frequency Fading
Code and data to reproduce experiments from paper
*Time-frequency fading algorithms based on Gabor multipliers*
by A. Marina Krémé, Valentin Emiya, Caroline Chaux and Bruno Torrésani, 2020.
The sound material is available in folder 'data'.
The code is available in folders 'matlab' and 'python'. The main experiments
are available in both programming languages. Some code is only available in
one language.
\ No newline at end of file
This diff is collapsed.
# Packaging manifest: files shipped in the source distribution.
include *.txt
include *.rst
include VERSION
recursive-include doc *.rst *.py *.ipynb
include tffpy/tests/*.py
# Never ship the generated documentation build tree.
prune doc/build
# Python code for time-frequency fading
tffpy
=====
:py:mod:`tffpy` is a Python package for time-frequency fading using Gabor
multipliers based on the work in paper *Time-frequency fading algorithms
based on Gabor multipliers* by A. Marina Krémé, Valentin Emiya, Caroline
Chaux and Bruno Torrésani, 2020.
Install
-------
Install the current release with ``pip``::
pip install tffpy
Download the data from `this link <https://gitlab.lis-lab
.fr/skmad-suite/tff2020/-/tree/master/data>`_.
Then run function :py:func:`tffpy.utils.generate_config` in order to create
a configuration file and modify it to specify the path to your data folder.
The location of the configuration file is given by function
:py:func:`tffpy.utils.get_config_file`.
For additional details, see doc/install.rst.
Usage
-----
See the `documentation <http://skmad-suite.pages.lis-lab.fr/tffpy/>`_.
Bugs
----
Please report any bugs that you find through the `tffpy GitLab project
<https://gitlab.lis-lab.fr/skmad-suite/tff2020/issues>`_.
You can also fork the repository and create a merge request.
Source code
-----------
The source code of tffpy is available via its `GitLab project
<https://gitlab.lis-lab.fr/skmad-suite/tff2020>`_.
You can clone the git repository of the project using the command::

    git clone git@gitlab.lis-lab.fr:skmad-suite/tff2020.git
Copyright © 2020
----------------
* `Laboratoire d'Informatique et Systèmes <http://www.lis-lab.fr/>`_
* `Université d'Aix-Marseille <http://www.univ-amu.fr/>`_
* `Centre National de la Recherche Scientifique <http://www.cnrs.fr/>`_
* `Université de Toulon <http://www.univ-tln.fr/>`_
Contributors
------------
* `Valentin Emiya <mailto:valentin.emiya@lis-lab.fr>`_
* `Ama Marina Krémé <mailto:ama-marina.kreme@lis-lab.fr>`_
License
-------
Released under the GNU General Public License version 3 or later
(see `LICENSE.txt`).
# Makefile for Sphinx documentation
#
# You can set these variables from the command line,
# e.g. `make html SPHINXOPTS=-W`.
SPHINXOPTS   =
SPHINXBUILD  = python -m sphinx
PAPER        =

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

# All targets below are commands, not files: declare them phony so a stray
# file of the same name cannot shadow them.  latexpdf, dist, docs and
# gitwash-update were missing from the original list.
.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest epub latexpdf dist docs gitwash-update
# Print the list of supported targets (latexpdf, dist and docs were missing).
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  pickle     to make pickle files"
	@echo "  epub       to make an epub"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through latexmk"
	@echo "  dist       to build an HTML + PDF distribution tarball"
	@echo "  docs       to rebuild the full HTML and PDF documentation"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
	@echo "  gitwash    to update the gitwash documentation"
# Remove everything the documentation build may have produced.
# The leading '-' ignores errors (paths may not exist on a fresh checkout).
clean:
	-rm -rf build/*
	-rm -rf ghpages_build
	-rm -rf auto_examples modules
	-rm -rf reference/generated reference/algorithms/generated reference/classes/generated reference/readwrite/generated

# Build a distributable tarball of the HTML + PDF documentation.
# Use $(MAKE) rather than bare `make` so -n/-j and the jobserver propagate
# to the sub-make invocations.
dist: html
	test -d build/latex || $(MAKE) latex
	$(MAKE) -C build/latex all-pdf
	-rm -rf build/dist
	(cd build/html; cp -r . ../../build/dist)
	(cd build/dist && tar czf ../dist.tar.gz .)
# Standalone HTML pages.
html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html
	@echo
	@echo "Build finished. The HTML pages are in build/html."

# HTML pages named index.html inside per-document directories (clean URLs).
dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) build/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in build/dirhtml."

# Pickled doctrees, for post-processing by other tools.
pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) build/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

# JSON doctrees, for post-processing by other tools.
json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) build/json
	@echo
	@echo "Build finished; now you can process the JSON files."

# Input files for Microsoft HTML Help Workshop.
htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in build/htmlhelp."

# Input files for a Qt Assistant help collection.
qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) build/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in build/qthelp, like this:"
	@echo "# qcollectiongenerator build/qthelp/test.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile build/qthelp/test.qhc"
# epub output.
epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) build/epub
	@echo
	@echo "Build finished. The epub file is in build/epub."
# LaTeX sources only (run (pdf)latex yourself, or use `make latexpdf`).
latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex
	@echo
	@echo "Build finished; the LaTeX files are in build/latex."
	@echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
	      "run these through (pdf)latex."

# Overview of all changed/added/deprecated items.
changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes
	@echo
	@echo "The overview file is in build/changes."

# Check all external links for integrity.
linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in build/linkcheck/output.txt."

# Run the doctests embedded in the documentation.
doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) build/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in build/doctest/output.txt."
# Build the PDF documentation via latexmk.
latexpdf: latex
	@echo "Running LaTeX files through latexmk..."
	$(MAKE) -C build/latex all-pdf
	@echo "latexmk finished; the PDF files are in build/latex."

# Full documentation build (HTML + PDF).
# Fixed: the copied PDF was `networkx_reference.pdf`, a leftover from the
# template this Makefile came from; conf.py's latex_documents builds
# tffpy.tex, so the generated PDF is tffpy.pdf.
docs: clean html latexpdf
	cp build/latex/tffpy.pdf build/html/_downloads/.

# NOTE(review): this target still points at the networkx project the
# Makefile was copied from — confirm whether it is used at all for tffpy.
gitwash-update:
	python ../tools/gitwash_dumper.py developer networkx \
	    --project-url=http://networkx.github.io \
	    --project-ml-url=http://groups.google.com/group/networkx-discuss/ \
	    --gitwash-url git@github.com:matthew-brett/gitwash.git
If you only want to get the documentation, note that a pre-built
version for the latest release is available
[online](http://skmad-suite.pages.lis-lab.fr/tffpy/).
Sphinx is used to generate the API and reference documentation.
## Instructions to build the documentation
In addition to installing ``tffpy`` and its dependencies, install the
Python packages needed to build the documentation by entering
```
pip install -r ../requirements/doc.txt
```
in the ``doc/`` directory.
To build the HTML documentation, run:
```
make html
```
in the ``doc/`` directory. This will generate a ``build/html`` subdirectory
containing the built documentation.
To build the PDF documentation, run:
```
make latexpdf
```
You will need to have Latex installed for this.
{
"cells": [
{
"cell_type": "markdown",
"metadata": {
"ein.tags": [
"worksheet-0"
],
"slideshow": {
"slide_type": "-"
}
},
"source": [
"# Demo for `tffpy.interpolation_solver`\n",
"\n",
"A simple demonstration of the baseline interpolation solver"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
"\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%%javascript\n",
"IPython.OutputArea.prototype._should_scroll = function(lines) {\n",
" return false;\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"autoscroll": "json-false",
"collapsed": true,
"ein.tags": [
"worksheet-0"
],
"slideshow": {
"slide_type": "-"
}
},
"outputs": [],
"source": [
"import numpy as np\n",
"import matplotlib as mpl\n",
"mpl.rcParams['figure.figsize'] = [15.0, 7.0]\n",
"\n",
"from tffpy.datasets import get_mix\n",
"from tffpy.interpolation_solver import solve_by_interpolation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"win_type = 'gauss'\n",
"win_dur = 256 / 8000\n",
"hop_ratio = 1 / 4\n",
"n_bins_ratio = 4\n",
"delta_mix_db = 0\n",
"delta_loc_db = 30\n",
"n_iter_closing = n_iter_opening = 3\n",
"wb_to_loc_ratio_db = 8\n",
"closing_first = True\n",
"or_mask = True\n",
"fig_dir = 'fig_interpolation'\n",
"\n",
"x_mix, dgt_params, signal_params, mask, x_bird, x_engine = \\\n",
" get_mix(loc_source='bird', wideband_src='car',\n",
" wb_to_loc_ratio_db=wb_to_loc_ratio_db,\n",
" win_dur=win_dur, win_type=win_type,\n",
" hop_ratio=hop_ratio, n_bins_ratio=n_bins_ratio,\n",
" n_iter_closing=n_iter_closing,\n",
" n_iter_opening=n_iter_opening,\n",
" closing_first=closing_first,\n",
" delta_mix_db=delta_mix_db, delta_loc_db=delta_loc_db,\n",
" or_mask=or_mask, fig_dir=fig_dir)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"x_est = solve_by_interpolation(x_mix, mask, dgt_params, signal_params,\n",
" fig_dir)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.2"
},
"name": "baseline_interpolation_solver.ipynb"
},
"nbformat": 4,
"nbformat_minor": 1
}
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Demo for `tffpy.create_subregions.create_subregions`\n",
"\n",
"A simple demonstration for creating sub-regions from a boolean time-frequency mask"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
"\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%%javascript\n",
"IPython.OutputArea.prototype._should_scroll = function(lines) {\n",
" return false;\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"import matplotlib as mpl\n",
"mpl.rcParams['figure.figsize'] = [15.0, 7.0]\n",
"import matplotlib.pyplot as plt\n",
"\n",
"from tffpy.datasets import get_mix\n",
"from tffpy.create_subregions import create_subregions"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fig_dir = 'fig_create_subregions'\n",
"x_mix, dgt_params, signal_params, mask, x_loc, x_wb = \\\n",
" get_mix(loc_source='bird',\n",
" wideband_src='car',\n",
" crop=8192,\n",
" win_dur=256/8000,\n",
" win_type='gauss',\n",
" hop_ratio=1/4,\n",
" n_bins_ratio=4,\n",
" n_iter_closing=3,\n",
" n_iter_opening=3,\n",
" closing_first=True,\n",
" delta_mix_db=0,\n",
" delta_loc_db=20,\n",
" wb_to_loc_ratio_db=16,\n",
" or_mask=True,\n",
" fig_dir=None)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"tol = 1e-9\n",
"mask_with_subregions, norms = create_subregions(mask_bool=mask, \n",
" dgt_params=dgt_params, signal_params=signal_params, \n",
" tol=tol, fig_dir=fig_dir, return_norms=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"print('Values in mask:', np.unique(mask_with_subregions))\n",
"print('Number of sub-regions:', np.max(mask_with_subregions))\n",
"plt.semilogy(np.sort(np.unique(norms)))\n",
"plt.title('Distances between sub-regions (sorted norm coefficients without duplicates)')\n",
"plt.grid()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Demo for `tffpy.tf_fading.estimate_energy_in_mask`\n",
"\n",
"A simple demonstration for the estimation of energy in time-frequency regions."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
"\n",
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"%%javascript\n",
"IPython.OutputArea.prototype._should_scroll = function(lines) {\n",
" return false;\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"import numpy as np\n",
"import matplotlib as mpl\n",
"mpl.rcParams['figure.figsize'] = [15.0, 7.0]\n",
"\n",
"from tffpy.datasets import get_mix\n",
"from tffpy.tf_fading import estimate_energy_in_mask"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"fig_dir = 'fig_energy_estimation'\n",
"x_mix, dgt_params, signal_params, mask, x_loc, x_wb = \\\n",
" get_mix(loc_source='bird',\n",
" wideband_src='car',\n",
" crop=None,\n",
" win_dur=256/8000,\n",
" win_type='gauss',\n",
" hop_ratio=1/4,\n",
" n_bins_ratio=4,\n",
" n_iter_closing=3,\n",
" n_iter_opening=3,\n",
" closing_first=True,\n",
" delta_mix_db=0,\n",
" delta_loc_db=40,\n",
" wb_to_loc_ratio_db=8,\n",
" or_mask=True,\n",
" fig_dir=fig_dir)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"estimate_energy_in_mask(x_mix=x_mix, mask=mask, dgt_params=dgt_params, signal_params=signal_params,\n",
" fig_dir=fig_dir, prefix=None)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Sphinx configuration for building the tffpy documentation.
import sys
import os
from datetime import date
# Imported so version/release below can read tffpy.__version__.
import tffpy
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../tffpy/'))
# -- General configuration ------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.autosummary',
              'sphinx.ext.doctest',
              'sphinx.ext.intersphinx',
              'sphinx.ext.todo',
              'sphinx.ext.coverage',
              'sphinx.ext.mathjax',
              'sphinx.ext.viewcode',
              'numpydoc',
              'nbsphinx',
              'IPython.sphinxext.ipython_console_highlighting',
              ]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The encoding of source files.
source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'tffpy'
# Fixed: 'A. Nother' was a placeholder left over from the project template;
# use the actual authors (consistent with setup.py and credits.rst).
author = 'V. Emiya, A. M. Kreme'
copyright = '2019-{}, {}'.format(date.today().year, author)

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = tffpy.__version__
# The full version, including alpha/beta/rc tags.
release = tffpy.__version__.replace('_', '')

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', '**/test_*.rst', '**.ipynb_checkpoints']
# The reST default role (used for this markup: `text`) to use for all
# documents.  None keeps Sphinx's default behaviour.
default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
# (Also assigned again near the end of this file.)
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'bizstyle'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# Custom sidebar templates, maps document names to template names.
html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
html_search_scorer = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'tffpydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    'papersize': 'a4paper',
    # The font size ('10pt', '11pt' or '12pt').
    'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    'preamble': '',
    # Latex figure (float) alignment
    'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'tffpy.tex', 'tffpy Documentation', author, 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
latex_use_parts = False
# If true, show page references after internal links.
latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
latex_appendices = []
# If false, no module index is generated.
latex_domain_indices = True
# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, 'tffpy', 'tffpy Documentation', [author], 1)]

# If true, show URL addresses after external links.
man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
# Fixed: the entry was a copy of `latex_documents` (5 fields, a '.tex'
# target name and a 'manual' documentclass); Texinfo entries take 7 fields.
texinfo_documents = [
    (master_doc, 'tffpy', 'tffpy Documentation', author,
     'tffpy', 'Time-frequency fading using Gabor multipliers.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
texinfo_appendices = []

# If false, no module index is generated.
texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
    'numpy': ('https://docs.scipy.org/doc/numpy/', None),
    'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None),
    'pandas': ('https://pandas.pydata.org/docs/', None),
    'skpomade': ('http://valentin.emiya.pages.lis-lab.fr/skpomade/', None),
    'yafe': ('http://skmad-suite.pages.lis-lab.fr/yafe/', None),
    'ltfatpy': ('http://dev.pages.lis-lab.fr/ltfatpy/', None),
}
# Allow errors in notebook
nbsphinx_allow_errors = True
# Timeout in notebook
nbsphinx_timeout = 120
# Do not show class members
numpydoc_show_class_members = False
# Include todos
# NOTE(review): duplicates the `todo_include_todos` assignment in the general
# configuration section above; harmless, but one of the two could be removed.
todo_include_todos = True
# Order members by member type
autodoc_member_order = 'groupwise'
Credits
=======
Copyright(c) 2020
-----------------
* Laboratoire d'Informatique et Systèmes <http://www.lis-lab.fr/>
* Université d'Aix-Marseille <http://www.univ-amu.fr/>
* Centre National de la Recherche Scientifique <http://www.cnrs.fr/>
* Université de Toulon <http://www.univ-tln.fr/>
Contributors
------------
* Valentin Emiya <firstname.lastname_AT_lis-lab.fr>
* Ama Marina Kreme <firstname.lastname_AT_lis-lab.fr>
This package has been created thanks to the joint work with Florent Jaillet
and Ronan Hamon on other packages.
License
-------
This file is part of tffpy.
tffpy is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
##########################
:mod:`tffpy` documentation
##########################
Overview
========
:py:mod:`tffpy`: time-frequency fading problem and solvers using Gabor
multipliers, based on the paper
*Time-frequency fading algorithms based on Gabor multipliers*
by M. Kreme, V. Emiya, C. Chaux and B. Torrésani in 2020.
The package :py:mod:`tffpy` includes in particular:
* class :py:class:`tffpy.tf_fading.GabMulTff` that implements the proposed
solver for reconstructing a source from a mixture and a time-frequency
binary mask.
* class :py:class:`tffpy.experiments.exp_solve_tff.SolveTffExperiment` to
conduct the main experiment on mixtures of real sounds, with time-frequency
masks generated automatically, using the proposed solutions and baseline
solvers. Script `tffpy.scripts.script_exp_solve_tff` provides an example of
code to handle the experiment (configuring, running on a computer grid or a
single computer, displaying results).
Similar and complementary code is available in Matlab.
Documentation
=============
.. only:: html
:Release: |version|
:Date: |today|
.. toctree::
:maxdepth: 1
installation
references
tutorials
credits
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
Installation
############
``tffpy`` requires the following packages, which will be automatically
installed with ``tffpy`` using ``pip``:
* `python >= 3.6 <https://wiki.python.org/moin/BeginnersGuide/Download>`_
* `numpy >= 1.13 <http://www.numpy.org>`_
* `scipy <https://www.scipy.org/>`_
* `matplotlib <https://matplotlib.org/>`_
* `pandas <https://pandas.pydata.org/>`_
* `xarray <https://xarray.pydata.org/>`_
* `ltfatpy <http://dev.pages.lis-lab.fr/ltfatpy/>`_
* `skpomade <http://valentin.emiya.pages.lis-lab.fr/skpomade/>`_
* `yafe <http://skmad-suite.pages.lis-lab.fr/yafe/>`_
* `madarrays <https://gitlab.lis-lab.fr/skmad-suite/madarrays>`_
Make sure your Python environment is properly configured. It is recommended to
install ``tffpy`` in a virtual environment.
Release version
---------------
First, make sure you have the latest version of pip (the Python package
manager) installed. If you do not, refer to the `Pip documentation
<https://pip.pypa.io/en/stable/installing/>`_ and install ``pip`` first.
Install the current release with ``pip``::
pip install tffpy
To upgrade to a newer release use the ``--upgrade`` flag::
pip install --upgrade tffpy
If you do not have permission to install software systemwide, you can install
into your user directory using the ``--user`` flag::
pip install --user tffpy
Alternatively, you can manually download ``tffpy`` from its `GitLab project
<https://gitlab.lis-lab.fr/skmad-suite/tff2020>`_ or `PyPI
<https://pypi.python.org/pypi/tffpy>`_. To install one of these versions,
unpack it and run the following from the top-level source directory using the
Terminal::
pip install .
Dataset installation
--------------------
Download the data from `this link <https://gitlab.lis-lab
.fr/skmad-suite/tff2020/-/tree/master/data>`_.
Then run function :py:func:`tffpy.utils.generate_config` in order to create
a configuration file and modify it to specify the path to your data folder.
The location of the configuration file is given by function
:py:func:`tffpy.utils.get_config_file`.
Development version
-------------------
If you have `Git <https://git-scm.com/>`_ installed on your system, it is also
possible to install the development version of ``tffpy``.
Before installing the development version, you may need to uninstall the
standard version of ``tffpy`` using ``pip``::
pip uninstall tffpy
Clone the Git repository::
    git clone git@gitlab.lis-lab.fr:skmad-suite/tff2020.git
    cd tff2020/python
You may also need to install required packages::
pip install -r requirements/defaults.txt
Then execute ``pip`` with flag ``-e`` to follow the development branch::
pip install -e .
To update ``tffpy`` at any time, in the same directory do::
git pull
To run unitary tests, first install required packages::
pip install -r requirements/dev.txt
and execute ``pytest``::
pytest
References
==========
:Release: |release|
:Date: |today|
tffpy\.create_subregions module
-------------------------------
.. automodule:: tffpy.create_subregions
:members:
:undoc-members:
:show-inheritance:
tffpy\.datasets module
----------------------
.. automodule:: tffpy.datasets
:members:
:undoc-members:
:show-inheritance:
tffpy\.interpolation_solver module
----------------------------------
.. automodule:: tffpy.interpolation_solver
:members:
:undoc-members:
:show-inheritance:
tffpy\.tf_fading module
-----------------------
.. automodule:: tffpy.tf_fading
:members:
:undoc-members:
:show-inheritance:
tffpy\.tf_tools module
----------------------
.. automodule:: tffpy.tf_tools
:members:
:undoc-members:
:show-inheritance:
tffpy\.utils module
-------------------
.. automodule:: tffpy.utils
:members:
:undoc-members:
:show-inheritance:
tffpy\.experiments\.exp_solve_tff module
----------------------------------------
.. automodule:: tffpy.experiments.exp_solve_tff
:members:
:special-members: __call__
:undoc-members:
:show-inheritance:
Tutorials and demonstrations
############################
.. toctree::
:maxdepth: 1
_notebooks/mask_energy_estimation.ipynb
_notebooks/create_subregions.ipynb
_notebooks/baseline_interpolation_solver.ipynb
--index-url https://pypi.python.org/simple/
numpy>=1.13
scipy>=1.4.1
matplotlib>=3.1.2
pandas
xarray
ltfatpy
skpomade
yafe
madarrays
--index-url https://pypi.python.org/simple/
coverage
pytest
pytest-cov
pytest-randomly
--index-url https://pypi.python.org/simple/
nbsphinx
numpydoc
sphinx
[tool:pytest]
testpaths = tffpy
addopts = --verbose
--cov-report=term-missing
--cov-report=html
--cov=tffpy
--doctest-modules
[coverage:run]
branch = True
source = tffpy
include = */tffpy/*
omit = */tests/*
[coverage:report]
exclude_lines =
pragma: no cover
if self.debug:
if settings.DEBUG
raise AssertionError
raise NotImplementedError
if 0:
if __name__ == .__main__.:
if obj is None: return
if verbose > 0:
if self.verbose > 0:
if verbose > 1:
if self.verbose > 1:
pass
def __str__(self):
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
import sys
NAME = 'tffpy'
DESCRIPTION = 'Time-frequency fading using Gabor multipliers'
LICENSE = 'GNU General Public License v3 (GPLv3)'
URL = 'https://gitlab.lis-lab.fr/skmad-suite/{}'.format(NAME)
AUTHOR = 'Valentin Emiya, Ama Marina Kreme'
AUTHOR_EMAIL = ('valentin.emiya@lis-lab.fr, '
'ama-marina.kreme@lis-lab.fr')
INSTALL_REQUIRES = ['numpy', 'scipy', 'matplotlib', 'pandas', 'xarray',
'ltfatpy', 'skpomade', 'yafe', 'madarrays']
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Mathematics',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X ',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 3.6']
PYTHON_REQUIRES = '>=3.6'
EXTRAS_REQUIRE = {
'dev': ['coverage', 'pytest', 'pytest-cov', 'pytest-randomly'],
'doc': ['nbsphinx', 'numpydoc', 'sphinx']}
PROJECT_URLS = {'Bug Reports': URL + '/issues',
'Source': URL}
KEYWORDS = 'time-frequency, fading, filtering, Gabor multiplier, audio'
###############################################################################
if sys.argv[-1] == 'setup.py':
print("To install, run 'python setup.py install'\n")
if sys.version_info[:2] < (3, 6):
errmsg = '{} requires Python 3.6 or later ({[0]:d}.{[1]:d} detected).'
print(errmsg.format(NAME, sys.version_info[:2]))
sys.exit(-1)
def get_version():
v_text = open('VERSION').read().strip()
v_text_formted = '{"' + v_text.replace('\n', '","').replace(':', '":"')
v_text_formted += '"}'
v_dict = eval(v_text_formted)
return v_dict[NAME]
def set_version(path, VERSION):
    """Write ``__version__ = "<VERSION>"`` into ``<path>/__init__.py``.

    Any existing ``__version__ = ...`` line is removed; all other lines are
    preserved and the new assignment is appended at the end of the file.

    Parameters
    ----------
    path : str
        Directory containing the ``__init__.py`` to update.
    VERSION : str
        Version string to write.
    """
    filename = os.path.join(path, '__init__.py')
    buf = ""
    # BUG FIX: the original opened both handles without closing them; the
    # output handle in particular was never flushed/closed explicitly, so
    # the rewrite relied on interpreter GC. Context managers fix both.
    with open(filename, "rb") as fin:
        for line in fin:
            # Keep every line except a previous __version__ assignment.
            if not line.decode("utf8").startswith("__version__ ="):
                buf += line.decode("utf8")
    with open(filename, "wb") as fout:
        fout.write(buf.encode("utf8"))
        fout.write(('__version__ = "%s"\n' % VERSION).encode("utf8"))
def setup_package():
    """Read metadata, stamp the package version and run setuptools."""
    pkg_version = get_version()
    # The long description is the README shipped next to this script.
    script_dir = os.path.abspath(os.path.dirname(__file__))
    readme_path = os.path.join(script_dir, 'README.rst')
    with open(readme_path, encoding='utf-8') as readme_file:
        readme_text = readme_file.read()
    # Write __version__ into the package's __init__.py before building.
    set_version(NAME, get_version())
    setup(
        name=NAME,
        version=pkg_version,
        description=DESCRIPTION,
        long_description=readme_text,
        url=URL,
        author=AUTHOR,
        author_email=AUTHOR_EMAIL,
        license=LICENSE,
        classifiers=CLASSIFIERS,
        keywords=KEYWORDS,
        packages=find_packages(exclude=['doc', 'dev']),
        install_requires=INSTALL_REQUIRES,
        python_requires=PYTHON_REQUIRES,
        extras_require=EXTRAS_REQUIRE,
        project_urls=PROJECT_URLS,
    )
# Run the build/installation only when executed as a script, so the module
# can also be imported (e.g. by tooling) without side effects.
if __name__ == "__main__":
    setup_package()
# -*- coding: utf-8 -*-
""" Filtering out time-frequency regions using Gabor multipliers.

.. moduleauthor:: Valentin Emiya
"""
# Re-exporting the main API is currently disabled; kept for reference:
# from .tf_tools import GaborMultiplier, get_dgt_params, get_signal_params, dgt
# __all__ = ['GaborMultiplier', 'get_dgt_params', 'get_signal_params', 'dgt']
# TODO minimal documentation
# Package version; rewritten by set_version() in setup.py at build time.
__version__ = "0.1"
# -*- coding: utf-8 -*-
"""
.. moduleauthor:: Valentin Emiya
"""
# TODO check if eigs(, 1) can be replaced by Halko to run faster
from pathlib import Path
import warnings
import numpy as np
import matplotlib.pyplot as plt
from scipy.ndimage import label
from scipy.sparse.linalg import eigs
from tffpy.utils import plot_mask
from tffpy.tf_tools import GaborMultiplier
def create_subregions(mask_bool, dgt_params, signal_params, tol,
                      fig_dir=None, return_norms=False):
    """
    Create sub-regions from boolean mask and tolerance on sub-region distance.

    See Algorithm 3 *Finding sub-regions for TFF-P* in the reference paper.
    Connected components of the mask are used as initial sub-regions; the
    most-coupled pair of sub-regions is then merged repeatedly until every
    pairwise distance is at most `tol`.

    Parameters
    ----------
    mask_bool : nd-array
        Time-frequency boolean mask
    dgt_params : dict
        DGT parameters
    signal_params : dict
        Signal parameters
    tol : float
        Tolerance on sub-region distance (spectral norm of the composition
        of the Gabor multipliers related to two candidate sub-regions).
    fig_dir : Path
        If not None, folder where figures are stored. If None, figures are
        not plotted.
    return_norms : bool
        If True, the final distance matrix is returned as a second output.

    Returns
    -------
    mask_labeled : nd-array
        Time-frequency mask with one positive integer for each sub-region
        and zeros outside sub-regions.
    pq_norms : nd-array
        Matrix of distances between sub-regions (only when `return_norms`
        is True).
    """
    # Initial sub-regions: connected components of the boolean mask.
    mask_labeled, n_labels = label(mask_bool)
    # Pairwise distances between sub-regions (strictly lower triangle).
    pq_norms = _get_pq_norms(mask=mask_labeled,
                             dgt_params=dgt_params, signal_params=signal_params)
    if fig_dir is not None:
        fig_dir = Path(fig_dir)
        fig_dir.mkdir(parents=True, exist_ok=True)
        # Snapshot of the initial labeling and distance matrix.
        plt.figure()
        plot_mask(mask=mask_labeled, hop=dgt_params['hop'],
                  n_bins=dgt_params['n_bins'], fs=signal_params['fs'])
        plt.set_cmap('nipy_spectral')
        plt.title('Initial subregions')
        plt.savefig(fig_dir / 'initial_subregions.pdf')
        # from matplotlib.colors import LogNorm
        plt.figure()
        with warnings.catch_warnings():
            # log10 of the zero entries emits a harmless divide warning.
            warnings.simplefilter("ignore")
            plt.imshow(np.log10(pq_norms+pq_norms.T), origin='lower')
        plt.ylabel('Sub-region index')
        plt.xlabel('Sub-region index')
        plt.colorbar()
        plt.set_cmap('viridis')
        plt.title('Initial norms of Gabor multiplier composition')
        plt.savefig(fig_dir / 'initial_norms.pdf')
    n_labels_max = n_labels
    # Outer loop: alternate a merging pass and a distance-update pass until
    # no pair of sub-regions is closer than `tol`.
    while pq_norms.max() > tol:
        # Merge each pair (p, q), q < p, such that pq_norms[p, q] > tol
        to_be_updated = [False] * n_labels
        while pq_norms.max() > tol:
            # Pick the currently most-coupled pair (argmax of distances).
            i_p, i_q = np.unravel_index(np.argmax(pq_norms, axis=None),
                                        pq_norms.shape)
            mask_labeled, pq_norms = _merge_subregions(mask=mask_labeled,
                                                       pq_norms=pq_norms,
                                                       i_p=i_p, i_q=i_q)
            # Region q absorbed the merge: its distances must be recomputed.
            to_be_updated[i_q] = True
            # NOTE(review): presumably _merge_subregions removes slot i_p and
            # relabels the last sub-region into it, hence slot i_p inherits
            # the last slot's flag and the list shrinks by one — confirm
            # against _merge_subregions.
            to_be_updated[i_p] = to_be_updated[-1]
            to_be_updated = to_be_updated[:-1]
            n_labels -= 1
        # Recompute (in place) all distances involving flagged sub-regions.
        for i_p in range(n_labels):
            if to_be_updated[i_p]:
                _update_pq_norms(mask=mask_labeled,
                                 pq_norms=pq_norms, i_p=i_p,
                                 dgt_params=dgt_params,
                                 signal_params=signal_params)
                # print('Merge sub-region p={}'.format(i_p))
        if fig_dir is not None:
            # Intermediate snapshot after each merging pass, indexed by the
            # number of merges performed so far.
            plt.figure()
            plot_mask(mask=mask_labeled, hop=dgt_params['hop'],
                      n_bins=dgt_params['n_bins'], fs=signal_params['fs'])
            plt.title('subregions')
            plt.set_cmap('nipy_spectral')
            plt.savefig(fig_dir / 'subregions_i{}.pdf'
                        .format(n_labels_max-n_labels))
            plt.figure()
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                plt.imshow(np.log10(pq_norms+pq_norms.T), origin='lower')
            plt.ylabel('Sub-region index')
            plt.xlabel('Sub-region index')
            plt.colorbar()
            plt.set_cmap('viridis')
            plt.title('norms of Gabor multiplier composition')
            plt.savefig(fig_dir / 'norms__i{}.pdf'
                        .format(n_labels_max-n_labels))
    if fig_dir is not None:
        # Final snapshot once all distances are below the tolerance.
        plt.figure()
        plot_mask(mask=mask_labeled, hop=dgt_params['hop'],
                  n_bins=dgt_params['n_bins'], fs=signal_params['fs'])
        plt.title('Final subregions')
        plt.set_cmap('nipy_spectral')
        plt.savefig(fig_dir / 'final_subregions.pdf')
        plt.figure()
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            plt.imshow(np.log10(pq_norms+pq_norms.T), origin='lower')
        plt.ylabel('Sub-region index')
        plt.xlabel('Sub-region index')
        plt.colorbar()
        plt.set_cmap('viridis')
        plt.title('Final norms of Gabor multiplier composition')
        plt.savefig(fig_dir / 'final_norms.pdf')
    if return_norms:
        return mask_labeled, pq_norms
    else:
        return mask_labeled
def _get_pq_norms(mask, dgt_params, signal_params):
    """
    Compute distance matrix between sub-regions.

    The distance between sub-regions p and q is the real part of the
    dominant eigenvalue of the composition of their Gabor multipliers.
    Only the strictly lower triangle (q < p) is filled; the upper triangle
    stays zero.

    Parameters
    ----------
    mask : nd-array
        Time-frequency mask with one positive integer for each sub-region
        and zeros outside sub-regions.
    dgt_params : dict
        DGT parameters
    signal_params : dict
        Signal parameters

    Returns
    -------
    pq_norms : nd-array
        Matrix of distances between sub-regions.
    """
    # Labels are 1..n_labels; 0 marks the background.
    n_labels = np.unique(mask).size - 1
    pq_norms = np.zeros((n_labels, n_labels))
    for i_p in range(n_labels):
        # PERF: gabmul_p depends only on i_p — build it once per row instead
        # of once per (i_p, i_q) pair as the original did. Results are
        # unchanged; only redundant constructor calls are removed.
        gabmul_p = GaborMultiplier(mask=(mask == i_p + 1),
                                   dgt_params=dgt_params,
                                   signal_params=signal_params)
        for i_q in range(i_p):
            gabmul_q = GaborMultiplier(mask=(mask == i_q + 1),
                                       dgt_params=dgt_params,
                                       signal_params=signal_params)
            gabmul_pq = gabmul_p @ gabmul_q
            # Dominant (largest-magnitude) eigenvalue via sparse eigs;
            # presumably real for this operator composition — np.real
            # discards any numerical imaginary residue.
            pq_norms[i_p, i_q] = \
                np.real(eigs(A=gabmul_pq, k=1, return_eigenvectors=False)[0])
    return pq_norms
def _update_pq_norms(mask, pq_norms, i_p, dgt_params, signal_params):
"""
Update (in-place) distance between one particular sub-region and all
sub-regions in distance matrix.
Parameters
----------
mask : nd-array
Time-frequency mask with one positive integer for each sub-region
and zeros outside sub-regions.
pq_norms : nd-array
Matrix of distances between sub-regions, updated in-place.
i_p : int
Index of sub-region to be updated
dgt_params : dict
DGT parameters