From f3898164e514b60eba52cff8528ae75b9ca18eb9 Mon Sep 17 00:00:00 2001 From: Tolga Dincer Date: Thu, 19 Jan 2023 20:37:22 -0600 Subject: [PATCH] packaging --- .gitignore | 116 +++++++++++++ CHANGELOG.md | 8 + CODE_OF_CONDUCT.md | 133 +++++++++++++++ LICENSE | 0 compress_multiphoton/__init__.py | 4 + compress_multiphoton/analysis.py | 44 +++++ compress_multiphoton/compression.py | 51 ++++++ compress_multiphoton/version.py | 3 + notebooks/19Jan2023Meeting.ipynb | 152 ++++++++++++++++++ .../Compression.ipynb | 0 requirements.txt | 5 + setup.py | 31 ++++ 12 files changed, 547 insertions(+) create mode 100644 .gitignore create mode 100644 CHANGELOG.md create mode 100644 CODE_OF_CONDUCT.md create mode 100644 LICENSE create mode 100644 compress_multiphoton/__init__.py create mode 100644 compress_multiphoton/analysis.py create mode 100644 compress_multiphoton/compression.py create mode 100644 compress_multiphoton/version.py create mode 100644 notebooks/19Jan2023Meeting.ipynb rename Compression.ipynb => notebooks/Compression.ipynb (100%) create mode 100644 requirements.txt create mode 100644 setup.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..c48ebaa --- /dev/null +++ b/.gitignore @@ -0,0 +1,116 @@ +# User data +.DS_Store +temp* + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +.idea/ + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy +scratchpaper.* + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# dotenv +.env + +# virtualenv +.venv +venv/ +ENV/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +# datajoint +dj_local_conf.json +dj_local_conf_old.json + +# emacs +**/*~ +**/#*# +**/.#* \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..04b6d06 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,8 @@ +# Changelog + +Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. 
+
+## [0.1.0] - YYYY-MM-DD
++ First release
+
+[0.1.0]: https://github.com/datajoint/compress-multiphoton/releases/tag/0.1.0
\ No newline at end of file
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..684cf81
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,133 @@
+
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+We as members, contributors, and leaders pledge to make participation in our
+community a harassment-free experience for everyone, regardless of age, body
+size, visible or invisible disability, ethnicity, sex characteristics, gender
+identity and expression, level of experience, education, socio-economic status,
+nationality, personal appearance, race, caste, color, religion, or sexual
+identity and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming,
+diverse, inclusive, and healthy community.
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our
+community include:
+
+* Demonstrating empathy and kindness toward other people
+* Being respectful of differing opinions, viewpoints, and experiences
+* Giving and gracefully accepting constructive feedback
+* Accepting responsibility and apologizing to those affected by our mistakes,
+  and learning from the experience
+* Focusing on what is best not just for us as individuals, but for the overall
+  community
+
+Examples of unacceptable behavior include:
+
+* The use of sexualized language or imagery, and sexual attention or advances of
+  any kind
+* Trolling, insulting or derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or email address,
+  without their explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+  professional setting
+
+## Enforcement Responsibilities
+
+Community leaders are responsible for clarifying and enforcing our standards of
+acceptable behavior and will take appropriate and fair corrective action in
+response to any behavior that they deem inappropriate, threatening, offensive,
+or harmful.
+
+Community leaders have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, and will communicate reasons for moderation
+decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces, and also applies when
+an individual is officially representing the community in public spaces.
+Examples of representing our community include using an official e-mail address,
+posting via an official social media account, or acting as an appointed
+representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported to the community leaders responsible for enforcement at
+[Support@DataJoint.com](mailto:support@datajoint.com).
+All complaints will be reviewed and investigated promptly and fairly.
+
+All community leaders are obligated to respect the privacy and security of the
+reporter of any incident.
+
+## Enforcement Guidelines
+
+Community leaders will follow these Community Impact Guidelines in determining
+the consequences for any action they deem in violation of this Code of Conduct:
+
+### 1. Correction
+
+**Community Impact**: Use of inappropriate language or other behavior deemed
+unprofessional or unwelcome in the community.
+ +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series of +actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or permanent +ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the +community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.1, available at +[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. + +Community Impact Guidelines were inspired by +[Mozilla's code of conduct enforcement ladder][Mozilla CoC]. + +For answers to common questions about this code of conduct, see the FAQ at +[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at +[https://www.contributor-covenant.org/translations][translations]. 
+
+[homepage]: https://www.contributor-covenant.org
+[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
+[Mozilla CoC]: https://github.com/mozilla/diversity
+[FAQ]: https://www.contributor-covenant.org/faq
+[translations]: https://www.contributor-covenant.org/translations
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..e69de29
diff --git a/compress_multiphoton/__init__.py b/compress_multiphoton/__init__.py
new file mode 100644
index 0000000..2cdb7aa
--- /dev/null
+++ b/compress_multiphoton/__init__.py
@@ -0,0 +1,4 @@
+from .compression import compute_quantal_size
+from .analysis import analyze
+
+__all__ = ["compute_quantal_size", "analyze"]
diff --git a/compress_multiphoton/analysis.py b/compress_multiphoton/analysis.py
new file mode 100644
index 0000000..f80edb3
--- /dev/null
+++ b/compress_multiphoton/analysis.py
@@ -0,0 +1,44 @@
+import numpy as np
+from time import time
+import plotly.graph_objects as go
+from .compression import compute_quantal_size
+
+
+def timer_func(func):
+    # Decorator that prints the execution time of the wrapped function
+    def wrap_func(*args, **kwargs):
+        t1 = time()
+        result = func(*args, **kwargs)
+        t2 = time()
+        print(f"Function {func.__name__!r} executed in {(t2-t1):.4f}s")
+        return result
+
+    return wrap_func
+
+
+def analyze(scan):
+    (
+        model,
+        min_intensity,
+        max_intensity,
+        unique_pixels,
+        unique_variances,
+        quantal_size,
+        zero_level,
+    ) = compute_quantal_size(scan)
+
+    print(f"Quantal size: {quantal_size}")
+    print(f"Intercept: {zero_level}")
+
+    # evaluate the fitted line over the same intensity range as the plotted data
+    x = np.arange(unique_pixels.min(), unique_pixels.max() + 1)
+    f = go.Figure(go.Scatter(x=unique_pixels, y=unique_variances, mode="markers"))
+    f.add_trace(go.Scatter(x=x, y=model.predict(x.reshape(-1, 1))))
+    f.update_layout(
+        width=700,
+        height=700,
+        yaxis=dict(title_text="Variance"),
+        xaxis=dict(title_text="Pixel Intensity"),
+        showlegend=False,
+    )
+
+    return f
diff --git a/compress_multiphoton/compression.py b/compress_multiphoton/compression.py
new file mode 100644
index 0000000..28c16c9
--- /dev/null
+++ b/compress_multiphoton/compression.py
@@ -0,0 +1,51 @@
+import numpy as np
+from sklearn.linear_model import TheilSenRegressor
+
+
+def compute_quantal_size(scan):
+    # Set some params
+    num_frames = scan.shape[2]
+    # min_count = num_frames * 0.1  # pixel values with fewer appearances will be ignored
+    min_count = scan.min()
+    max_acceptable_intensity = 3000  # pixel values higher than this will be ignored
+    # max_acceptable_intensity = 100000
+
+    # Make sure the scan is at least 32-bit (int16 overflows when summed with itself)
+    scan = scan.astype(np.float32, copy=False)
+
+    # Create pixel values at each position in field
+    eps = 1e-4  # np.round rounds halves to the nearest even number; adding eps makes 0.5 -> 1, 1.5 -> 2, 2.5 -> 3, etc.
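+    # Rationale for the steps below: the mean of two adjacent frames estimates the
+    # underlying intensity at each pixel, while half of the squared frame-to-frame
+    # difference estimates the noise variance at that intensity. For shot-noise-limited
+    # data the variance grows linearly with intensity, so the slope of the robust
+    # variance-vs-intensity fit performed below is the quantal size (intensity units
+    # per photon) and its x-intercept is the zero level.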
+ pixels = np.round((scan[:, :, :-1] + scan[:, :, 1:]) / 2 + eps) + + pixels = pixels.astype(np.int16 if np.max(abs(pixels)) < 2**15 else np.int32) + + # Compute a good range of pixel values (common, not too bright values) + unique_pixels, counts = np.unique(pixels, return_counts=True) + min_intensity = min(unique_pixels[counts > min_count]) + max_intensity = max(unique_pixels[counts > min_count]) + max_acceptable_intensity = min(max_intensity, max_acceptable_intensity) + pixels_mask = np.logical_and(pixels >= min_intensity, pixels <= max_acceptable_intensity) + + # Select pixels in good range + pixels = pixels[pixels_mask] + unique_pixels, counts = np.unique(pixels, return_counts=True) + + # Compute noise variance + variances = ((scan[:, :, :-1] - scan[:, :, 1:]) ** 2 / 2)[pixels_mask] + pixels -= min_intensity + variance_sum = np.zeros(len(unique_pixels)) # sum of variances per pixel value + for i in range(0, len(pixels), int(1e8)): # chunk it for memory efficiency + variance_sum += np.bincount( + pixels[i : i + int(1e8)], weights=variances[i : i + int(1e8)], minlength=np.ptp(unique_pixels) + 1 + )[unique_pixels - min_intensity] + unique_variances = variance_sum / counts # average variance per intensity + + # Compute quantal size (by fitting a linear regressor to predict the variance from intensity) + X = unique_pixels.reshape(-1, 1) + y = unique_variances + model = TheilSenRegressor() # robust regression + model.fit(X, y) + quantal_size = model.coef_[0] + zero_level = -model.intercept_ / model.coef_[0] + + return (model, min_intensity, max_intensity, unique_pixels, unique_variances, quantal_size, zero_level) diff --git a/compress_multiphoton/version.py b/compress_multiphoton/version.py new file mode 100644 index 0000000..16325bf --- /dev/null +++ b/compress_multiphoton/version.py @@ -0,0 +1,3 @@ +"""Package metadata""" + +__version__ = "0.1.0" diff --git a/notebooks/19Jan2023Meeting.ipynb b/notebooks/19Jan2023Meeting.ipynb new file mode 100644 index 0000000..8ed3efe --- /dev/null +++ b/notebooks/19Jan2023Meeting.ipynb @@ -0,0 +1,152 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "9c0eb316-099b-4eec-991b-b71a7b7a6a22", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "from tifffile import TiffFile\n", + "from compress_multiphoton import compute_quantal_size, analyze\n", + "import plotly.graph_objects as go\n", + "import matplotlib.pyplot as plt" + ] + }, + { + "cell_type": "markdown", + "id": "2a54b08d-5e78-4b5f-b8b7-8a6f31b9ee1c", + "metadata": {}, + "source": [ + "## Read movie" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c672bb0c-602b-462f-a7f4-b53f5739f2ae", + "metadata": {}, + "outputs": [], + "source": [ + "file = '../testdata/2020-09-15_RL102_t-006_Cycle00001_Ch3.tif'\n", + "scan = TiffFile(file).asarray()\n", + "print(f\"Movie shape: {scan.shape}\")" + ] + }, + { + "cell_type": "markdown", + "id": "59499fe9-554c-43a2-b38f-8f356ca81129", + "metadata": {}, + "source": [ + "## Mean Image" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0417fd9c-82dc-4527-ba99-701f60f503b6", + "metadata": {}, + "outputs": [], + "source": [ + "quantal_size = 461 # This number may slightly vary at each run as it is estimated through robust fitting.\n", + "\n", + "plt.imshow((scan[300:302].mean(0)/quantal_size), cmap='gray')\n", + "cbar = plt.colorbar(fraction=0.05, pad=0.1)\n", + "ax2 = cbar.ax.twinx()\n", + "ymin, ymax = cbar.ax.get_ylim()\n", + "ax2.set_ylim([ymin * quantal_size, ymax * 
quantal_size]);"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "422b2c6a-fdf5-4f43-ac25-c38a1c9bed6a",
+   "metadata": {},
+   "source": [
+    "## Histogram of Mean Intensity"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "aaf5aacc-497e-4e92-8518-48c356412fd0",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "fig, ax = plt.subplots(1)\n",
+    "p = ax.hist((scan[:500].mean(0).flatten()), bins=1000, log=True)\n",
+    "plt.title('Histogram of Mean Intensity')\n",
+    "ax.set_xlabel('Intensity')\n",
+    "ax.set_ylabel('N')\n",
+    "\n",
+    "# convert raw intensity to an approximate photon count using the quantal size\n",
+    "photons = lambda intensity: intensity / 461\n",
+    "\n",
+    "ax2 = ax.twiny()\n",
+    "xmin, xmax = ax.get_xlim()\n",
+    "ax2.set_xlim(photons(xmin), photons(xmax))\n",
+    "ax2.set_xlabel('Photons');"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "f41e433a-3bb9-4c30-9a40-3b0385c3b8ac",
+   "metadata": {},
+   "source": [
+    "## Difference Image\n",
+    "Image = Frame(t+1) - Frame(t)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "dc78f124-759a-47ba-8a4c-2c1284f84777",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "t = 2000\n",
+    "plt.imshow((scan[t+1].astype(float)-scan[t].astype(float)), cmap='seismic', clim=[-2000, 2000]);"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "824467c0-a143-4380-83c4-5bde26cdf3a9",
+   "metadata": {},
+   "source": [
+    "## Quantal size estimation"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "8d9d336c-9fcc-4e4c-8750-e67618661ad5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "analyze(scan[:500])"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.9.15"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/Compression.ipynb b/notebooks/Compression.ipynb
similarity index 100%
rename from Compression.ipynb
rename to notebooks/Compression.ipynb
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..6e6a590
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,5 @@
+numpy
+plotly
+tifffile
+matplotlib
+scikit-learn
\ No newline at end of file
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..c5d2109
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+from setuptools import setup, find_packages
+from os import path
+
+pkg_name = next(p for p in find_packages() if "." not in p)
+here = path.abspath(path.dirname(__file__))
+
+with open(path.join(here, "README.md"), "r") as f:
+    long_description = f.read()
+
+with open(path.join(here, "requirements.txt")) as f:
+    requirements = f.read().splitlines()
+
+with open(path.join(here, pkg_name, "version.py")) as f:
+    exec(f.read())
+
+setup(
+    name=pkg_name.replace("_", "-"),
+    version=__version__,
+    description="",
+    long_description=long_description,
+    long_description_content_type="text/markdown",
+    author="DataJoint",
+    author_email="info@datajoint.com",
+    license="MIT",
+    url=f'https://github.com/datajoint/{pkg_name.replace("_", "-")}',
+    keywords="neuroscience multiphoton calcium-imaging compression science datajoint",
+    packages=find_packages(exclude=["contrib", "docs", "tests*"]),
+    scripts=[],
+    install_requires=requirements,
+)
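
Usage sketch (illustrative, not part of the patch): the package added above exposes compute_quantal_size and analyze. Mirroring the calls in notebooks/19Jan2023Meeting.ipynb, a minimal session would look roughly like the following; the TIFF path is a placeholder, and the dependencies are those listed in requirements.txt (numpy, plotly, tifffile, matplotlib, scikit-learn).

    # pip install -e .   # install the package defined by setup.py above
    from tifffile import TiffFile
    from compress_multiphoton import analyze

    scan = TiffFile("path/to/your_scan.tif").asarray()  # placeholder path
    fig = analyze(scan[:500])  # prints the estimated quantal size and intercept, as in the notebook
    fig.show()                 # variance-vs-intensity scatter with the robust linear fit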