Changes from all commits (74 commits)
8560dc1
codecov changes to workflow
MukundhMurthy Jan 4, 2023
8babf85
install pytest-cov
MukundhMurthy Jan 4, 2023
2992a77
correct norm log likelihood function
Sichao25 May 2, 2023
a369b5c
update comment info
Sichao25 May 4, 2023
e46b7e8
remove unexpected parameter
Sichao25 May 5, 2023
72175e2
create new files for preprocess
Sichao25 May 5, 2023
cde5b27
reorganize QC funcs
Sichao25 May 5, 2023
e69e4f6
reorganize pca funcs
Sichao25 May 5, 2023
ccfb493
reorganize deprecated funcs
Sichao25 May 5, 2023
e3ad1fc
remove circular import
Sichao25 May 5, 2023
02c5ce5
Merge branch 'aristoteleo:master' into codecov
MukundhMurthy May 6, 2023
fe5a0e4
create deprecated wrapper
Sichao25 May 8, 2023
b54a16a
debug test_preprocess
Sichao25 May 8, 2023
503e915
delete files related with Conda recipe
Ukyeon May 8, 2023
c047a2f
Merge remote-tracking branch 'refs/remotes/origin/conda' into conda
Ukyeon May 8, 2023
9eac1fa
optionalize cell_cycle_score in recipe monocle
Ukyeon May 8, 2023
646dfa1
debug tests in data_io and plot
Sichao25 May 9, 2023
3bdcd5a
reorganize normalization funcs
Sichao25 May 9, 2023
98f0b2c
reorganize transform funcs
Sichao25 May 9, 2023
82e60e2
move regress out to QC
Sichao25 May 9, 2023
c814573
reorder transform funcs
Sichao25 May 9, 2023
6defbfa
reorganize file preprocess and preprocessor_utils
Sichao25 May 9, 2023
77d653f
remove unused import
Sichao25 May 9, 2023
e0d0fe6
delete empty files
Sichao25 May 9, 2023
58f621a
Merge pull request #493 from Ukyeon/fano
Xiaojieqiu May 9, 2023
c4e6520
Merge pull request #495 from Sichao25/norm_logll
Xiaojieqiu May 9, 2023
0d78e85
Merge pull request #492 from Ukyeon/conda
Xiaojieqiu May 9, 2023
2c0aa13
Merge pull request #497 from Sichao25/reorganize_pp
Xiaojieqiu May 9, 2023
8a38870
delete extra init file
Sichao25 May 9, 2023
66808d8
rename deprecated functions
May 10, 2023
145ef96
enable legacy normalize func in init
May 10, 2023
7ace35c
update import statement
May 10, 2023
cb83453
remove unexpected operation
May 10, 2023
8b4f35f
raise error when no pts found
May 11, 2023
8485539
modify import statement
Sichao25 May 11, 2023
9c8dc96
debug parameters
Sichao25 May 11, 2023
cc8b956
remove extra output
Sichao25 May 11, 2023
0a21336
debug
Sichao25 May 11, 2023
5f1503b
fork only works in Mac OS
Ukyeon May 12, 2023
dcf356d
update init
Sichao25 May 15, 2023
6e1d81a
Merge pull request #501 from Sichao25/fix_pts
Xiaojieqiu May 15, 2023
1a32c67
debug import
Sichao25 May 15, 2023
ddc54c1
Merge pull request #502 from Sichao25/debug_prepare_dim_reduction
Xiaojieqiu May 15, 2023
6e0e4c2
Merge pull request #496 from Sichao25/gseapy
Xiaojieqiu May 15, 2023
b47f151
Merge pull request #500 from Sichao25/reorganize_pp
Xiaojieqiu May 15, 2023
a0e4632
fix error
Ukyeon May 15, 2023
03726bf
bug fix for unintialization of invalid_ids
Ukyeon May 15, 2023
3cfe8f9
Merge pull request #504 from Ukyeon/fano
Xiaojieqiu May 15, 2023
e7dd314
Merge pull request #503 from Ukyeon/debug_perturbation
Xiaojieqiu May 15, 2023
54f8b3d
error raised while writing key 'notfound' in h5py
Ukyeon May 16, 2023
d4c7046
Merge pull request #508 from Ukyeon/h5ad_save
Xiaojieqiu May 17, 2023
1f84101
add the logging info when X_pca, X_umap are added to adata.obsm
Ukyeon May 17, 2023
587ead4
debug save_show_or_return
Sichao25 May 18, 2023
2016289
Merge pull request #509 from Ukyeon/fano
Xiaojieqiu May 18, 2023
a9b3958
Merge pull request #511 from Sichao25/debug
Xiaojieqiu May 18, 2023
d7b5616
remove adata.obsm[X] in future, use adata.obsm.X_pca instead
Ukyeon May 19, 2023
364e4e6
merged to the lastest code
Ukyeon May 19, 2023
aad7f23
update badges
Ukyeon May 19, 2023
7a4dd68
Merge pull request #515 from Ukyeon/regressout
Xiaojieqiu May 20, 2023
d263664
Update README.md
Xiaojieqiu May 20, 2023
65dcc2c
Merge pull request #516 from aristoteleo/Xiaojieqiu-patch-2
Xiaojieqiu May 20, 2023
9e48ccf
graph calculus Jacobian fix
yaz62 May 22, 2023
372362c
graph calculus jacobian fix
yaz62 May 22, 2023
dfa6a81
Merge pull request #518 from yaz62/master
Xiaojieqiu May 22, 2023
beaf71a
Merge pull request #514 from Ukyeon/plot_doc
Xiaojieqiu May 22, 2023
418b35d
zebrafish data preparation as fixture
MukundhMurthy May 23, 2023
fd3c5d1
put zebrafish fixture inside conftest
MukundhMurthy May 23, 2023
921815f
fixtures for other datasets and add mygene to requirements.txt
MukundhMurthy May 24, 2023
cb12911
Merge branch 'codecov' of https://github.com/MukundhMurthy/dynamo-rel…
MukundhMurthy May 26, 2023
0b9d68f
add additional test dependencies to python-plain-run-test.yml
MukundhMurthy May 26, 2023
49f1bac
remove 3.7
MukundhMurthy May 26, 2023
f7f1d76
skip cluster tests
MukundhMurthy May 26, 2023
3d0cfa5
skip test_neighbors
MukundhMurthy May 26, 2023
555c0d0
remove test_pl_utils.py
MukundhMurthy May 26, 2023
16 changes: 11 additions & 5 deletions .github/workflows/python-plain-run-test.yml
@@ -1,4 +1,4 @@
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# This workflow will install Python dependencies, run tests, collect coverage and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: dynamo test
@@ -13,7 +13,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.7, 3.8, 3.9]
python-version: [3.8, 3.9]

steps:
- uses: actions/checkout@v2
@@ -39,7 +39,13 @@ jobs:
# - name: Test with pytest
# run: |
# pytest
- name: Test with plain runs
- name: Run tests and collect coverage
run: |
# pytest -v
# bash ./tests/run_plain_tests.sh
pip install pytest-cov
pip install mygene igraph leidenalg
pytest --cov=./ --cov-report=xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
with:
fail_ci_if_error: true
verbose: true
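The updated step installs pytest-cov plus the extra test dependencies and produces an XML coverage report for Codecov. A minimal sketch of reproducing that coverage run from Python instead of the shell (assumes pytest and pytest-cov are installed locally; the --cov target mirrors the workflow):

import pytest

# Equivalent of `pytest --cov=./ --cov-report=xml` in the workflow step above;
# pytest-cov registers the --cov options and writes coverage.xml to the cwd.
exit_code = pytest.main(["--cov=./", "--cov-report=xml"])
print("pytest exit code:", int(exit_code))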
7 changes: 4 additions & 3 deletions README.md
@@ -4,13 +4,14 @@

##

[![package](https://github.com/aristoteleo/dynamo-release/workflows/Python%20package/badge.svg)](https://github.com/aristoteleo/dynamo-release/runs/950435412)
[![upload](https://github.com/aristoteleo/dynamo-release/workflows/Upload%20Python%20Package/badge.svg)](https://pypi.org/project/dynamo-release/)
[![package](https://github.com/aristoteleo/dynamo-release/workflows/Python%20package/badge.svg)](https://github.com/aristoteleo/dynamo-release)
[![documentation](https://readthedocs.org/projects/dynamo-release/badge/?version=latest)](https://dynamo-release.readthedocs.io/en/latest/)
[![upload](https://img.shields.io/pypi/v/dynamo-release?logo=PyPI)](https://pypi.org/project/dynamo-release/)
[![download](https://static.pepy.tech/badge/dynamo-release)](https://pepy.tech/project/dynamo-release)
[![star](https://img.shields.io/github/stars/aristoteleo/dynamo-release?logo=GitHub&color=red)](https://github.com/aristoteleo/dynamo-release/stargazers)
![build](https://github.com/aristoteleo/dynamo-release/actions/workflows/python-package.yml/badge.svg)
![test](https://github.com/aristoteleo/dynamo-release/actions/workflows/python-plain-run-test.yml/badge.svg)


## **Dynamo**: Mapping Transcriptomic Vector Fields of Single Cells

Inclusive model of expression dynamics with metabolic labeling-based scRNA-seq / multiomics, vector field reconstruction, potential landscape mapping, differential geometry analyses, and most probable paths / *in silico* perturbation predictions.
Empty file removed __init__.py
Empty file.
1 change: 0 additions & 1 deletion build.sh

This file was deleted.

2 changes: 0 additions & 2 deletions conda_recipe/bld.bat

This file was deleted.

1 change: 0 additions & 1 deletion conda_recipe/build.sh

This file was deleted.

57 changes: 0 additions & 57 deletions conda_recipe/meta.yaml

This file was deleted.

9 changes: 4 additions & 5 deletions dynamo/configuration.py
@@ -36,6 +36,7 @@ class DynamoAdataKeyManager:
# special key names frequently used in dynamo
X_LAYER = "X"
PROTEIN_LAYER = "protein"
X_PCA = "X_pca"

def gen_new_layer_key(layer_name, key, sep="_") -> str:
"""utility function for returning a new key name for a specific layer. By convention layer_name should not have the separator as the last character."""
@@ -106,7 +107,7 @@ def check_if_layer_exist(adata: AnnData, layer: str) -> bool:
def get_available_layer_keys(adata, layers="all", remove_pp_layers=True, include_protein=True):
"""Get the list of available layers' keys. If `layers` is set to all, return a list of all available layers; if `layers` is set to a list, then the intersetion of available layers and `layers` will be returned."""
layer_keys = list(adata.layers.keys())
if layers is None: # layers=adata.uns["pp"]["experiment_layers"], in calc_sz_factor
if layers is None: # layers=adata.uns["pp"]["experiment_layers"], in calc_sz_factor
layers = "X"
if remove_pp_layers:
layer_keys = [i for i in layer_keys if not i.startswith("X_")]
@@ -143,10 +144,7 @@ def allowed_X_layer_names():
def init_uns_pp_namespace(adata: AnnData):
adata.uns[DynamoAdataKeyManager.UNS_PP_KEY] = {}

def get_excluded_layers(
X_total_layers: bool = False,
splicing_total_layers: bool = False
) -> List:
def get_excluded_layers(X_total_layers: bool = False, splicing_total_layers: bool = False) -> List:
"""Get a list of excluded layers based on the provided arguments.

When splicing_total_layers is False, the function normalize spliced and unspliced RNA separately using each
@@ -199,6 +197,7 @@ def aggregate_layers_into_total(
layers.extend(["_total_"])
return total_layers, layers


# TODO discuss alias naming convention
DKM = DynamoAdataKeyManager

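The new X_PCA constant gives preprocessing and downstream code a single alias for the "X_pca" slot in adata.obsm. A small sketch of reading the embedding through the key manager instead of a hard-coded string (DKM and X_PCA are defined in the hunk above):

from anndata import AnnData
from dynamo.configuration import DKM

def get_pca_embedding(adata: AnnData):
    # DKM.X_PCA == "X_pca", per the constant added in configuration.py.
    return adata.obsm[DKM.X_PCA]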
34 changes: 25 additions & 9 deletions dynamo/dynamo_logger.py
@@ -152,19 +152,35 @@ def error(self, message, indent_level=1, *args, **kwargs):
message = format_logging_message(message, logging.ERROR, indent_level=indent_level)
return self.logger.error(message, *args, **kwargs)

def info_insert_adata(self, key, adata_attr="obsm", indent_level=1, *args, **kwargs):
def info_insert_adata(self, key, adata_attr="obsm", log_level=logging.NOTSET, indent_level=1, *args, **kwargs):
message = "<insert> %s to %s in AnnData Object." % (key, adata_attr)
message = format_logging_message(message, logging.DEBUG, indent_level=indent_level)
return self.logger.debug(message, *args, **kwargs)
if log_level == logging.NOTSET or log_level == logging.DEBUG:
self.debug(message, indent_level=indent_level, *args, **kwargs)
elif log_level == logging.INFO:
self.info(message, indent_level=indent_level, *args, **kwargs)
elif log_level == logging.WARN:
self.warning(message, indent_level=indent_level, *args, **kwargs)
elif log_level == logging.ERROR:
self.error(message, indent_level=indent_level, *args, **kwargs)
elif log_level == logging.CRITICAL:
self.critical(message, indent_level=indent_level, *args, **kwargs)
else:
raise NotImplementedError

def info_insert_adata_var(self, key, indent_level=1, *args, **kwargs):
return self.info_insert_adata(self, key, adata_attr="var", indent_level=1, *args, **kwargs)
def info_insert_adata_var(self, key, log_level, indent_level, *args, **kwargs):
return self.info_insert_adata(
self, key, adata_attr="var", log_level=log_level, indent_level=indent_level, *args, **kwargs
)

def info_insert_adata_obsm(self, key, indent_level=1, *args, **kwargs):
return self.info_insert_adata(self, key, adata_attr="obsm", indent_level=1, *args, **kwargs)
def info_insert_adata_obsm(self, key, log_level, indent_level, *args, **kwargs):
return self.info_insert_adata(
self, key, adata_attr="obsm", log_level=log_level, indent_level=indent_level, *args, **kwargs
)

def info_insert_adata_uns(self, key, indent_level=1, *args, **kwargs):
return self.info_insert_adata(self, key, adata_attr="uns", indent_level=1, *args, **kwargs)
def info_insert_adata_uns(self, key, log_level, indent_level, *args, **kwargs):
return self.info_insert_adata(
self, key, adata_attr="uns", log_level=log_level, indent_level=indent_level, *args, **kwargs
)

def log_time(self):
now = time.time()
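info_insert_adata now takes a log_level argument and dispatches the "<insert>" message to the matching logger method. The same dispatch idea, stripped down to the standard library as a standalone sketch (this is not dynamo's actual LoggerManager, just the pattern):

import logging

_LEVEL_TO_METHOD = {
    logging.NOTSET: "debug",
    logging.DEBUG: "debug",
    logging.INFO: "info",
    logging.WARN: "warning",
    logging.ERROR: "error",
    logging.CRITICAL: "critical",
}

def log_insert_adata(logger: logging.Logger, key: str, adata_attr: str = "obsm",
                     log_level: int = logging.NOTSET) -> None:
    message = "<insert> %s to %s in AnnData Object." % (key, adata_attr)
    method = _LEVEL_TO_METHOD.get(log_level)
    if method is None:
        raise NotImplementedError(f"unsupported log level: {log_level}")
    getattr(logger, method)(message)

logging.basicConfig(level=logging.INFO)
log_insert_adata(logging.getLogger("dynamo"), "X_pca", "obsm", log_level=logging.INFO)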
1 change: 0 additions & 1 deletion dynamo/external/gseapy.py
@@ -70,7 +70,6 @@ def enrichr(
gene_sets=gene_sets, # GO_Biological_Process_2018
organism=organism, # don't forget to set organism to the one you desired! e.g. Yeast
background=background,
description=description,
outdir=outdir,
no_plot=no_plot,
cutoff=cutoff, # test dataset, use lower value from range(0,1)
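The `description` keyword is dropped here because recent gseapy releases no longer accept it. A hedged sketch of an equivalent call made directly against gseapy (assumes the gseapy package is installed; the gene list is made up for illustration):

import gseapy

enr = gseapy.enrichr(
    gene_list=["SOX2", "POU5F1", "NANOG"],        # hypothetical query genes
    gene_sets="GO_Biological_Process_2018",
    organism="human",
    outdir=None,                                   # keep results in memory only
    cutoff=0.5,
)
print(enr.results.head())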
2 changes: 1 addition & 1 deletion dynamo/external/scifate.py
@@ -169,7 +169,7 @@ def adata_processing_TF_link(
total = adata.layers[nt_layers[1]]

# recalculate size factor
from ..preprocessing import calc_sz_factor_legacy
from ..preprocessing.deprecated import calc_sz_factor_legacy

adata = calc_sz_factor_legacy(
adata,
2 changes: 1 addition & 1 deletion dynamo/external/scribe.py
@@ -151,7 +151,7 @@ def scribe(

if normalize:
# recalculate size factor
from ..preprocessing import calc_sz_factor_legacy
from ..preprocessing.deprecated import calc_sz_factor_legacy

adata = calc_sz_factor_legacy(
adata,
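Both scifate.py and scribe.py now pull the legacy size-factor routine from the relocated deprecated module. A minimal sketch of the new import path (further keyword arguments are omitted; the defaults are assumed to be acceptable):

from anndata import AnnData
from dynamo.preprocessing.deprecated import calc_sz_factor_legacy

def recompute_size_factors(adata: AnnData) -> AnnData:
    # Recalculate size factors with the legacy routine before downstream normalization.
    return calc_sz_factor_legacy(adata)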
2 changes: 0 additions & 2 deletions dynamo/plot/connectivity.py
@@ -331,8 +331,6 @@ def connectivity_base(
plt.show()
if save_show_or_return in ["return", "all"]:
return ax
else:
raise NotImplementedError("Unsupported save_show_or_return")


docstrings.delete_params("con_base.parameters", "edge_df", "save_show_or_return", "save_kwargs")
2 changes: 0 additions & 2 deletions dynamo/plot/dynamics.py
@@ -1089,8 +1089,6 @@ def phase_portraits(
plt.show()
if save_show_or_return in ["return", "all"]:
return g
else:
raise NotImplementedError("Unsupported save_show_or_return")


def dynamics(
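In both connectivity_base and phase_portraits the trailing `raise NotImplementedError` is removed: values such as "save" or "show" are already handled by the earlier branches, so reaching the end of the function is not an error. A sketch of the resulting save/show/return pattern shared by the plotting functions (save_fig stands in for dynamo's figure-saving helper):

import matplotlib.pyplot as plt

def finish_figure(ax, save_show_or_return: str = "show", save_fig=None, **save_kwargs):
    # "both" saves and shows; "all" saves, shows, and returns the axes.
    if save_show_or_return in ["save", "both", "all"] and save_fig is not None:
        save_fig(**save_kwargs)
    if save_show_or_return in ["show", "both", "all"]:
        plt.tight_layout()
        plt.show()
    if save_show_or_return in ["return", "all"]:
        return ax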
5 changes: 3 additions & 2 deletions dynamo/plot/networks.py
@@ -367,6 +367,7 @@ def circosPlot(
"""
try:
import nxviz as nv
from nxviz import annotate
except ImportError:
raise ImportError("install nxviz via `pip install nxviz`.")

@@ -382,9 +383,9 @@
},
)

nv.annotate.circos_labels(network, group_by=node_label_key, layout=circos_label_layout)
annotate.circos_labels(network, group_by=node_label_key, layout=circos_label_layout)
if node_color_key and show_colorbar:
nv.annotate.node_colormapping(
annotate.node_colormapping(
network,
color_by=node_color_key,
legend_kwargs={"loc": "upper right", "bbox_to_anchor": (0.0, 1.0)},
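The circos plot now imports the annotate submodule explicitly and calls annotate.circos_labels / annotate.node_colormapping directly. A hedged sketch of the same nxviz calls on a toy graph (assumes nxviz >= 0.7 is installed; the "club" grouping attribute comes from the example graph, not from dynamo):

import networkx as nx
import nxviz as nv
from nxviz import annotate

G = nx.karate_club_graph()                 # toy graph whose nodes carry a "club" attribute
ax = nv.circos(G, group_by="club")         # draw the circos layout grouped by club
annotate.circos_labels(G, group_by="club", layout="rotate")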
10 changes: 5 additions & 5 deletions dynamo/plot/preprocess.py
@@ -16,7 +16,7 @@

from ..configuration import DynamoAdataKeyManager
from ..dynamo_logger import main_warning
from ..preprocessing import preprocess as pp
from ..preprocessing import gene_selection
from ..preprocessing.gene_selection import get_prediction_by_svr
from ..preprocessing.utils import detect_experiment_datatype
from ..tools.utils import get_mapper, update_dict
@@ -49,8 +49,10 @@ def basic_stats(
(seaborn.FacetGrid) would be returned.
"""

import matplotlib.pyplot as plt

if len(adata.obs.columns.intersection(["nGenes", "nCounts", "pMito"])) != 3:
from ..preprocessing.utils import basic_stats
from ..preprocessing.QC import basic_stats

basic_stats(adata)

@@ -118,7 +120,6 @@ def basic_stats(
s_kwargs["close"] = False
save_fig(**s_kwargs)
if save_show_or_return in ["show", "both", "all"]:
import matplotlib.pyplot as plt
plt.tight_layout()
plt.show()
if save_show_or_return in ["return", "all"]:
@@ -435,7 +436,6 @@ def biplot(
figsize: Tuple[float, float] = (6, 4),
scale_pca_embedding: bool = False,
draw_pca_embedding: bool = False,

save_show_or_return: Literal["save", "show", "return"] = "show",
save_kwargs: Dict[str, Any] = {},
ax: Optional[Axes] = None,
@@ -992,7 +992,7 @@ def highest_frac_genes(
if log:
ax.set_xscale("log")

adata = pp.highest_frac_genes(
adata = gene_selection.highest_frac_genes(
adata,
store_key=store_key,
n_top=n_top,
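The plotting module now takes basic_stats from preprocessing.QC and highest_frac_genes from preprocessing.gene_selection instead of the old preprocess module. A small sketch of the relocated helpers (keyword values here are illustrative):

from anndata import AnnData
from dynamo.preprocessing import gene_selection
from dynamo.preprocessing.QC import basic_stats

def prepare_plot_inputs(adata: AnnData, n_top: int = 30):
    # basic_stats fills adata.obs with nGenes, nCounts and pMito if they are missing.
    basic_stats(adata)
    return gene_selection.highest_frac_genes(adata, store_key="highest_frac_genes", n_top=n_top)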
5 changes: 3 additions & 2 deletions dynamo/prediction/fate.py
@@ -9,6 +9,7 @@
from sklearn.neighbors import NearestNeighbors
from tqdm import tqdm

from ..configuration import DKM
from ..dynamo_logger import (
LoggerManager,
main_info,
@@ -173,7 +174,7 @@ def fate(
ndim = adata.uns["umap_fit"]["fit"]._raw_data.shape[1]

if "X" in adata.obsm_keys():
if ndim == adata.obsm["X"].shape[1]: # lift the dimension up again
if ndim == adata.obsm[DKM.X_PCA].shape[1]: # lift the dimension up again
exprs = adata.uns["pca_fit"].inverse_transform(prediction)

if adata.var.use_for_dynamics.sum() == exprs.shape[1]:
@@ -211,7 +212,7 @@ def _fate(
interpolation_num: int = 250,
average: bool = True,
sampling: str = "arc_length",
cores:int = 1,
cores: int = 1,
) -> Tuple[np.ndarray, np.ndarray]:
"""Predict the historical and future cell transcriptomic states over arbitrary time scales by integrating vector
field functions from one or a set of initial cell state(s).
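The fate prediction now reads the PCA embedding via DKM.X_PCA and, when the predicted states live in PCA space, lifts them back to expression space with the stored PCA fit. The lifting step in isolation, sketched with scikit-learn on stand-in data:

import numpy as np
from sklearn.decomposition import PCA

X = np.random.rand(100, 50)                  # stand-in expression matrix (cells x genes)
pca_fit = PCA(n_components=30).fit(X)
prediction = pca_fit.transform(X[:5])        # a few predicted states in PCA space

if prediction.shape[1] == pca_fit.n_components_:   # lift the dimension up again
    exprs = pca_fit.inverse_transform(prediction)  # back to the 50-gene space
    print(exprs.shape)                             # (5, 50)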
26 changes: 11 additions & 15 deletions dynamo/preprocessing/Preprocessor.py
@@ -16,30 +16,26 @@
main_info_insert_adata,
main_warning,
)
from ..tools.connectivity import neighbors as default_neighbors
from ..tools.utils import update_dict
from .cell_cycle import cell_cycle_scores
from .external import (
normalize_layers_pearson_residuals,
sctransform,
select_genes_by_pearson_residuals,
)
from ..tools.connectivity import neighbors as default_neighbors
from ..tools.utils import update_dict
from .cell_cycle import cell_cycle_scores
from .gene_selection import select_genes_by_seurat_recipe, select_genes_monocle
from .preprocess import normalize_cell_expr_by_size_factors_legacy, pca
from .preprocessor_utils import (
Freeman_Tukey,
_infer_labeling_experiment_type,
calc_sz_factor,
)
from .preprocessor_utils import (
from .normalization import calc_sz_factor, normalize
from .pca import pca
from .QC import (
basic_stats,
filter_cells_by_outliers as monocle_filter_cells_by_outliers,
)
from .preprocessor_utils import (
filter_genes_by_outliers as monocle_filter_genes_by_outliers,
regress_out_parallel,
)
from .preprocessor_utils import log, log1p, log2, normalize, regress_out_parallel
from .transform import Freeman_Tukey, log, log1p, log2
from .utils import (
basic_stats,
_infer_labeling_experiment_type,
collapse_species_adata,
convert2symbol,
convert_layers2csr,
@@ -128,7 +124,6 @@ def __init__(
self.regress_out = regress_out_parallel
self.pca = pca_function
self.pca_kwargs = pca_kwargs
self.cell_cycle_score = cell_cycle_scores

# self.n_top_genes = n_top_genes
self.convert_gene_name = convert_gene_name_function
@@ -493,6 +488,7 @@ def config_monocle_recipe(self, adata: AnnData, n_top_genes: int = 2000) -> None
self.pca = pca
self.pca_kwargs = {"pca_key": "X_pca"}

self.cell_cycle_score = None # optional: cell_cycle_scores
self.cell_cycle_score_kwargs = {
"layer": None,
"gene_list": None,
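With this change cell cycle scoring is opt-in for the monocle recipe (self.cell_cycle_score defaults to None). A hedged sketch of configuring the reorganized Preprocessor and re-enabling the score; the preprocess_adata_monocle entry point and the zebrafish sample loader are assumptions, not shown in this diff:

import dynamo as dyn
from dynamo.preprocessing.cell_cycle import cell_cycle_scores
from dynamo.preprocessing.Preprocessor import Preprocessor

adata = dyn.sample_data.zebrafish()                 # assumed sample dataset loader
preprocessor = Preprocessor()
preprocessor.config_monocle_recipe(adata, n_top_genes=2000)
preprocessor.cell_cycle_score = cell_cycle_scores   # opt back in to cell cycle scoring
preprocessor.preprocess_adata_monocle(adata)        # assumed recipe entry point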